Merge "Add runtime codec libs to fuzzer" into main
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 7ea51ff..5558259 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -562,8 +562,7 @@
                 GET_DEVICE_DESC_CONNECTION(IP_V4));
         append_AudioDeviceDescription(pairs,
                 AUDIO_DEVICE_IN_BUS, AUDIO_DEVICE_OUT_BUS,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                GET_DEVICE_DESC_CONNECTION(BUS));
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE);
         append_AudioDeviceDescription(pairs,
                 AUDIO_DEVICE_IN_PROXY, AUDIO_DEVICE_OUT_PROXY,
                 AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY);
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index e08bf43..71909e5 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -107,7 +107,7 @@
 
     addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                          .withDefault(new C2StreamProfileLevelInfo::output(0u, PROFILE_AV1_0,
-                                                                           LEVEL_AV1_4_1))
+                                                                           LEVEL_AV1_2))
                          .withFields({
                                  C2F(mProfileLevel, profile).equalTo(PROFILE_AV1_0),
                                  C2F(mProfileLevel, level)
@@ -116,7 +116,7 @@
                                             LEVEL_AV1_3_2, LEVEL_AV1_3_3, LEVEL_AV1_4,
                                             LEVEL_AV1_4_1}),
                          })
-                         .withSetter(ProfileLevelSetter)
+                         .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                          .build());
 
     std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
@@ -201,12 +201,69 @@
 }
 
 C2R C2SoftAomEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
-                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+                                               C2P<C2StreamProfileLevelInfo::output>& me,
+                                               const C2P<C2StreamPictureSizeInfo::input>& size,
+                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
     (void)mayBlock;
     if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
         me.set().profile = PROFILE_AV1_0;
     }
+    struct LevelLimits {
+        C2Config::level_t level;
+        float samplesPerSec;
+        uint64_t samples;
+        uint32_t bitrate;
+        size_t maxHSize;
+        size_t maxVSize;
+    };
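+    // Per-level AV1 limits applied below: max display rate (samples/sec), max picture
+    // size (samples), Main tier bitrate cap, and max picture width/height.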
+    constexpr LevelLimits kLimits[] = {
+            {LEVEL_AV1_2, 4423680, 147456, 1500000, 2048, 1152},
+            {LEVEL_AV1_2_1, 8363520, 278784, 3000000, 2816, 1584},
+            {LEVEL_AV1_3, 19975680, 665856, 6000000, 4352, 2448},
+            {LEVEL_AV1_3_1, 37950720, 1065024, 10000000, 5504, 3096},
+            {LEVEL_AV1_4, 70778880, 2359296, 12000000, 6144, 3456},
+            {LEVEL_AV1_4_1, 141557760, 2359296, 20000000, 6144, 3456},
+    };
+
+    uint64_t samples = size.v.width * size.v.height;
+    float samplesPerSec = float(samples) * frameRate.v.value;
+
+    // Check if the supplied level meets the samples / bitrate requirements.
+    // If not, update it to the lowest level that does.
+    bool found = false;
+
+    // needsUpdate stays false when the supplied level already meets
+    // the requirements.
+    bool needsUpdate = false;
     if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        needsUpdate = true;
+    }
+    for (const LevelLimits& limit : kLimits) {
+        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+            bitrate.v.value <= limit.bitrate && size.v.width <= limit.maxHSize &&
+            size.v.height <= limit.maxVSize) {
+            // This is the lowest level that meets the requirements, and if
+            // we haven't seen the supplied level yet, that means we don't
+            // need the update.
+            if (needsUpdate) {
+                ALOGD("Given level %x does not cover current configuration: "
+                        "adjusting to %x",
+                        me.v.level, limit.level);
+                me.set().level = limit.level;
+            }
+            found = true;
+            break;
+        }
+        if (me.v.level == limit.level) {
+            // We break out of the loop when the lowest feasible level is
+            // found. The fact that we're here means that our level doesn't
+            // meet the requirement and needs to be updated.
+            needsUpdate = true;
+        }
+    }
+    if (!found) {
+        // We set to the highest supported level.
         me.set().level = LEVEL_AV1_4_1;
     }
     return C2R::Ok();
@@ -248,6 +305,10 @@
     return C2R::Ok();
 }
 
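+// Returns the AV1 seq_level_idx for the configured level (LEVEL_AV1_2 maps to index 0),
+// which is the value expected by AV1E_SET_TARGET_SEQ_LEVEL_IDX.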
+uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
+    return mProfileLevel->level - LEVEL_AV1_2;
+}
+
 C2SoftAomEnc::C2SoftAomEnc(const char* name, c2_node_id_t id,
                            const std::shared_ptr<IntfImpl>& intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
@@ -324,6 +385,9 @@
 aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
     aom_codec_err_t codec_return = AOM_CODEC_OK;
 
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_TARGET_SEQ_LEVEL_IDX, mAV1EncLevel);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
     codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED,
                                      MapC2ComplexityToAOMSpeed(mComplexity->value));
     if (codec_return != AOM_CODEC_OK) goto BailOut;
@@ -478,6 +542,7 @@
         mColorAspects = mIntf->getCodedColorAspects_l();
         mQuality = mIntf->getQuality_l();
         mComplexity = mIntf->getComplexity_l();
+        mAV1EncLevel = mIntf->getLevel_l();
     }
 
 
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 3067735..7e5ea63 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -98,6 +98,8 @@
 
     bool mIs10Bit;
 
+    uint32_t mAV1EncLevel;
+
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
@@ -120,7 +122,10 @@
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                           C2P<C2StreamPictureSizeInfo::input>& me);
 
-    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me);
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+                                  const C2P<C2StreamPictureSizeInfo::input>& size,
+                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                  const C2P<C2StreamBitrateInfo::output>& bitrate);
 
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -149,6 +154,7 @@
     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
                                        const C2P<C2StreamColorAspectsInfo::input>& coded);
+    uint32_t getLevel_l() const;
 
   private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 664647a..4b189b4 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -42,6 +42,10 @@
         "libnativewindow_headers",
     ],
 
+    static_libs: [
+        "libyuv_static", // for conversion routines
+    ],
+
     shared_libs: [
         "libcutils", // for properties
         "liblog", // for ALOG
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 55a1164..06a21f6 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -21,8 +21,10 @@
 #include <android/hardware_buffer.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 
 #include <inttypes.h>
+#include <libyuv.h>
 
 #include <C2Config.h>
 #include <C2Debug.h>
@@ -32,6 +34,15 @@
 #include <SimpleC2Component.h>
 
 namespace android {
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
 constexpr uint8_t kNeutralUVBitDepth8 = 128;
 constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
@@ -506,6 +517,120 @@
     }
 }
 
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format) {
+    bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+    if (!processed) {
+        convertYUV420Planar16ToY410OrRGBA1010102(
+                (uint32_t*)dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                dstStride / sizeof(uint32_t), width, height,
+                std::static_pointer_cast<const C2ColorAspectsStruct>(aspects));
+    }
+}
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if ((format == CONV_FORMAT_I444) || (format == CONV_FORMAT_I422)) {
+        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+        // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+        // in I010ToP010, but the libyuv API doesn't make any guarantees.
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
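+        // Lay out an intermediate I010 image in tmpFrameBuffer: a full-height Y plane
+        // followed by half-height (rounded up) U and V planes.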
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        if (format == CONV_FORMAT_I444) {
+            libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        } else {
+            libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        }
+        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride, dstY, dstYStride,
+                           dstUV, dstUStride, width, height);
+    } else {
+        convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if (format == CONV_FORMAT_I444) {
+        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+        // when it's available.
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY, dstYStride,
+                           tmpU, dstUStride, tmpV, dstVStride, width, height);
+        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                srcVStride, dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format) {
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUStride, dstVStride, width, height,
+                                   isMonochrome);
+    }
+}
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index bc27474..b28c47e 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -31,6 +31,12 @@
 
 namespace android {
 
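+// Chroma subsampling of the source planes passed to the convertPlanar* helpers below.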
+typedef enum {
+    CONV_FORMAT_I420,
+    CONV_FORMAT_I422,
+    CONV_FORMAT_I444,
+} CONV_FORMAT_T;
+
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -66,6 +72,30 @@
                                         const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
                                         size_t height, C2Color::matrix_t colorMatrix,
                                         C2Color::range_t colorRange);
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format);
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..c9387dd
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,37 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+    // TODO: b/277797541 - enable once ready
+    enabled: false,
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.dav1d-av1.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp", "C2SoftDav1dDump.cpp"],
+    static_libs: [
+        "libdav1d_8bit",
+        "libdav1d_16bit",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..3f96cb3
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1235 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+namespace android {
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
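+// A positive property value overrides the default thread count (half the CPU cores)
+// chosen in initDecoder().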
+
+// codecname set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no
+        // way to know when the color format is configured to surface. Adding it is
+        // necessary to be able to choose a 10-bit format while decoding 10-bit clips in
+        // surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
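+        // 3072 = 64 * 64 * 3 / 2 / 2, i.e. half the bytes of an 8-bit 4:2:0 64x64 block.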
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        while (mDecodedPictures.size() > 0) {
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        int res = 0;
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.initDumping();
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+        while (mDecodedPictures.size() > 0) {
+            memset(&p, 0, sizeof(p));
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.destroyDumping();
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
+    return;
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+            }
+
+            Dav1dData data;
+
+            res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
+            if (res != 0) {
+                ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
+                i_ret = -1;
+            } else {
+                data.m.timestamp = in_frameIndex;
+                // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
+                //       inSize, frameIndex, data.m.timestamp);
+
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                mC2SoftDav1dDump.dumpInput(bitstream, inSize);
+#endif
+
+                bool b_draining = false;
+
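+                // Alternate between feeding the packet to dav1d and draining decoded
+                // pictures until the packet is fully consumed (or, at EOS, the decoder
+                // has been drained).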
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* Bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected, or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    bool b_output_error = false;
+
+                    do {
+                        Dav1dPicture img;
+                        memset(&img, 0, sizeof(img));
+
+                        res = dav1d_get_picture(mDav1dCtx, &img);
+                        if (res == 0) {
+                            mDecodedPictures.push_back(img);
+
+                            if (!end_of_stream) break;
+                        } else if (res == DAV1D_ERR(EAGAIN)) {
+                            /* the decoder needs more data to be able to output something.
+                             * if there is more data pending, continue the loop below or
+                             * otherwise break */
+                            if (data.sz != 0) res = 0;
+                            break;
+                        } else {
+                            ALOGE("warning! Decoder error %d!", res);
+                            b_output_error = true;
+                            break;
+                        }
+                    } while (res == 0);
+
+                    if (b_output_error) break;
+
+                    /* on drain, we must ignore the 1st EAGAIN */
+                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
+                        (end_of_stream)) {
+                        b_draining = true;
+                        res = 0;
+                    }
+                } while (res == 0 && ((data.sz != 0) || b_draining));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    (void)outputBuffer(pool, work);
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
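+            // The mastering display metadata is fixed point: chromaticities are 0.16,
+            // max_luminance is 24.8 and min_luminance is 18.14, hence the divisions below.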
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
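+            // Repack the ITU-T T.35 metadata (country code, optional extension byte, then
+            // the raw payload bytes) into a single buffer for the HDR10+ info parameter.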
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                // Push a copy so that mHdr10PlusInfo remains valid for change detection on
+                // the next frame.
+                work->worklets.front()->output.configUpdate.push_back(
+                        C2Param::Copy(*mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d
+        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
+        //               at mOutputBufferIndex = % d,
+        //       out_frameIndex = % ld.",
+        //                          vuiColorAspects.primaries,
+        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
+        //       mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
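+    // The conversion scratch buffer only ever grows and is reused across frames; allocation
+    // failures are reported to the caller, which maps them to C2_NO_MEMORY.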
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer.reset(new (std::nothrow) uint16_t[size]);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d if it is enabled.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
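+    // Use a picture already queued in mDecodedPictures if available; otherwise pull one
+    // directly from dav1d.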
+    if (mDecodedPictures.size() > 0) {
+        img = mDecodedPictures.front();
+        mDecodedPictures.pop_front();
+        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
+        // outputBuffer.",img.m.timestamp,img.m.timestamp);
+    } else {
+        res = dav1d_get_picture(mDav1dCtx, &img);
+        if (res == 0) {
+            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
+            // outputBuffer.",img.m.timestamp,img.m.timestamp);
+        } else if (res != DAV1D_ERR(EAGAIN)) {
+            ALOGE("failed to get a picture from dav1d for outputBuffer.");
+        }
+    }
+
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGD("Not enough data to output a picture.");
+        return false;
+    }
+    if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    const int width = img.p.w;
+    const int height = img.p.h;
+    if (width != mWidth || height != mHeight) {
+        mWidth = width;
+        mHeight = height;
+
+        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+        } else {
+            ALOGE("Config update size failed");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+    }
+
+    getVuiParams(&img);
+    getHDRStaticParams(&img, work);
+    getHDR10PlusInfoData(&img, work);
+
+    // out_frameIndex that the decoded picture returns from dav1d.
+    int64_t out_frameIndex = img.m.timestamp;
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
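+    // For 10-bit content (unless the client requested flexible 8-bit YUV output), choose a
+    // 10-bit output format; RGBA1010102 is only allowed when the stream signals BT.2020
+    // primaries and matrix with the PQ (SMPTE ST 2084) transfer.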
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width-aligned to 16 and height-aligned to 2.
+    // The correct "crop" of the image is applied later, in the call to createGraphicBuffer(),
+    // which receives the actual image dimensions.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    CONV_FORMAT_T convFormat;
+    switch (img.p.layout) {
+        case DAV1D_PIXEL_LAYOUT_I444:
+            convFormat = CONV_FORMAT_I444;
+            break;
+        case DAV1D_PIXEL_LAYOUT_I422:
+            convFormat = CONV_FORMAT_I422;
+            break;
+        default:
+            convFormat = CONV_FORMAT_I420;
+            break;
+    }
+
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
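+        // dav1d exposes a single chroma stride (stride[1], in bytes) shared by the U and V
+        // planes; divide by 2 to get strides in 16-bit samples.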
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            if (isMonochrome) {
+                const size_t tmpSize = mWidth;
+                const bool needFill = tmpSize > mTmpFrameBufferSize;
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                srcU = srcV = mTmpFrameBuffer.get();
+                srcUStride = srcVStride = 0;
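+                // 512 is the mid-point (neutral chroma) for 10-bit samples; with zero strides
+                // a single line of neutral chroma is enough for the whole plane.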
+                if (needFill) {
+                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                }
+            }
+            convertPlanar16ToY410OrRGBA1010102(
+                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                    dstYStride, mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
+                    convFormat);
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            size_t tmpSize = 0;
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
+                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
+                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
+                                  tmpSize);
+        } else {
+            size_t tmpSize = 0;
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
+                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
+        }
+
+        ALOGV("output a 10-bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                              mWidth, mHeight);
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        ALOGV("output an 8-bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                             mWidth, mHeight);
+#endif
+        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
+                             convFormat);
+    }
+
+    dav1d_picture_unref(&img);
+
+    finishWork(out_frameIndex, work, std::move(block));
+    block = nullptr;
+    return true;
+}
+
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
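+    // Emit all pictures dav1d still has buffered before finishing the drain.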
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..e3d2a93
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+#include <C2SoftDav1dDump.h>
+
+//#define FILE_DUMP_ENABLE 1
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
+    std::deque<Dav1dPicture> mDecodedPictures;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) const {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets |work->result|, |work->workletsProcessed| and mSignalledError.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    C2SoftDav1dDump mC2SoftDav1dDump;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.cpp b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
new file mode 100644
index 0000000..ec8d6cd
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDump"
+#include "C2SoftDav1dDump.h"
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = true;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// start dumping from this frame
+static const int STARTING_FRAME_TO_DUMP_DEFAULT = 0;
+static const char STARTING_FRAME_TO_DUMP_PROPERTY[] = "debug.dav1d.startingframetodump";
+
+void C2SoftDav1dDump::initDumping() {
+    nsecs_t now = systemTime();
+    snprintf(mInDataFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, kFileNameLength, "%s_%" PRId64 "x.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    mFramesToDump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+    mFirstFrameToDump = android::base::GetIntProperty(STARTING_FRAME_TO_DUMP_PROPERTY,
+                                                      STARTING_FRAME_TO_DUMP_DEFAULT);
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+    ALOGD("enableDumping = %d, mFramesToDump = %d", enableDumping, mFramesToDump);
+
+    if (enableDumping) {
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+}
+
+void C2SoftDav1dDump::destroyDumping() {
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+}
+
+void C2SoftDav1dDump::dumpInput(uint8_t* ptr, int size) {
+    if (mInDataFile) {
+        int ret = fwrite(ptr, 1, size, mInDataFile);
+
+        if (ret != size) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName, size, ret);
+        }
+    }
+
+    // Dump the size per inputBuffer if dumping is enabled.
+    if (mInSizeFile) {
+        int ret = fwrite(&size, 1, 4, mInSizeFile);
+
+        if (ret != 4) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4, ret);
+        }
+    }
+}
+
+template <typename T>
+void C2SoftDav1dDump::dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, int width, int height) {
+    mOutputCount++;
+    FILE* fp_out = mDav1dOutYuvFile;
+    int typeSize = sizeof(T);
+    if (fp_out && mOutputCount >= mFirstFrameToDump &&
+        mOutputCount <= (mFirstFrameToDump + mFramesToDump - 1)) {
+        for (int i = 0; i < height; i++) {
+            int ret =
+                    fwrite((uint8_t*)srcY + i * srcYStride * typeSize, 1, width * typeSize, fp_out);
+            if (ret != width * typeSize) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize, ret);
+                break;
+            }
+        }
+
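+        // The chroma planes are dumped assuming 4:2:0 subsampling (half width and height).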
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcU + i * srcUStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcV + i * srcVStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDump::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != nullptr) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
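+            // cw/ch are the chroma plane dimensions for this layout: I420 halves both
+            // dimensions, I422 halves only the width, I444 keeps full resolution.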
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+
+template void C2SoftDav1dDump::dumpOutput<uint8_t>(const uint8_t* srcY, const uint8_t* srcU,
+                                                   const uint8_t* srcV, size_t srcYStride,
+                                                   size_t srcUStride, size_t srcVStride, int width,
+                                                   int height);
+template void C2SoftDav1dDump::dumpOutput<uint16_t>(const uint16_t* srcY, const uint16_t* srcU,
+                                                    const uint16_t* srcV, size_t srcYStride,
+                                                    size_t srcUStride, size_t srcVStride, int width,
+                                                    int height);
+}  // namespace android
\ No newline at end of file
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.h b/media/codec2/components/dav1d/C2SoftDav1dDump.h
new file mode 100644
index 0000000..ea7a48a
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_C2_SOFT_DAV1D_DUMP_H_
+#define ANDROID_C2_SOFT_DAV1D_DUMP_H_
+
+#include <android-base/properties.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <dav1d/dav1d.h>
+
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
+
+namespace android {
+constexpr size_t kFileNameLength = 256;
+
+class C2SoftDav1dDump {
+  public:
+    void initDumping();
+    void destroyDumping();
+    void dumpInput(uint8_t* ptr, int new_size);
+    template <typename T>
+    void dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                    size_t srcUStride, size_t srcVStride, int width, int height);
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+  private:
+    int mFramesToDump = 0;
+    int mFirstFrameToDump = 0;
+    int mOutputCount = 0;
+
+    char mInDataFileName[kFileNameLength];
+    char mInSizeFileName[kFileNameLength];
+    char mDav1dOutYuvFileName[kFileNameLength];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+};
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DUMP_H_
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 21889b1..44f85fd 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -36,6 +36,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libaidlcommonsupport",
     ],
 
@@ -97,6 +98,7 @@
     ],
 
     static_libs: [
+        "libPlatformProperties",
         "libaidlcommonsupport",
     ],
 
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 2e0859b..392e3c6 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -411,7 +411,7 @@
 
         mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
                 AIBinder_DeathRecipient_new(OnBinderDied));
-        mDeathContext = new DeathContext{weak_from_this()};
+        mDeathContext = new DeathContext{ref<Component>()};
         AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), OnBinderUnlinked);
         AIBinder_linkToDeath(mListener->asBinder().get(), mDeathRecipient.get(), mDeathContext);
     } else {
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 2489683..58407d1 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -216,7 +216,7 @@
 #endif
         onInterfaceLoaded(c2component->intf());
         std::shared_ptr<Component> comp =
-            SharedRefBase::make<Component>(c2component, listener, shared_from_this(), pool);
+            SharedRefBase::make<Component>(c2component, listener, ref<ComponentStore>(), pool);
         *component = comp;
         if (!component) {
             status = C2_CORRUPTED;
diff --git a/media/codec2/hal/aidl/ParamTypes.cpp b/media/codec2/hal/aidl/ParamTypes.cpp
index 7026f4c..41e6f50 100644
--- a/media/codec2/hal/aidl/ParamTypes.cpp
+++ b/media/codec2/hal/aidl/ParamTypes.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "Codec2-AIDL-ParamTypes"
 #include <android-base/logging.h>
 
+#include <android/binder_manager.h>
+#include <android/sysprop/MediaProperties.sysprop.h>
 #include <codec2/aidl/ParamTypes.h>
 #include <codec2/common/ParamTypes.h>
 
@@ -157,8 +159,30 @@
 namespace c2 {
 namespace utils {
 
-// TODO: read it from aconfig flags
-bool IsEnabled() { return false; }
+bool IsSelected() {
+    // TODO: read from aconfig flags
+    const bool enabled = false;
+
+    if (!enabled) {
+        // Cannot select AIDL if not enabled
+        return false;
+    }
+    using ::android::sysprop::MediaProperties::codec2_hal_selection;
+    using ::android::sysprop::MediaProperties::codec2_hal_selection_values;
+    constexpr codec2_hal_selection_values AIDL = codec2_hal_selection_values::AIDL;
+    constexpr codec2_hal_selection_values HIDL = codec2_hal_selection_values::HIDL;
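+    // Default to HIDL when the codec2 HAL selection property has not been set.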
+    codec2_hal_selection_values selection = codec2_hal_selection().value_or(HIDL);
+    switch (selection) {
+    case AIDL:
+        return true;
+    case HIDL:
+        return false;
+    default:
+        LOG(FATAL) << "Unexpected codec2 HAL selection value: " << (int)selection;
+    }
+
+    return false;
+}
 
 const char* asString(Status status, const char* def) {
     return asString(static_cast<c2_status_t>(status.status), def);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index d5ea92b..4a090e9 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -46,8 +46,7 @@
 
 struct ComponentStore;
 
-struct Component : public BnComponent,
-                   public std::enable_shared_from_this<Component> {
+struct Component : public BnComponent {
     Component(
             const std::shared_ptr<C2Component>&,
             const std::shared_ptr<IComponentListener>& listener,
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 7fc5d2f..b3c97d5 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -50,8 +50,7 @@
 
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 
-struct ComponentStore : public BnComponentStore,
-                        public std::enable_shared_from_this<ComponentStore> {
+struct ComponentStore : public BnComponentStore {
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
index 3f82ee3..7c31a06 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
@@ -37,8 +37,8 @@
 namespace c2 {
 namespace utils {
 
-// Returns true iff AIDL c2 HAL is enabled
-bool IsEnabled();
+// Returns true iff AIDL c2 HAL is selected for the system
+bool IsSelected();
 
 // Make asString() and operator<< work with Status as well as c2_status_t.
 C2_DECLARE_AS_STRING_AND_DEFINE_STREAM_OUT(Status);
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index e3f8b1c..ab6505e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1438,35 +1438,35 @@
 std::vector<std::string> Codec2Client::CacheServiceNames() {
     std::vector<std::string> names;
 
-    if (c2_aidl::utils::IsEnabled()) {
+    if (c2_aidl::utils::IsSelected()) {
         // Get AIDL service names
         AServiceManager_forEachDeclaredInstance(
                 AidlBase::descriptor, &names, [](const char *name, void *context) {
                     std::vector<std::string> *names = (std::vector<std::string> *)context;
                     names->emplace_back(name);
                 });
-    }
+    } else {
+        // Get HIDL service names
+        using ::android::hardware::media::c2::V1_0::IComponentStore;
+        using ::android::hidl::manager::V1_2::IServiceManager;
+        while (true) {
+            sp<IServiceManager> serviceManager = IServiceManager::getService();
+            CHECK(serviceManager) << "Hardware service manager is not running.";
 
-    // Get HIDL service names
-    using ::android::hardware::media::c2::V1_0::IComponentStore;
-    using ::android::hidl::manager::V1_2::IServiceManager;
-    while (true) {
-        sp<IServiceManager> serviceManager = IServiceManager::getService();
-        CHECK(serviceManager) << "Hardware service manager is not running.";
-
-        Return<void> transResult;
-        transResult = serviceManager->listManifestByInterface(
-                IComponentStore::descriptor,
-                [&names](
-                        hidl_vec<hidl_string> const& instanceNames) {
-                    names.insert(names.end(), instanceNames.begin(), instanceNames.end());
-                });
-        if (transResult.isOk()) {
-            break;
+            Return<void> transResult;
+            transResult = serviceManager->listManifestByInterface(
+                    IComponentStore::descriptor,
+                    [&names](
+                            hidl_vec<hidl_string> const& instanceNames) {
+                        names.insert(names.end(), instanceNames.begin(), instanceNames.end());
+                    });
+            if (transResult.isOk()) {
+                break;
+            }
+            LOG(ERROR) << "Could not retrieve the list of service instances of "
+                       << IComponentStore::descriptor
+                       << ". Retrying...";
         }
-        LOG(ERROR) << "Could not retrieve the list of service instances of "
-                   << IComponentStore::descriptor
-                   << ". Retrying...";
     }
     // Sort service names in each category.
     std::stable_sort(
@@ -1545,7 +1545,7 @@
     std::string const& name = GetServiceNames()[index];
     LOG(VERBOSE) << "Creating a Codec2 client to service \"" << name << "\"";
 
-    if (c2_aidl::utils::IsEnabled()) {
+    if (c2_aidl::utils::IsSelected()) {
         std::string instanceName =
             ::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
         if (AServiceManager_isDeclared(instanceName.c_str())) {
@@ -1559,20 +1559,23 @@
             CHECK(transStatus.isOk()) << "Codec2 AIDL service \"" << name << "\""
                                         "does not have IConfigurable.";
             return std::make_shared<Codec2Client>(baseStore, configurable, index);
+        } else {
+            LOG(ERROR) << "Codec2 AIDL service \"" << name << "\" is not declared";
         }
+    } else {
+        std::string instanceName = "android.hardware.media.c2/" + name;
+        sp<HidlBase> baseStore = HidlBase::getService(name);
+        CHECK(baseStore) << "Codec2 service \"" << name << "\""
+                            " inaccessible for unknown reasons.";
+        LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
+        Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
+        CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+                                    "does not have IConfigurable.";
+        sp<c2_hidl::IConfigurable> configurable =
+            static_cast<sp<c2_hidl::IConfigurable>>(transResult);
+        return std::make_shared<Codec2Client>(baseStore, configurable, index);
     }
-
-    std::string instanceName = "android.hardware.media.c2/" + name;
-    sp<HidlBase> baseStore = HidlBase::getService(name);
-    CHECK(baseStore) << "Codec2 service \"" << name << "\""
-                        " inaccessible for unknown reasons.";
-    LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
-    Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
-    CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
-                                "does not have IConfigurable.";
-    sp<c2_hidl::IConfigurable> configurable =
-        static_cast<sp<c2_hidl::IConfigurable>>(transResult);
-    return std::make_shared<Codec2Client>(baseStore, configurable, index);
+    return nullptr;
 }
 
 c2_status_t Codec2Client::ForAllServices(
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index b91ac6d..4c385f1 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -378,7 +378,7 @@
         struct timespec ts;
         if (timeoutNs >= 0) {
             ts.tv_sec = int(timeoutNs / 1000000000);
-            ts.tv_nsec = timeoutNs;
+            ts.tv_nsec = timeoutNs % 1000000000;
         } else {
             ALOGD("polling for indefinite duration requested, but changed to wait for %d sec",
                   kPipeFenceWaitLimitSecs);
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 21cb39a..b18e64b 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -1708,8 +1708,8 @@
             [&](const auto& pair) {
                 const auto& p = pair.second;
                 LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
-                        !p.sampleRate.has_value() || !p.channelMask.has_value() ||
-                        !p.format.has_value() || !p.flags.has_value(),
+                        (!p.sampleRate.has_value() || !p.channelMask.has_value() ||
+                                !p.format.has_value() || !p.flags.has_value()),
                         "%s: stored mix port config is not fully specified: %s",
                         __func__, p.toString().c_str());
                 return p.ext.getTag() == Tag::mix &&
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index 9f19f7b..fbc7f90 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -185,6 +185,8 @@
      mDownmixConfig.inputCfg.mask = EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS |
              EFFECT_CONFIG_FORMAT | EFFECT_CONFIG_ACC_MODE;
      mDownmixConfig.outputCfg.mask = mDownmixConfig.inputCfg.mask;
+     mDownmixConfig.inputCfg.buffer.frameCount = bufferFrameCount;
+     mDownmixConfig.outputCfg.buffer.frameCount = bufferFrameCount;
 
      mInFrameSize =
              audio_bytes_per_sample(format) * audio_channel_count_from_out_mask(inputChannelMask);
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 1fed9a5..63cb48d 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -112,7 +112,7 @@
         DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
                                           .band = std::numeric_limits<int>::max(),
                                           .enable = true,
-                                          .cutoffFrequencyHz = 20000,
+                                          .cutoffFrequencyHz = 20000.1,
                                           .gainDb = 200});
 
 static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
@@ -144,7 +144,7 @@
                         {.channel = std::numeric_limits<int>::max(),
                          .band = std::numeric_limits<int>::max(),
                          .enable = true,
-                         .cutoffFrequencyHz = 20000,
+                         .cutoffFrequencyHz = 20000.1,
                          .attackTimeMs = 60000,
                          .releaseTimeMs = 60000,
                          .ratio = 50,
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 9d77135..57c873b 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -416,14 +416,25 @@
 template <typename T>
 bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
                                                    int maxBand) {
-    std::vector<float> freqs(bands.size(), -1);
+    std::map<int, float> freqs;
     for (auto band : bands) {
-        if (!validateChannel(band.channel, maxChannel)) return false;
-        if (!validateBand(band.band, maxBand)) return false;
+        if (!validateChannel(band.channel, maxChannel)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxCh " << maxChannel;
+            return false;
+        }
+        if (!validateBand(band.band, maxBand)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxBand " << maxBand;
+            return false;
+        }
+        if (freqs.find(band.band) != freqs.end()) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " found duplicate";
+            return false;
+        }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    if (std::count(freqs.begin(), freqs.end(), -1)) return false;
-    return std::is_sorted(freqs.begin(), freqs.end());
+    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
+        return a.second <= b.second;  // the map keys (band indices) are already sorted
+    });
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e399cbe..a0a2891 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -567,6 +567,7 @@
     mDone = false;
     mThread = 0;
     mDriftTimeUs = 0;
+    mHasDolbyVision = false;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -714,6 +715,7 @@
         // So we let the creation of the new track now and
         // assign FourCC codes later using getDoviFourCC()
         ALOGV("Add source mime '%s'", mime);
+        mHasDolbyVision = true;
     } else if (Track::getFourCCForMime(mime) == NULL) {
         ALOGE("Unsupported mime '%s'", mime);
         return ERROR_UNSUPPORTED;
@@ -1576,6 +1578,13 @@
                 break;
             }
         }
+        // The brand ‘dby1’ should be used in the compatible_brands field to indicate that the file
+        // is compliant with all Dolby Extensions. For details, refer to
+        // https://professional.dolby.com/siteassets/content-creation/dolby-vision-for-content-creators/dolby_vision_bitstreams_within_the_iso_base_media_file_format_dec2017.pdf
+        // Chapter 7, Dolby Vision Files.
+        if (fileType == OUTPUT_FORMAT_MPEG_4 && mHasDolbyVision) {
+            writeFourcc("dby1");
+        }
     }
 
     endBox();
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index fa4808b..870ebdf 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -785,6 +785,7 @@
     kWhatOutputBuffersChanged = 'outC',
     kWhatFirstTunnelFrameReady = 'ftfR',
     kWhatPollForRenderedBuffers = 'plrb',
+    kWhatMetricsUpdated      = 'mtru',
 };
 
 class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
@@ -882,6 +883,7 @@
     virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
     virtual void onOutputBuffersChanged() override;
     virtual void onFirstTunnelFrameReady() override;
+    virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -1008,6 +1010,13 @@
     notify->post();
 }
 
+void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatMetricsUpdated);
+    notify->setMessage("updated-metrics", updatedMetrics);
+    notify->post();
+}
+
 static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
     switch (domain) {
         case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
@@ -4381,6 +4390,49 @@
                     break;
                 }
 
+                case kWhatMetricsUpdated:
+                {
+                    sp<AMessage> updatedMetrics;
+                    CHECK(msg->findMessage("updated-metrics", &updatedMetrics));
+
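+                    // Mirror every entry of the updated-metrics message into mMetricsHandle,
+                    // dispatching on the entry's value type.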
+                    size_t numEntries = updatedMetrics->countEntries();
+                    AMessage::Type type;
+                    for (size_t i = 0; i < numEntries; ++i) {
+                        const char *name = updatedMetrics->getEntryNameAt(i, &type);
+                        AMessage::ItemData itemData = updatedMetrics->getEntryAt(i);
+                        switch (type) {
+                            case AMessage::kTypeInt32: {
+                                int32_t metricValue;
+                                itemData.find(&metricValue);
+                                mediametrics_setInt32(mMetricsHandle, name, metricValue);
+                                break;
+                            }
+                            case AMessage::kTypeInt64: {
+                                int64_t metricValue;
+                                itemData.find(&metricValue);
+                                mediametrics_setInt64(mMetricsHandle, name, metricValue);
+                                break;
+                            }
+                            case AMessage::kTypeDouble: {
+                                double metricValue;
+                                itemData.find(&metricValue);
+                                mediametrics_setDouble(mMetricsHandle, name, metricValue);
+                                break;
+                            }
+                            case AMessage::kTypeString: {
+                                AString metricValue;
+                                itemData.find(&metricValue);
+                                mediametrics_setCString(mMetricsHandle, name, metricValue.c_str());
+                                break;
+                            }
+                            // TODO: add support for other types
+                            default:
+                                ALOGW("Updated metrics type not supported.");
+                        }
+                    }
+                    break;
+                }
+
                 case kWhatEOS:
                 {
                     // We already notify the client of this by using the
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 916d41e..2a5989f 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -182,6 +182,12 @@
          * Notify MediaCodec that the first tunnel frame is ready.
          */
         virtual void onFirstTunnelFrameReady() = 0;
+        /**
+         * Notify MediaCodec that there are metrics to be updated.
+         *
+         * @param updatedMetrics the metrics that need to be updated.
+         */
+        virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) = 0;
     };
 
     /**
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index cf76606..054a4b8 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -241,6 +241,8 @@
     std::map<uint32_t, ItemInfo> mItems;
     Vector<ItemProperty> mProperties;
 
+    bool mHasDolbyVision;
+
     // Writer thread handling
     status_t startWriterThread();
     status_t stopWriterThread();
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index ecd937d..a9ca078 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -185,6 +185,8 @@
     status_t parseSampleEncryption(off64_t offset, off64_t size);
     // returns -1 for invalid layer ID
     int32_t parseHEVCLayerId(const uint8_t *data, size_t size);
+    size_t getNALLengthSizeFromAvcCsd(const uint8_t *data, const size_t size) const;
+    size_t getNALLengthSizeFromHevcCsd(const uint8_t *data, const size_t size) const;
 
     struct TrackFragmentHeaderInfo {
         enum Flags {
@@ -5158,24 +5160,13 @@
         size_t size;
         CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_AVC, &data, &size));
 
-        const uint8_t *ptr = (const uint8_t *)data;
-
-        CHECK(size >= 7);
-        CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
-
-        // The number of bytes used to encode the length of a NAL unit.
-        mNALLengthSize = 1 + (ptr[4] & 3);
+        mNALLengthSize = getNALLengthSizeFromAvcCsd((const uint8_t *)data, size);
     } else if (mIsHEVC) {
         void *data;
         size_t size;
         CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_HEVC, &data, &size));
 
-        const uint8_t *ptr = (const uint8_t *)data;
-
-        CHECK(size >= 22);
-        CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
-
-        mNALLengthSize = 1 + (ptr[14 + 7] & 3);
+        mNALLengthSize = getNALLengthSizeFromHevcCsd((const uint8_t *)data, size);
     } else if (mIsDolbyVision) {
         ALOGV("%s DolbyVision stream detected", __FUNCTION__);
         void *data;
@@ -5190,27 +5181,25 @@
         CHECK(!((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)));
 
         const uint8_t profile = ptr[2] >> 1;
-        // profile == (unknown,1,9) --> AVC; profile = (2,3,4,5,6,7,8) --> HEVC;
-        // profile == (10) --> AV1
-        if (profile > 1 && profile < 9) {
+        // profile == (4,5,6,7,8) --> HEVC; profile == (9) --> AVC; profile == (10) --> AV1
+        if (profile > 3 && profile < 9) {
             CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_HEVC, &data, &size));
 
-            const uint8_t *ptr = (const uint8_t *)data;
+            mNALLengthSize = getNALLengthSizeFromHevcCsd((const uint8_t *)data, size);
+        } else if (9 == profile) {
+            CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_AVC, &data, &size));
 
-            CHECK(size >= 22);
-            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
-
-            mNALLengthSize = 1 + (ptr[14 + 7] & 3);
+            mNALLengthSize = getNALLengthSizeFromAvcCsd((const uint8_t *)data, size);
         } else if (10 == profile) {
             /* AV1 profile nothing to do */
         } else {
-            CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_AVC, &data, &size));
-            const uint8_t *ptr = (const uint8_t *)data;
-
-            CHECK(size >= 7);
-            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
-            // The number of bytes used to encode the length of a NAL unit.
-            mNALLengthSize = 1 + (ptr[4] & 3);
+            if (AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_HEVC, &data, &size)) {
+                mNALLengthSize = getNALLengthSizeFromHevcCsd((const uint8_t *)data, size);
+            } else if (AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_AVC, &data, &size)) {
+                mNALLengthSize = getNALLengthSizeFromAvcCsd((const uint8_t *)data, size);
+            } else {
+                LOG_ALWAYS_FATAL("Invalid Dolby Vision profile = %d", profile);
+            }
         }
     }
 
@@ -6135,6 +6124,24 @@
     return 0;
 }
 
+size_t MPEG4Source::getNALLengthSizeFromAvcCsd(const uint8_t *data, const size_t size) const {
+    CHECK(data != nullptr);
+    CHECK(size >= 7);
+    CHECK_EQ((unsigned)data[0], 1u);  // configurationVersion == 1
+
+    // The number of bytes used to encode the length of a NAL unit.
+    return 1 + (data[4] & 3);
+}
+
+size_t MPEG4Source::getNALLengthSizeFromHevcCsd(const uint8_t *data, const size_t size) const {
+    CHECK(data != nullptr);
+    CHECK(size >= 22);
+    CHECK_EQ((unsigned)data[0], 1u);  // configurationVersion == 1
+
+    // The number of bytes used to encode the length of a NAL unit.
+    return 1 + (data[14 + 7] & 3);
+}
+
 media_status_t MPEG4Source::read(
         MediaBufferHelper **out, const ReadOptions *options) {
     Mutex::Autolock autoLock(mLock);
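
Both new helpers read lengthSizeMinusOne from the ISO/IEC 14496-15 decoder configuration record: byte 4 of an avcC box, byte 21 (14 + 7) of an hvcC box. A minimal standalone sketch of the same parsing, using std::optional instead of CHECK() for error handling:

#include <cstddef>
#include <cstdint>
#include <optional>

// Returns the number of bytes used to encode the length of each NAL unit,
// or nullopt if the config record is too short or has the wrong version.
std::optional<size_t> nalLengthSizeFromAvcC(const uint8_t* data, size_t size) {
    if (data == nullptr || size < 7 || data[0] != 1 /* configurationVersion */) {
        return std::nullopt;
    }
    return 1 + (data[4] & 3);   // lengthSizeMinusOne lives in byte 4
}

std::optional<size_t> nalLengthSizeFromHvcC(const uint8_t* data, size_t size) {
    if (data == nullptr || size < 22 || data[0] != 1 /* configurationVersion */) {
        return std::nullopt;
    }
    return 1 + (data[21] & 3);  // lengthSizeMinusOne lives in byte 21 (14 + 7)
}
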
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 2fb5728..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -672,7 +672,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -689,7 +689,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get input buffers");
     return NULL;
@@ -707,7 +707,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -720,7 +720,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get output buffers");
     return NULL;
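
base() returns the start of the buffer's backing allocation, while data() accounts for the buffer's offset, so data() is roughly where valid payload actually begins. A toy illustration of the distinction (not the AOSP MediaCodecBuffer class):

#include <cstddef>
#include <cstdint>
#include <vector>

// ToyBuffer only models the offset that makes data() differ from base().
class ToyBuffer {
public:
    ToyBuffer(size_t capacity, size_t offset)
        : mStorage(capacity), mOffset(offset) {}
    uint8_t* base() { return mStorage.data(); }            // start of the allocation
    uint8_t* data() { return mStorage.data() + mOffset; }  // start of the payload
    size_t capacity() const { return mStorage.size() - mOffset; }
private:
    std::vector<uint8_t> mStorage;
    size_t mOffset;
};
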
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 62e5bd4..514601c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -225,8 +225,7 @@
 {
     bool added = false;
     for (const auto& device : devices) {
-        ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
-        if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
+        if (device && indexOf(device) < 0 && SortedVector::add(device) >= 0) {
             added = true;
         }
     }
@@ -238,7 +237,10 @@
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
 {
-    ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
+    if (!item) {
+        ALOGW("DeviceVector::%s() null device", __func__);
+        return -1;
+    }
     ssize_t ret = indexOf(item);
 
     if (ret < 0) {
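
The add() paths now tolerate a null device gracefully (warn and return -1) instead of asserting on a null pointer. A generic sketch of that null-tolerant pattern, with illustrative types (DeviceVector is a SortedVector in AOSP; a plain std::vector keeps the sketch short):

#include <algorithm>
#include <cstdio>
#include <memory>
#include <vector>

template <typename T>
long addIfValid(std::vector<std::shared_ptr<T>>& vec, const std::shared_ptr<T>& item) {
    if (!item) {
        std::fprintf(stderr, "addIfValid: ignoring null item\n");
        return -1;  // degrade gracefully instead of asserting
    }
    if (std::find(vec.begin(), vec.end(), item) != vec.end()) {
        return -1;  // already present, mirrors the indexOf() check
    }
    vec.push_back(item);
    return static_cast<long>(vec.size()) - 1;
}
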
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index a2bd5e1..73a96e9 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -77,6 +77,7 @@
         "ResourceManagerMetrics.cpp",
         "ResourceManagerService.cpp",
         "ResourceObserverService.cpp",
+        "ResourceManagerServiceUtils.cpp",
         "ServiceLog.cpp",
         "UidObserver.cpp",
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 5d1ba2b..9552e25 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -39,6 +39,7 @@
 #include "IMediaResourceMonitor.h"
 #include "ResourceManagerMetrics.h"
 #include "ResourceManagerService.h"
+#include "ResourceManagerServiceUtils.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
@@ -160,87 +161,6 @@
     service->removeProcessInfoOverride(mClientInfo.pid);
 }
 
-template <typename T>
-static String8 getString(const std::vector<T>& items) {
-    String8 itemsStr;
-    for (size_t i = 0; i < items.size(); ++i) {
-        itemsStr.appendFormat("%s ", toString(items[i]).c_str());
-    }
-    return itemsStr;
-}
-
-static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const MediaResourceParcel& resource) {
-    if (type != resource.type) {
-      return false;
-    }
-    switch (type) {
-        // Codec subtypes (e.g. video vs. audio) are each considered separate resources, so
-        // compare the subtypes as well.
-        case MediaResource::Type::kSecureCodec:
-        case MediaResource::Type::kNonSecureCodec:
-            if (resource.subType == subType) {
-                return true;
-            }
-            break;
-        // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
-        default:
-            return true;
-    }
-    return false;
-}
-
-static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceList& resources) {
-    for (auto it = resources.begin(); it != resources.end(); it++) {
-        if (hasResourceType(type, subType, it->second)) {
-            return true;
-        }
-    }
-    return false;
-}
-
-static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceInfos& infos) {
-    for (const auto& [id, info] : infos) {
-        if (hasResourceType(type, subType, info.resources)) {
-            return true;
-        }
-    }
-    return false;
-}
-
-static ResourceInfos& getResourceInfosForEdit(int pid, PidResourceInfosMap& map) {
-    PidResourceInfosMap::iterator found = map.find(pid);
-    if (found == map.end()) {
-        // new pid
-        ResourceInfos infosForPid;
-        auto [it, inserted] = map.emplace(pid, infosForPid);
-        found = it;
-    }
-
-    return found->second;
-}
-
-static ResourceInfo& getResourceInfoForEdit(uid_t uid, int64_t clientId,
-                                            const std::string& name,
-        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
-    ResourceInfos::iterator found = infos.find(clientId);
-
-    if (found == infos.end()) {
-        ResourceInfo info{.uid = uid,
-                          .clientId = clientId,
-                          .name = name.empty()? "<unknown client>" : name,
-                          .client = client,
-                          .deathNotifier = nullptr,
-                          .pendingRemoval = false};
-        auto [it, inserted] = infos.emplace(clientId, info);
-        found = it;
-    }
-
-    return found->second;
-}
-
 static void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
     static const char* const kServiceName = "media_resource_monitor";
     sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
@@ -488,7 +408,6 @@
     int32_t pid = clientInfo.pid;
     int32_t uid = clientInfo.uid;
     int64_t clientId = clientInfo.id;
-    const std::string& name = clientInfo.name;
     String8 log = String8::format("addResource(pid %d, uid %d clientId %lld, resources %s)",
             pid, uid, (long long) clientId, getString(resources).c_str());
     mServiceLog->add(log);
@@ -503,7 +422,7 @@
         uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
-    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, name, client, infos);
+    ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
     ResourceList resourceAdded;
 
     for (size_t i = 0; i < resources.size(); ++i) {
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
new file mode 100644
index 0000000..892b1b3
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -0,0 +1,98 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerServiceUtils"
+#include <utils/Log.h>
+
+#include "ResourceManagerService.h"
+#include "ResourceManagerServiceUtils.h"
+
+namespace android {
+
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const MediaResourceParcel& resource) {
+    if (type != resource.type) {
+      return false;
+    }
+    switch (type) {
+        // Codec subtypes (e.g. video vs. audio) are each considered separate resources, so
+        // compare the subtypes as well.
+        case MediaResource::Type::kSecureCodec:
+        case MediaResource::Type::kNonSecureCodec:
+            if (resource.subType == subType) {
+                return true;
+            }
+            break;
+        // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
+        default:
+            return true;
+    }
+    return false;
+}
+
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceList& resources) {
+    for (auto it = resources.begin(); it != resources.end(); it++) {
+        if (hasResourceType(type, subType, it->second)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceInfos& infos) {
+    for (const auto& [id, info] : infos) {
+        if (hasResourceType(type, subType, info.resources)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+ResourceInfos& getResourceInfosForEdit(int pid, PidResourceInfosMap& map) {
+    PidResourceInfosMap::iterator found = map.find(pid);
+    if (found == map.end()) {
+        // new pid
+        ResourceInfos infosForPid;
+        auto [it, inserted] = map.emplace(pid, infosForPid);
+        found = it;
+    }
+
+    return found->second;
+}
+
+ResourceInfo& getResourceInfoForEdit(const ClientInfoParcel& clientInfo,
+        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
+    ResourceInfos::iterator found = infos.find(clientInfo.id);
+
+    if (found == infos.end()) {
+        ResourceInfo info{.uid = static_cast<uid_t>(clientInfo.uid),
+                          .clientId = clientInfo.id,
+                          .name = clientInfo.name.empty()? "<unknown client>" : clientInfo.name,
+                          .client = client,
+                          .deathNotifier = nullptr,
+                          .pendingRemoval = false};
+        auto [it, inserted] = infos.emplace(clientInfo.id, info);
+        found = it;
+    }
+
+    return found->second;
+}
+
+} // namespace android
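
The subtype rule in hasResourceType() is worth calling out: codec resources are matched on the (type, subType) pair because audio and video codecs are accounted for separately, while non-codec resources match on type alone. A standalone sketch of that rule with illustrative enum stand-ins:

#include <cstdio>

enum class Type { kSecureCodec, kNonSecureCodec, kGraphicMemory };
enum class SubType { kAudioCodec, kVideoCodec, kUnspecified };

struct Resource { Type type; SubType subType; };

bool matches(Type type, SubType subType, const Resource& r) {
    if (type != r.type) return false;
    switch (type) {
        case Type::kSecureCodec:
        case Type::kNonSecureCodec:
            return r.subType == subType;  // codecs: subtype must match too
        default:
            return true;                  // non-codec: subtype is ignored
    }
}

int main() {
    Resource videoCodec{Type::kNonSecureCodec, SubType::kVideoCodec};
    std::printf("%d\n", matches(Type::kNonSecureCodec, SubType::kAudioCodec, videoCodec));  // 0
    std::printf("%d\n", matches(Type::kNonSecureCodec, SubType::kVideoCodec, videoCodec));  // 1
}
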
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
new file mode 100644
index 0000000..bbc26de
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -0,0 +1,62 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
+#define ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
+
+#include <vector>
+#include <utils/String8.h>
+
+namespace android {
+
+// templated function to stringify the given vector of items.
+template <typename T>
+String8 getString(const std::vector<T>& items) {
+    String8 itemsStr;
+    for (size_t i = 0; i < items.size(); ++i) {
+        itemsStr.appendFormat("%s ", toString(items[i]).c_str());
+    }
+    return itemsStr;
+}
+
+// Utility functions that look for a specific resource.
+
+// Check whether a given resource (of type and subtype) is found in the given resource parcel.
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const MediaResourceParcel& resource);
+
+// Check whether a given resource (of type and subtype) is found in the given resource list.
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceList& resources);
+
+// Check whether a given resource (of type and subtype) is found in the given resource info list.
+bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceInfos& infos);
+
+// Return a modifiable list of ResourceInfo for a given process (looked up by pid)
+// from the map of ResourceInfos.
+ResourceInfos& getResourceInfosForEdit(int pid, PidResourceInfosMap& map);
+
+// Return a modifiable ResourceInfo for a given client (looked up by client id)
+// from the ResourceInfos map.
+// If the item is not in the map, create one and add it to the map.
+ResourceInfo& getResourceInfoForEdit(const ClientInfoParcel& clientInfo,
+        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos);
+
+} // namespace android
+
+#endif //ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
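
Both getResourceInfosForEdit() and getResourceInfoForEdit() are instances of the same find-or-create idiom over a map. A compact sketch of that idiom against std::map, with a hypothetical ClientRecord value type:

#include <cstdint>
#include <map>
#include <string>

// ClientRecord is a hypothetical stand-in for ResourceInfo.
struct ClientRecord {
    int64_t clientId = 0;
    std::string name = "<unknown client>";
};

// Find-or-create: return a reference the caller can edit in place, creating
// the entry only if the clientId was not yet in the map.
ClientRecord& recordForEdit(int64_t clientId, const std::string& name,
                            std::map<int64_t, ClientRecord>& records) {
    auto [it, inserted] = records.try_emplace(clientId);
    if (inserted) {
        it->second.clientId = clientId;
        if (!name.empty()) {
            it->second.name = name;
        }
    }
    return it->second;
}
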