Merge tag 'android-14.0.0_r50' into leaf-3.2

Android 14.0.0 Release 50 (AP2A.240605.024)

* tag 'android-14.0.0_r50': (357 commits)
  MPEG4Writer: Remove timestamp validation
  VTS: check ro.vendor.api_level instead of ro.board.(first_)api_level
  Revert "Introduce AttributionAndPermissionUtils."
  Fix playbackHeadPosition after starting tracks with a startThresholdInFrames
  Add Java library to query state of VirtualCamera build flag.
  Fail if there's a problem with tmp buffer during JPEG capture
  Only add the AIDL manifest decl when using unfrozen
  Remove Android.mk files which are not needed any more.
  Extend output format with downscalable resolutions
  AudioStreamOut: Align with AudioStreamIn
  Improve AudioMix registration/unregistration
  android.media.codec-aconfig-cc: Set double_loadable to true
  EffectBufferHalAidl: remove ashmem mapping
  AudioPolicyService: Make device effect init synchronous
  AudioSystem: Move AudioPolicyService fetch to ServiceHandler
  VtsHalMediaC2: start thread pool
  media.c2 client: fix # of queried param count
  Spatializer: replace parameters with AIDL type
  AudioSystem: Move AudioFlinger fetch to ServiceHandler
  audiosystem_tests: Fix false test failures
  ...

Change-Id: I816fbc0bcc8aa82ce5a222b5409bd271c5598fe4
diff --git a/Android.bp b/Android.bp
index 7a2bb9b..72b8721 100644
--- a/Android.bp
+++ b/Android.bp
@@ -105,7 +105,6 @@
 
 aidl_interface {
     name: "av-audio-types-aidl",
-    unstable: true,
     host_supported: true,
     vendor_available: true,
     double_loadable: true,
@@ -125,4 +124,12 @@
             sdk_version: "module_current",
         },
     },
+    versions_with_info: [
+        {
+            version: "1",
+            imports: ["android.hardware.audio.core-V2"],
+        },
+    ],
+    frozen: true,
+
 }
diff --git a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
index b7a7678..48fb291 100644
--- a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
+++ b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -23,6 +23,8 @@
  * is optional. Vendors may provide an implementation on the system_ext
  * partition. The default instance of this interface, if provided, must be
  * registered prior to the moment when the audio server connects to HAL modules.
+ * Vendors need to set the system property `ro.audio.ihaladaptervendorextension_enabled`
+ * to `true` for the framework to bind to this service.
  *
  * {@hide}
  */
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
new file mode 100644
index 0000000..0002682
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/.hash
@@ -0,0 +1 @@
+ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+  enum ParameterScope {
+    MODULE = 0,
+    STREAM = 1,
+  }
+}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+  enum ParameterScope {
+    MODULE = 0,
+    STREAM = 1,
+  }
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..d0f8e7e 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_license"],
 }
 
@@ -46,6 +47,7 @@
 aconfig_declarations {
     name: "camera_platform_flags",
     package: "com.android.internal.camera.flags",
+    container: "system",
     srcs: ["camera_platform.aconfig"],
 }
 
@@ -64,6 +66,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
+
 cc_library {
     name: "libcamera_client",
 
@@ -119,10 +122,14 @@
         "frameworks/native/include/media/openmax",
     ],
     export_include_dirs: [
-         "include",
-         "include/camera"
+        "include",
+        "include/camera",
     ],
-    export_shared_lib_headers: ["libcamera_metadata", "libnativewindow", "libgui"],
+    export_shared_lib_headers: [
+        "libcamera_metadata",
+        "libnativewindow",
+        "libgui",
+    ],
 
     cflags: [
         "-Werror",
@@ -152,7 +159,7 @@
 
     export_include_dirs: [
         "include",
-        "include/camera"
+        "include/camera",
     ],
 }
 
diff --git a/camera/Android.mk b/camera/Android.mk
deleted file mode 100644
index d9068c0..0000000
--- a/camera/Android.mk
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-include $(call all-subdir-makefiles)
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 0eeeb7f..4bea896 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -278,14 +278,28 @@
     CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId);
 
     /**
-      * Check whether a particular session configuration with optional session parameters
-      * has camera device support.
-      *
-      * @param cameraId The camera id to query session configuration on
-      * @param sessionConfiguration Specific session configuration to be verified.
-      * @return true  - in case the stream combination is supported.
-      *         false - in case there is no device support.
-      */
+     * Check whether a particular session configuration with optional session parameters
+     * has camera device support.
+     *
+     * @param cameraId The camera id to query session configuration for
+     * @param sessionConfiguration Specific session configuration to be verified.
+     * @return true  - in case the stream combination is supported.
+     *         false - in case there is no device support.
+     */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
             in SessionConfiguration sessionConfiguration);
+
+    /**
+     * Get the camera characteristics for a particular session configuration for
+     * the given camera device.
+     *
+     * @param cameraId ID of the device for which the session characteristics must be fetched.
+     * @param sessionConfiguration session configuration for which the characteristics
+     * must be fetched.
+     * @return - characteristics associated with the given session.
+     */
+    CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
+                int targetSdkVersion,
+                boolean overrideToPortrait,
+                in SessionConfiguration sessionConfiguration);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 843e0d4..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -99,15 +99,6 @@
       */
     boolean isSessionConfigurationSupported(in SessionConfiguration sessionConfiguration);
 
-    /**
-     * Get the camera characteristics for a particular session configuration
-     *
-     * @param sessionConfiguration Specific session configuration for which the characteristics
-     * are fetched.
-     * @return - characteristics associated with the given session.
-     */
-    CameraMetadataNative getSessionCharacteristics(in SessionConfiguration sessionConfiguration);
-
     void deleteStream(int streamId);
 
     /**
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 5d2a263..1f50570 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,4 +1,5 @@
 package: "com.android.internal.camera.flags"
+container: "system"
 
 flag {
      namespace: "camera_platform"
@@ -23,6 +24,13 @@
 
 flag {
      namespace: "camera_platform"
+     name: "watch_foreground_changes"
+     description: "Request AppOps to notify changes in the foreground status of the client"
+     bug: "290086710"
+}
+
+flag {
+     namespace: "camera_platform"
      name: "log_ultrawide_usage"
      description: "Enable measuring how much usage there is for ultrawide-angle cameras"
      bug: "300515796"
@@ -76,3 +84,52 @@
      description: "Enable creating MultiResolutionImageReader with usage flag configuration"
      bug: "301588215"
 }
+
+flag {
+     namespace: "camera_platform"
+     name: "use_ro_board_api_level_for_vndk_version"
+     description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
+     bug: "312315580"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_extensions_characteristics_get"
+     description: "Enable get extension specific camera characteristics API"
+     bug: "280649914"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "delay_lazy_hal_instantiation"
+     description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+     bug: "319735068"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "return_buffers_outside_locks"
+     description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+     bug: "315526878"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_device_setup"
+     description: "Create an intermediate Camera Device class for limited CameraDevice access."
+     bug: "320741775"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_privacy_allowlist"
+     description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+     bug: "282814430"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "extension_10_bit"
+     description: "Enables 10-bit support in the camera extensions."
+     bug: "316375635"
+}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 13b705c..6862cb1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/include/camera/camera2/SessionConfiguration.h b/camera/include/camera/camera2/SessionConfiguration.h
index 120bf9e..22f8083 100644
--- a/camera/include/camera/camera2/SessionConfiguration.h
+++ b/camera/include/camera/camera2/SessionConfiguration.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HARDWARE_CAMERA2_SESSIONCONFIGURATION_H
 #define ANDROID_HARDWARE_CAMERA2_SESSIONCONFIGURATION_H
 
+#include "OutputConfiguration.h"
+
 #include <binder/Parcelable.h>
 #include <camera/CameraMetadata.h>
 
@@ -26,8 +28,6 @@
 namespace camera2 {
 namespace params {
 
-class OutputConfiguration;
-
 class SessionConfiguration : public android::Parcelable {
 public:
 
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index d4dd546..421469a 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_ndk_license"],
 }
 
@@ -154,8 +155,8 @@
         "libcamera_metadata",
         "libmediandk",
         "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V2-ndk",
+        "android.frameworks.cameraservice.service-V2-ndk",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -188,7 +189,6 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
-        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 4387cc6..92de1e4 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -213,7 +213,7 @@
 EXPORT
 camera_status_t ACameraCaptureSession_prepareWindow(
         ACameraCaptureSession* session,
-        ACameraWindowType *window) {
+        ANativeWindow *window) {
     ATRACE_CALL();
     if (session == nullptr || window == nullptr) {
         ALOGE("%s: Error: session %p / window %p is null", __FUNCTION__, session, window);
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 8211671..f2ec573 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -124,7 +124,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionOutput_create(
-        ACameraWindowType* window, /*out*/ACaptureSessionOutput** out) {
+        ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || out == nullptr) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -137,7 +137,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_create(
-        ACameraWindowType* window, /*out*/ACaptureSessionOutput** out) {
+        ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || out == nullptr) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -150,7 +150,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionPhysicalOutput_create(
-        ACameraWindowType* window, const char* physicalId,
+        ANativeWindow* window, const char* physicalId,
         /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || physicalId == nullptr || out == nullptr) {
@@ -164,7 +164,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *out,
-        ACameraWindowType* window) {
+        ANativeWindow* window) {
     ATRACE_CALL();
     if ((window == nullptr) || (out == nullptr)) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -190,7 +190,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *out,
-        ACameraWindowType* window) {
+        ANativeWindow* window) {
     ATRACE_CALL();
     if ((window == nullptr) || (out == nullptr)) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
diff --git a/camera/ndk/NdkCaptureRequest.cpp b/camera/ndk/NdkCaptureRequest.cpp
index 87de4a9..b851a1d 100644
--- a/camera/ndk/NdkCaptureRequest.cpp
+++ b/camera/ndk/NdkCaptureRequest.cpp
@@ -27,7 +27,7 @@
 
 EXPORT
 camera_status_t ACameraOutputTarget_create(
-        ACameraWindowType* window, ACameraOutputTarget** out) {
+        ANativeWindow* window, ACameraOutputTarget** out) {
     ATRACE_CALL();
     if (window == nullptr) {
         ALOGE("%s: Error: input window is null", __FUNCTION__);
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 73439c7..449c0b4 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -146,7 +146,7 @@
     return ret;
 }
 
-camera_status_t ACameraCaptureSession::prepare(ACameraWindowType* window) {
+camera_status_t ACameraCaptureSession::prepare(ANativeWindow* window) {
 #ifdef __ANDROID_VNDK__
     std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
 #else
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 88135ba..0d7a2c1 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -23,14 +23,14 @@
 
 #ifdef __ANDROID_VNDK__
 #include "ndk_vendor/impl/ACameraDevice.h"
-#include "ndk_vendor/impl/ACameraCaptureSessionVendor.h"
 #else
 #include "ACameraDevice.h"
+#endif
 
 using namespace android;
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(ACameraWindowType* window, bool isShared = false,
+    explicit ACaptureSessionOutput(ANativeWindow* window, bool isShared = false,
             const char* physicalCameraId = "") :
             mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
 
@@ -47,28 +47,27 @@
         return mWindow > other.mWindow;
     }
 
-    inline bool isWindowEqual(ACameraWindowType* window) const {
+    inline bool isWindowEqual(ANativeWindow* window) const {
         return mWindow == window;
     }
 
     // returns true if the window was successfully added, false otherwise.
-    inline bool addSharedWindow(ACameraWindowType* window) {
+    inline bool addSharedWindow(ANativeWindow* window) {
         auto ret = mSharedWindows.insert(window);
         return ret.second;
     }
 
     // returns the number of elements removed.
-    inline size_t removeSharedWindow(ACameraWindowType* window) {
+    inline size_t removeSharedWindow(ANativeWindow* window) {
         return mSharedWindows.erase(window);
     }
 
-    ACameraWindowType* mWindow;
-    std::set<ACameraWindowType *> mSharedWindows;
+    ANativeWindow* mWindow;
+    std::set<ANativeWindow*> mSharedWindows;
     bool           mIsShared;
     int            mRotation = CAMERA3_STREAM_ROTATION_0;
     std::string mPhysicalCameraId;
 };
-#endif
 
 struct ACaptureSessionOutputContainer {
     std::set<ACaptureSessionOutput> mOutputs;
@@ -147,7 +146,7 @@
         mPreparedCb.context = context;
         mPreparedCb.onWindowPrepared = cb;
     }
-    camera_status_t prepare(ACameraWindowType *window);
+    camera_status_t prepare(ANativeWindow *window);
 
     ACameraDevice* getDevice();
 
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 97d65b0..1fa71f4 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -341,7 +341,7 @@
     return ACAMERA_OK;
 }
 
-camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+camera_status_t CameraDevice::prepareLocked(ANativeWindow *window) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         return ret;
@@ -1097,7 +1097,7 @@
                     if (onWindowPrepared == nullptr) {
                         return;
                     }
-                    ACameraWindowType* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
@@ -1823,7 +1823,7 @@
         return ret;
     }
     // We've found the window corresponding to the surface id.
-    ACameraWindowType *window = it->second.first;
+    ANativeWindow *window = it->second.first;
     sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
     msg->setPointer(kContextKey, session->mPreparedCb.context);
     msg->setPointer(kAnwKey, window);
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 4658d18..2b9f327 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -151,7 +151,7 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
-    camera_status_t prepareLocked(ACameraWindowType *window);
+    camera_status_t prepareLocked(ANativeWindow *window);
 
     camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5d3b65b..8c3424f 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -255,6 +255,7 @@
 template<class T>
 void CameraManagerGlobal::registerAvailCallback(const T *callback) {
     Mutex::Autolock _l(mLock);
+    getCameraServiceLocked();
     Callback cb(callback);
     auto pair = mCallbacks.insert(cb);
     // Send initial callbacks if callback is newly registered
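
The path touched here is the one exercised by NDK clients registering availability callbacks; a brief, self-contained usage sketch of that client side (lifetime management trimmed to the minimum):

    // Sketch of the NDK client path that ends up in registerAvailCallback().
    #include <stdio.h>

    #include <camera/NdkCameraManager.h>

    static void onCameraAvailable(void* /*context*/, const char* cameraId) {
        printf("camera %s available\n", cameraId);
    }

    static void onCameraUnavailable(void* /*context*/, const char* cameraId) {
        printf("camera %s unavailable\n", cameraId);
    }

    void watchCameras() {
        ACameraManager* manager = ACameraManager_create();
        ACameraManager_AvailabilityCallbacks callbacks = {
                /*context=*/nullptr, onCameraAvailable, onCameraUnavailable};
        // With this change, registering also fetches the camera service
        // before the initial availability callbacks are delivered.
        ACameraManager_registerAvailabilityCallback(manager, &callbacks);
        // ... later:
        ACameraManager_unregisterAvailabilityCallback(manager, &callbacks);
        ACameraManager_delete(manager);
    }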
diff --git a/camera/ndk/impl/ACaptureRequest.h b/camera/ndk/impl/ACaptureRequest.h
index 2ffcafe..118c2a5 100644
--- a/camera/ndk/impl/ACaptureRequest.h
+++ b/camera/ndk/impl/ACaptureRequest.h
@@ -22,11 +22,8 @@
 
 using namespace android;
 
-#ifdef __ANDROID_VNDK__
-#include "ndk_vendor/impl/ACaptureRequestVendor.h"
-#else
 struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(ACameraWindowType* window) : mWindow(window) {};
+    explicit ACameraOutputTarget(ANativeWindow* window) : mWindow(window) {};
 
     bool operator == (const ACameraOutputTarget& other) const {
         return mWindow == other.mWindow;
@@ -41,9 +38,8 @@
         return mWindow > other.mWindow;
     }
 
-    ACameraWindowType* mWindow;
+    ANativeWindow* mWindow;
 };
-#endif
 
 struct ACameraOutputTargets {
     std::set<ACameraOutputTarget> mOutputs;
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 099c5c5..cf6b970 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -124,7 +124,7 @@
  */
 typedef void (*ACameraCaptureSession_prepareCallback)(
         void *context,
-        ACameraWindowType *window,
+        ANativeWindow *window,
         ACameraCaptureSession *session);
 
 /// Enum for describing error reason in {@link ACameraCaptureFailure}
@@ -276,7 +276,7 @@
  */
 typedef void (*ACameraCaptureSession_captureCallback_bufferLost)(
         void* context, ACameraCaptureSession* session,
-        ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
+        ACaptureRequest* request, ANativeWindow* window, int64_t frameNumber);
 
 /**
  * ACaptureCaptureSession_captureCallbacks structure used in
@@ -1088,7 +1088,7 @@
  * and no pre-allocation is done.</p>
  *
  * @param session the {@link ACameraCaptureSession} that needs to prepare output buffers.
- * @param window the {@link ACameraWindowType} for which the output buffers need to be prepared.
+ * @param window the {@link ANativeWindow} for which the output buffers need to be prepared.
  *
  * @return <ul><li>
  *             {@link ACAMERA_OK} if the method succeeds</li>
@@ -1102,7 +1102,7 @@
  */
 camera_status_t ACameraCaptureSession_prepareWindow(
     ACameraCaptureSession* session,
-    ACameraWindowType *window) __INTRODUCED_IN(34);
+    ANativeWindow *window) __INTRODUCED_IN(34);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
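
Since the signature change lands in the public prototype above, a short usage sketch follows; it assumes the window was already added to the session's output container, and the callback-registration call name is taken from the API 34 NDK headers (treat that name as an assumption):

    // Sketch: pre-allocate buffers for an output window before first use.
    #include <android/log.h>
    #include <android/native_window.h>
    #include <camera/NdkCameraCaptureSession.h>

    static void onWindowPrepared(void* /*context*/, ANativeWindow* window,
                                 ACameraCaptureSession* /*session*/) {
        __android_log_print(ANDROID_LOG_INFO, "Prepare", "window %p prepared", window);
    }

    camera_status_t prepareOutput(ACameraCaptureSession* session, ANativeWindow* window) {
        // Registration call per the API 34 NDK headers (assumption noted above).
        camera_status_t status = ACameraCaptureSession_setWindowPreparedCallback(
                session, /*context=*/nullptr, onWindowPrepared);
        if (status != ACAMERA_OK) return status;
        // Triggers asynchronous buffer pre-allocation; completion arrives via
        // the callback above.
        return ACameraCaptureSession_prepareWindow(session, window);
    }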
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index de10eb3..fbd0ee1 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -364,7 +364,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionOutput_create(
-        ACameraWindowType* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(24);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(24);
 
 /**
  * Free a ACaptureSessionOutput object.
@@ -705,7 +705,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionSharedOutput_create(
-        ACameraWindowType* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(28);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(28);
 
 /**
  * Add a native window to shared ACaptureSessionOutput.
@@ -723,7 +723,7 @@
  *             ACaptureSessionOutput.</li></ul>
  */
 camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output,
-        ACameraWindowType *anw) __INTRODUCED_IN(28);
+        ANativeWindow *anw) __INTRODUCED_IN(28);
 
 /**
  * Remove a native window from shared ACaptureSessionOutput.
@@ -739,7 +739,7 @@
  *             ACaptureSessionOutput.</li></ul>
  */
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
-        ACameraWindowType* anw) __INTRODUCED_IN(28);
+        ANativeWindow* anw) __INTRODUCED_IN(28);
 
 /**
  * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
@@ -797,7 +797,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionPhysicalOutput_create(
-        ACameraWindowType* anw, const char* physicalId,
+        ANativeWindow* anw, const char* physicalId,
         /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(29);
 
 /**
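
Together, the prototypes above describe the shared-surface flow; a compact sketch, assuming two valid ANativeWindow pointers obtained elsewhere (for example from AImageReader instances):

    // Sketch: create a shared session output and attach a second window to it.
    #include <camera/NdkCameraDevice.h>

    camera_status_t buildSharedOutput(ANativeWindow* primary, ANativeWindow* secondary,
                                      ACaptureSessionOutputContainer* container,
                                      ACaptureSessionOutput** sharedOut) {
        camera_status_t status = ACaptureSessionSharedOutput_create(primary, sharedOut);
        if (status != ACAMERA_OK) return status;
        // Additional windows can be added (and later removed) while sharing.
        status = ACaptureSessionSharedOutput_add(*sharedOut, secondary);
        if (status != ACAMERA_OK) return status;
        return ACaptureSessionOutputContainer_add(container, *sharedOut);
    }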
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 2c68cef..1ed17a3 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -76,6 +76,7 @@
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
     ACAMERA_JPEGR,
+    ACAMERA_EFV,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -123,6 +124,7 @@
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
     ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
+    ACAMERA_EFV_START              = ACAMERA_EFV               << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -4705,18 +4707,21 @@
      * </ul>
      * <p>should be interpreted in the effective after raw crop field-of-view coordinate system.
      * In this coordinate system,
-     * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to the
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.left,
+     *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.top} corresponds to the
      * the top left corner of the cropped RAW frame and
-     * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.right,
+     *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.bottom} corresponds to
      * the bottom right corner. Client applications must use the values of the keys
      * in the CaptureResult metadata if present.</p>
-     * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+     * <p>Crop regions ACAMERA_SCALER_CROP_REGION, AE/AWB/AF regions and face coordinates still
      * use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
      * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
@@ -11524,6 +11529,7 @@
 
 
 
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index 0838fba..2217528 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -41,14 +41,11 @@
  * camera2 NDK. This enables us to share the api definition headers and avoid
  * code duplication (since the VNDK variant doesn't use ANativeWindow unlike the
  * NDK variant).
+ * @deprecated No longer needed. Both NDK and VNDK use ANativeWindow now.
+ *             Use ANativeWindow directly.
  */
-#ifdef __ANDROID_VNDK__
-#include <cutils/native_handle.h>
-typedef const native_handle_t ACameraWindowType;
-#else
 #include <android/native_window.h>
 typedef ANativeWindow ACameraWindowType;
-#endif
 
 /** @} */
 
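
With the typedef collapsed to ANativeWindow for both the NDK and VNDK variants, callers can pass the window from AImageReader straight into the camera NDK; a minimal sketch of that wiring (error handling trimmed):

    // Sketch: obtain an ANativeWindow from AImageReader and use it directly
    // as a session output and a request target.
    #include <camera/NdkCameraDevice.h>
    #include <camera/NdkCaptureRequest.h>
    #include <media/NdkImageReader.h>

    camera_status_t wireReaderOutput(AImageReader* reader,
                                     ACaptureSessionOutput** output,
                                     ACameraOutputTarget** target) {
        ANativeWindow* window = nullptr;
        if (AImageReader_getWindow(reader, &window) != AMEDIA_OK || window == nullptr) {
            return ACAMERA_ERROR_UNKNOWN;
        }
        // No ACameraWindowType / native_handle_t indirection is needed anymore.
        camera_status_t status = ACaptureSessionOutput_create(window, output);
        if (status != ACAMERA_OK) return status;
        return ACameraOutputTarget_create(window, target);
    }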
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index dc18544..5ccb510 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -99,7 +99,7 @@
  *
  * @see ACaptureRequest_addTarget
  */
-camera_status_t ACameraOutputTarget_create(ACameraWindowType* window,
+camera_status_t ACameraOutputTarget_create(ANativeWindow* window,
         ACameraOutputTarget** output) __INTRODUCED_IN(24);
 
 /**
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
deleted file mode 100644
index 45098c3..0000000
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "utils.h"
-
-#include <android/binder_auto_utils.h>
-#include <string>
-#include <set>
-
-using ::android::acam::utils::native_handle_ptr_wrapper;
-
-struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
-            const char* physicalCameraId = "") :
-            mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
-
-    bool operator == (const ACaptureSessionOutput& other) const {
-        return (mWindow == other.mWindow);
-    }
-
-    bool operator != (const ACaptureSessionOutput& other) const {
-        return mWindow != other.mWindow;
-    }
-
-    bool operator < (const ACaptureSessionOutput& other) const {
-        return mWindow < other.mWindow;
-    }
-
-    bool operator > (const ACaptureSessionOutput& other) const {
-        return mWindow > other.mWindow;
-    }
-
-    inline bool isWindowEqual(ACameraWindowType* window) const {
-        return mWindow == native_handle_ptr_wrapper(window);
-    }
-
-    // returns true if the window was successfully added, false otherwise.
-    inline bool addSharedWindow(ACameraWindowType* window) {
-        auto ret = mSharedWindows.insert(window);
-        return ret.second;
-    }
-
-    // returns the number of elements removed.
-    inline size_t removeSharedWindow(ACameraWindowType* window) {
-        return mSharedWindows.erase(window);
-    }
-
-    native_handle_ptr_wrapper mWindow;
-    std::set<native_handle_ptr_wrapper> mSharedWindows;
-    bool           mIsShared;
-    int            mRotation = CAMERA3_STREAM_ROTATION_0;
-    std::string mPhysicalCameraId;
-};
-
-
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 87102e4..3325da6 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -26,7 +26,7 @@
 #include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
 #include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
 #include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
-#include <aidlcommonsupport/NativeHandle.h>
+#include <android/native_window_aidl.h>
 #include <inttypes.h>
 #include <map>
 #include <utility>
@@ -59,6 +59,7 @@
 using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
 using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
 using ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using ::aidl::android::view::Surface;
 using ::ndk::ScopedAStatus;
 
 // Static member definitions
@@ -231,8 +232,9 @@
         OutputConfiguration& outputStream = sessionConfig.outputStreams[index];
         outputStream.rotation = utils::convertToAidl(output.mRotation);
         outputStream.windowGroupId = -1;
-        outputStream.windowHandles.resize(output.mSharedWindows.size() + 1);
-        outputStream.windowHandles[0] = std::move(dupToAidl(output.mWindow));
+        auto& surfaces = outputStream.surfaces;
+        surfaces.reserve(output.mSharedWindows.size() + 1);
+        surfaces.emplace_back(output.mWindow);
         outputStream.physicalCameraId = output.mPhysicalCameraId;
         index++;
     }
@@ -298,12 +300,12 @@
 
     OutputConfiguration outConfig;
     outConfig.rotation = utils::convertToAidl(output->mRotation);
-    outConfig.windowHandles.resize(output->mSharedWindows.size() + 1);
-    outConfig.windowHandles[0] = std::move(dupToAidl(output->mWindow));
+    auto& surfaces = outConfig.surfaces;
+    surfaces.reserve(output->mSharedWindows.size() + 1);
+    surfaces.emplace_back(output->mWindow);
     outConfig.physicalCameraId = output->mPhysicalCameraId;
-    int i = 1;
     for (auto& anw : output->mSharedWindows) {
-        outConfig.windowHandles[i++] = std::move(dupToAidl(anw));
+        surfaces.emplace_back(anw);
     }
 
     auto remoteRet = mRemote->updateOutputConfiguration(streamId,
@@ -340,7 +342,7 @@
     return ACAMERA_OK;
 }
 
-camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+camera_status_t CameraDevice::prepareLocked(ANativeWindow *window) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         return ret;
@@ -387,18 +389,19 @@
     std::vector<int32_t> requestSurfaceIdxList;
 
     for (auto& outputTarget : request->targets->mOutputs) {
-        native_handle_ptr_wrapper anw = outputTarget.mWindow;
+        ANativeWindow *anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
         for (const auto& kvPair : mConfiguredOutputs) {
             int streamId = kvPair.first;
             const OutputConfiguration& outConfig = kvPair.second.second;
-            const auto& windowHandles = outConfig.windowHandles;
-            for (int surfaceId = 0; surfaceId < (int) windowHandles.size(); surfaceId++) {
+            const auto& surfaces = outConfig.surfaces;
+            for (int surfaceId = 0; surfaceId < (int) surfaces.size(); surfaceId++) {
                 // If two window handles point to the same native window,
                 // they have the same surfaces.
-                if (utils::isWindowNativeHandleEqual(anw, windowHandles[surfaceId])) {
+                auto& surface = surfaces[surfaceId];
+                if (anw == surface.get()) {
                     found = true;
                     requestStreamIdxList.push_back(streamId);
                     requestSurfaceIdxList.push_back(surfaceId);
@@ -410,7 +413,7 @@
             }
         }
         if (!found) {
-            ALOGE("Unconfigured output target %p in capture request!", anw.mWindow);
+            ALOGE("Unconfigured output target %p in capture request!", anw);
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
@@ -470,7 +473,7 @@
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        native_handle_ptr_wrapper anw = req->mSurfaceList[i];
+        ANativeWindow *anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(std::move(outputTarget));
     }
@@ -637,20 +640,21 @@
         return ret;
     }
 
-    std::map<native_handle_ptr_wrapper, OutputConfiguration> handleToConfig;
+    std::map<ANativeWindow *, OutputConfiguration> windowToConfig;
     for (const auto& outConfig : outputs->mOutputs) {
-        native_handle_ptr_wrapper anw = outConfig.mWindow;
+        ANativeWindow *anw = outConfig.mWindow;
         OutputConfiguration outConfigInsert;
         outConfigInsert.rotation = utils::convertToAidl(outConfig.mRotation);
         outConfigInsert.windowGroupId = -1;
-        outConfigInsert.windowHandles.resize(outConfig.mSharedWindows.size() + 1);
-        outConfigInsert.windowHandles[0] = std::move(dupToAidl(anw));
+        auto& surfaces = outConfigInsert.surfaces;
+        surfaces.reserve(outConfig.mSharedWindows.size() + 1);
+        surfaces.emplace_back(anw);
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
-        handleToConfig.insert({anw, std::move(outConfigInsert)});
+        windowToConfig.insert({anw, std::move(outConfigInsert)});
     }
 
-    std::set<native_handle_ptr_wrapper> addSet;
-    for (auto& kvPair : handleToConfig) {
+    std::set<ANativeWindow *> addSet;
+    for (auto& kvPair : windowToConfig) {
         addSet.insert(kvPair.first);
     }
 
@@ -663,8 +667,8 @@
         auto& anw = outputPair.first;
         auto& configuredOutput = outputPair.second;
 
-        auto itr = handleToConfig.find(anw);
-        if (itr != handleToConfig.end() && (itr->second) == configuredOutput) {
+        auto itr = windowToConfig.find(anw);
+        if (itr != windowToConfig.end() && (itr->second) == configuredOutput) {
             deleteList.push_back(streamId);
         } else {
             addSet.erase(anw);
@@ -714,13 +718,13 @@
     // add new streams
     for (const auto &anw : addSet) {
         int32_t streamId;
-        auto itr = handleToConfig.find(anw);
+        auto itr = windowToConfig.find(anw);
         remoteRet = mRemote->createStream(itr->second, &streamId);
         CHECK_TRANSACTION_AND_RET(remoteRet, "createStream()")
         mConfiguredOutputs.insert(std::make_pair(streamId,
                                                  std::make_pair(anw,
                                                                 std::move(itr->second))));
-        handleToConfig.erase(itr);
+        windowToConfig.erase(itr);
     }
 
     AidlCameraMetadata aidlParams;
@@ -867,9 +871,9 @@
         // Get the surfaces corresponding to the error stream id, go through
         // them and try to match the surfaces in the corresponding
         // CaptureRequest.
-        const auto& errorWindowHandles =
-                outputPairIt->second.second.windowHandles;
-        for (const auto& errorWindowHandle : errorWindowHandles) {
+        const auto& errorSurfaces =
+                outputPairIt->second.second.surfaces;
+        for (const auto& errorSurface : errorSurfaces) {
             for (const auto &requestStreamAndWindowId :
                         request->mCaptureRequest.streamAndWindowIds) {
                 // Go through the surfaces in the capture request and see which
@@ -884,12 +888,11 @@
                     return;
                 }
 
-                const auto &requestWindowHandles =
-                        requestSurfacePairIt->second.second.windowHandles;
+                const auto &requestSurfaces = requestSurfacePairIt->second.second.surfaces;
+                auto& requestSurface = requestSurfaces[requestWindowId];
 
-                if (requestWindowHandles[requestWindowId] == errorWindowHandle) {
-                    const native_handle_t* anw = makeFromAidl(
-                            requestWindowHandles[requestWindowId]);
+                if (requestSurface == errorSurface) {
+                    const ANativeWindow *anw = requestSurface.get();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -1085,7 +1088,7 @@
                     if (onWindowPrepared == nullptr) {
                         return;
                     }
-                    native_handle_t* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
@@ -1342,10 +1345,10 @@
                         return;
                     }
 
-                    native_handle_t* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
-                        ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
+                        ALOGE("%s: Cannot find ANativeWindow!", __FUNCTION__);
                         return;
                     }
 
@@ -1359,7 +1362,6 @@
                     ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
                     (*onBufferLost)(context, session.get(), request, anw, frameNumber);
                     freeACaptureRequest(request);
-                    native_handle_delete(anw); // clean up anw as it was copied from AIDL
                     break;
                 }
             }
@@ -1842,7 +1844,7 @@
         return ScopedAStatus::ok();
     }
     // We've found the window corresponding to the surface id.
-    const native_handle_t *anw = it->second.first.mWindow;
+    const ANativeWindow *anw = it->second.first;
     sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
     msg->setPointer(kContextKey, session->mPreparedCb.context);
     msg->setPointer(kAnwKey, (void *)anw);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 6e0c772..b771d47 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -66,7 +66,6 @@
 using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
 using ::android::AidlMessageQueue;
-using ::android::acam::utils::native_handle_ptr_wrapper;
 
 
 using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
@@ -197,7 +196,7 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
-    camera_status_t prepareLocked(ACameraWindowType *window);
+    camera_status_t prepareLocked(ANativeWindow *window);
 
     // Since this writes to ICameraDeviceUser's fmq, clients must take care that:
     //   a) This function is called serially.
@@ -236,7 +235,7 @@
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
-    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfiguration>> mConfiguredOutputs;
+    std::map<int, std::pair<ANativeWindow *, OutputConfiguration>> mConfiguredOutputs;
 
     // TODO: maybe a bool will suffice for synchronous implementation?
     std::atomic_bool mClosing;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 3aa7817..099786b 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -396,6 +396,7 @@
 
 template <class T>
 void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+    getCameraService();
     Mutex::Autolock _l(mLock);
     Callback cb(callback);
     auto res = mCallbacks.insert(cb);
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
deleted file mode 100644
index fcb7e34..0000000
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "utils.h"
-
-using ::android::acam::utils::native_handle_ptr_wrapper;
-struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
-
-    bool operator == (const ACameraOutputTarget& other) const {
-        return mWindow == other.mWindow;
-    }
-    bool operator != (const ACameraOutputTarget& other) const {
-        return mWindow != other.mWindow;
-    }
-    bool operator < (const ACameraOutputTarget& other) const {
-        return mWindow < other.mWindow;
-    }
-    bool operator > (const ACameraOutputTarget& other) const {
-        return mWindow > other.mWindow;
-    }
-
-    native_handle_ptr_wrapper mWindow;
-};
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index 73a527b..3971c73 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -18,7 +18,6 @@
 
 #include "utils.h"
 
-#include <aidlcommonsupport/NativeHandle.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -138,51 +137,6 @@
     return ret;
 }
 
-bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2) {
-    if (nh1->numFds !=0 || nh2->numFds !=0) {
-        ALOGE("Invalid window native handles being compared");
-        return false;
-    }
-    if (nh1->version != nh2->version || nh1->numFds != nh2->numFds ||
-        nh1->numInts != nh2->numInts) {
-        return false;
-    }
-    for (int i = 0; i < nh1->numInts; i++) {
-        if(nh1->data[i] != nh2->data[i]) {
-            return false;
-        }
-    }
-    return true;
-}
-
-bool isWindowNativeHandleEqual(const native_handle_t *nh1,
-                               const aidl::android::hardware::common::NativeHandle& nh2) {
-    native_handle_t* tempNh = makeFromAidl(nh2);
-    bool equal = isWindowNativeHandleEqual(nh1, tempNh);
-    native_handle_delete(tempNh);
-    return equal;
-}
-
-bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2) {
-    if (isWindowNativeHandleEqual(nh1, nh2)) {
-        return false;
-    }
-    if (nh1->numInts != nh2->numInts) {
-        return nh1->numInts < nh2->numInts;
-    }
-
-    for (int i = 0; i < nh1->numInts; i++) {
-        if (nh1->data[i] != nh2->data[i]) {
-            return nh1->data[i] < nh2->data[i];
-        }
-    }
-    return false;
-}
-
-bool isWindowNativeHandleGreaterThan(const native_handle_t *nh1, const native_handle_t *nh2) {
-    return !isWindowNativeHandleLessThan(nh1, nh2) && !isWindowNativeHandleEqual(nh1, nh2);
-}
-
 } // namespace utils
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 7ad74ad..d0dd2fc 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -38,53 +38,14 @@
 using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
 using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
 using ::aidl::android::frameworks::cameraservice::device::TemplateId;
-using ::aidl::android::hardware::common::NativeHandle;
 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
 using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
 using AidlCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
 
-bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2);
-
-bool isWindowNativeHandleEqual(const native_handle_t* nh1, const NativeHandle& nh2);
-
-bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2);
-
-// Convenience wrapper over isWindowNativeHandleLessThan and isWindowNativeHandleEqual
-bool isWindowNativeHandleGreaterThan(const native_handle_t *nh1, const native_handle_t *nh2);
-
-// Utility class so the native_handle_t can be compared with  its contents instead
-// of just raw pointer comparisons.
-struct native_handle_ptr_wrapper {
-    const native_handle_t *mWindow = nullptr;
-
-    native_handle_ptr_wrapper(const native_handle_t *nh) : mWindow(nh) { }
-
-    native_handle_ptr_wrapper() = default;
-
-    operator const native_handle_t *() const { return mWindow; }
-
-    bool operator ==(const native_handle_ptr_wrapper other) const {
-        return isWindowNativeHandleEqual(mWindow, other.mWindow);
-    }
-
-    bool operator != (const native_handle_ptr_wrapper& other) const {
-        return !isWindowNativeHandleEqual(mWindow, other.mWindow);
-    }
-
-    bool operator < (const native_handle_ptr_wrapper& other) const {
-        return isWindowNativeHandleLessThan(mWindow, other.mWindow);
-    }
-
-    bool operator > (const native_handle_ptr_wrapper& other) const {
-        return !isWindowNativeHandleGreaterThan(mWindow, other.mWindow);
-    }
-
-};
-
 // Utility class so that CaptureRequest can be stored by sp<>
 struct CaptureRequest: public RefBase {
   AidlCaptureRequest mCaptureRequest;
-  std::vector<native_handle_ptr_wrapper> mSurfaceList;
+  std::vector<ANativeWindow *> mSurfaceList;
   // Physical camera settings metadata is stored here, as the capture request
   // might not contain it. That's since, fmq might have consumed it.
   std::vector<PhysicalCameraSettings> mPhysicalCameraSettings;
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 74c6cad..0259359 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,8 +31,6 @@
 #include <stdio.h>
 
 #include <android/log.h>
-#include <android/hidl/manager/1.2/IServiceManager.h>
-#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
@@ -40,7 +38,6 @@
 #include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
-#include <cutils/native_handle.h>
 #include <VendorTagDescriptor.h>
 
 namespace {
@@ -53,9 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
-using android::hidl::manager::V1_0::IServiceManager;
-using android::hidl::token::V1_0::ITokenManager;
-using ConfiguredWindows = std::set<const native_handle_t *>;
+using ConfiguredWindows = std::set<ANativeWindow*>;
 
 class CameraHelper {
    public:
@@ -65,11 +60,11 @@
 
     struct PhysicalImgReaderInfo {
         const char* physicalCameraId;
-        const native_handle_t* anw;
+        ANativeWindow* anw;
     };
 
     // Retaining the error code in case the caller needs to analyze it.
-    std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
+    std::variant<int, ConfiguredWindows> initCamera(ANativeWindow* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings, bool prepareWindows = false) {
         ConfiguredWindows configuredWindows;
@@ -109,7 +104,7 @@
         }
         configuredWindows.insert(mImgReaderAnw);
         std::vector<const char*> idPointerList;
-        std::set<const native_handle_t*> physicalStreamMap;
+        std::set<ANativeWindow*> physicalStreamMap;
         for (auto& physicalStream : physicalImgReaders) {
             ACaptureSessionOutput* sessionOutput = nullptr;
             ret = ACaptureSessionPhysicalOutput_create(physicalStream.anw,
@@ -301,7 +296,7 @@
 
 
    private:
-    static void onPreparedCb(void* obj, ACameraWindowType *anw, ACameraCaptureSession *session) {
+    static void onPreparedCb(void* obj, ANativeWindow *anw, ACameraCaptureSession *session) {
         CameraHelper* thiz = reinterpret_cast<CameraHelper*>(obj);
         thiz->handlePrepared(anw, session);
     }
@@ -317,7 +312,7 @@
         return ret;
     }
 
-    void handlePrepared(ACameraWindowType *anw, ACameraCaptureSession *session) {
+    void handlePrepared(ANativeWindow *anw, ACameraCaptureSession *session) {
         // Reduce the pending prepared count of anw by 1. If count is 0, remove the key.
         std::lock_guard<std::mutex> lock(mMutex);
         if (session != mSession) {
@@ -334,7 +329,7 @@
             mPendingPreparedCbs.erase(anw);
         }
     }
-    void incPendingPrepared(ACameraWindowType *anw) {
+    void incPendingPrepared(ANativeWindow *anw) {
         std::lock_guard<std::mutex> lock(mMutex);
         if ((mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end())) {
             mPendingPreparedCbs[anw] = 1;
@@ -344,13 +339,13 @@
     }
 
     // ANW -> pending prepared callbacks
-    std::unordered_map<ACameraWindowType *, int> mPendingPreparedCbs;
+    std::unordered_map<ANativeWindow*, int> mPendingPreparedCbs;
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
     ACameraCaptureSession_prepareCallback mPreparedCb = &onPreparedCb;
 
-    const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
+    ANativeWindow* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
     ACameraDevice* mDevice = nullptr;
@@ -484,7 +479,7 @@
     ~ImageReaderTestCase() {
         if (mImgReaderAnw) {
             AImageReader_delete(mImgReader);
-            // No need to call native_handle_t_release on imageReaderAnw
+            // mImgReaderAnw is owned by the AImageReader; do not call ANativeWindow_release on it.
         }
     }
 
@@ -514,17 +509,18 @@
             return ret;
         }
 
-        ret = AImageReader_getWindowNativeHandle(mImgReader, &mImgReaderAnw);
+        ret = AImageReader_getWindow(mImgReader, &mImgReaderAnw);
         if (ret != AMEDIA_OK || mImgReaderAnw == nullptr) {
-            ALOGE("Failed to get native_handle_t from AImageReader, ret=%d, mImgReaderAnw=%p.", ret,
-                  mImgReaderAnw);
+            ALOGE("Failed to get ANativeWindow* from AImageReader, ret=%d, mImgReader=%p.", ret,
+                  mImgReader);
             return -1;
         }
 
         return 0;
     }
 
-    const native_handle_t* getNativeWindow() { return mImgReaderAnw; }
+    ANativeWindow* getNativeWindow() { return mImgReaderAnw; }
 
     int getAcquiredImageCount() {
         std::lock_guard<std::mutex> lock(mMutex);
@@ -657,7 +653,7 @@
     int mAcquiredImageCount{0};
 
     AImageReader* mImgReader = nullptr;
-    native_handle_t* mImgReaderAnw = nullptr;
+    ANativeWindow* mImgReaderAnw = nullptr;
 
     AImageReader_ImageListener mReaderAvailableCb{this, onImageAvailable};
     AImageReader_BufferRemovedListener mReaderDetachedCb{this, onBufferRemoved};
@@ -985,20 +981,12 @@
 
 
 
-TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
-    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
-    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
-        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
-    }
+TEST_F(AImageReaderVendorTest, CreateANativeWindow) {
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
-    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
-    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
-        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
-    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
new file mode 100644
index 0000000..9aaac6a
--- /dev/null
+++ b/camera/tests/Android.bp
@@ -0,0 +1,53 @@
+// Copyright 2013 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_team: "trendy_team_camera_framework",
+    // See: http://go/android-license-faq
+    default_applicable_licenses: [
+        "frameworks_av_camera_license",
+    ],
+}
+
+cc_test {
+    name: "camera_client_test",
+    srcs: [
+        "VendorTagDescriptorTests.cpp",
+        "CameraBinderTests.cpp",
+        "CameraZSLTests.cpp",
+        "CameraCharacteristicsPermission.cpp",
+    ],
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libcutils",
+        "libcamera_metadata",
+        "libcamera_client",
+        "libgui",
+        "libsync",
+        "libui",
+        "libdl",
+        "libbinder",
+    ],
+    include_dirs: [
+        "system/media/private/camera/include",
+        "system/media/camera/tests",
+        "frameworks/av/services/camera/libcameraservice",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk
deleted file mode 100644
index 7f8078e..0000000
--- a/camera/tests/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_SRC_FILES:= \
-	VendorTagDescriptorTests.cpp \
-	CameraBinderTests.cpp \
-	CameraZSLTests.cpp \
-	CameraCharacteristicsPermission.cpp
-
-LOCAL_SHARED_LIBRARIES := \
-	liblog \
-	libutils \
-	libcutils \
-	libcamera_metadata \
-	libcamera_client \
-	libgui \
-	libsync \
-	libui \
-	libdl \
-	libbinder
-
-LOCAL_C_INCLUDES += \
-	system/media/private/camera/include \
-	system/media/camera/tests \
-	frameworks/av/services/camera/libcameraservice \
-
-LOCAL_CFLAGS += -Wall -Wextra -Werror
-
-LOCAL_MODULE:= camera_client_test
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/../NOTICE
-LOCAL_MODULE_TAGS := tests
-
-include $(BUILD_NATIVE_TEST)
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index b74b7a1..bd97c39 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
index 07efc20..8371905 100644
--- a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -16,14 +16,19 @@
 
 #include <CameraParameters.h>
 #include <CameraParameters2.h>
+#include <camera/StringUtils.h>
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <utils/String16.h>
 #include <camera/StringUtils.h>
 
+#include <functional>
+
 using namespace std;
 using namespace android;
 
+constexpr int8_t kMaxBytes = 20;
+
 string kValidFormats[] = {
         CameraParameters::PIXEL_FORMAT_YUV422SP,      CameraParameters::PIXEL_FORMAT_YUV420SP,
         CameraParameters::PIXEL_FORMAT_YUV422I,       CameraParameters::PIXEL_FORMAT_YUV420P,
@@ -34,26 +39,22 @@
 class CameraParametersFuzzer {
   public:
     void process(const uint8_t* data, size_t size);
-    ~CameraParametersFuzzer() {
-        delete mCameraParameters;
-        delete mCameraParameters2;
-    }
 
   private:
     void invokeCameraParameters();
     template <class type>
-    void initCameraParameters(type** obj);
+    void initCameraParameters(unique_ptr<type>& obj);
     template <class type>
-    void cameraParametersCommon(type* obj);
-    CameraParameters* mCameraParameters = nullptr;
-    CameraParameters2* mCameraParameters2 = nullptr;
+    void callCameraParametersAPIs(unique_ptr<type>& obj);
+    unique_ptr<CameraParameters> mCameraParameters;
+    unique_ptr<CameraParameters2> mCameraParameters2;
     FuzzedDataProvider* mFDP = nullptr;
 };
 
 template <class type>
-void CameraParametersFuzzer::initCameraParameters(type** obj) {
+void CameraParametersFuzzer::initCameraParameters(unique_ptr<type>& obj) {
     if (mFDP->ConsumeBool()) {
-        *obj = new type();
+        obj = make_unique<type>();
     } else {
         string params;
         if (mFDP->ConsumeBool()) {
@@ -61,94 +62,176 @@
             int32_t height = mFDP->ConsumeIntegral<int32_t>();
             int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
             int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
-            params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+            params = mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                         : CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
             params += '=' + to_string(width) + 'x' + to_string(height) + ';';
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_PREVIEW_FPS_RANGE;
                 params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
             }
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
                 params += '=' + to_string(width) + 'x' + to_string(height) + ';';
             }
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
-                params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+                params += '=' +
+                          (mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                               : mFDP->PickValueInArray(kValidFormats)) + ';';
             }
         } else {
-            params = mFDP->ConsumeRandomLengthString();
+            params = mFDP->ConsumeRandomLengthString(kMaxBytes);
         }
-        *obj = new type(toString8(params));
+        obj = make_unique<type>(toString8(params));
     }
 }
 
 template <class type>
-void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
-    Vector<Size> supportedPreviewSizes;
-    obj->getSupportedPreviewSizes(supportedPreviewSizes);
-    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
-    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
-    obj->setPreviewSize(previewWidth, previewHeight);
-    obj->getPreviewSize(&previewWidth, &previewHeight);
-
+void CameraParametersFuzzer::callCameraParametersAPIs(unique_ptr<type>& obj) {
     Vector<Size> supportedVideoSizes;
-    obj->getSupportedVideoSizes(supportedVideoSizes);
-    if (supportedVideoSizes.size() != 0) {
-        int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
-        if (mFDP->ConsumeBool()) {
-            int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
-            obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
-        } else {
-            videoWidth = mFDP->ConsumeIntegral<int32_t>();
-            videoHeight = mFDP->ConsumeIntegral<int32_t>();
-            obj->setVideoSize(videoWidth, videoHeight);
-        }
-        obj->getVideoSize(&videoWidth, &videoHeight);
-        obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
-    }
-
-    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
-    obj->setPreviewFrameRate(fps);
-    obj->getPreviewFrameRate();
-    string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                               : mFDP->ConsumeRandomLengthString();
-    obj->setPreviewFormat(previewFormat.c_str());
-
-    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
-    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
-    Vector<Size> supportedPictureSizes;
-    obj->setPictureSize(pictureWidth, pictureHeight);
-    obj->getPictureSize(&pictureWidth, &pictureHeight);
-    obj->getSupportedPictureSizes(supportedPictureSizes);
-    string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                               : mFDP->ConsumeRandomLengthString();
-    obj->setPictureFormat(pictureFormat.c_str());
-    obj->getPictureFormat();
-
-    if (mFDP->ConsumeBool()) {
-        obj->dump();
-    } else {
-        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-        Vector<String16> args = {};
-        obj->dump(fd, args);
-        close(fd);
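+    // Keep consuming fuzzed input, invoking one randomly selected CameraParameters API per iteration.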
+    while (mFDP->remaining_bytes()) {
+        auto callCameraUtilsAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    Vector<Size> supportedPreviewSizes;
+                    obj->getSupportedPreviewSizes(supportedPreviewSizes);
+                },
+                [&]() {
+                    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+                    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPreviewSize(previewWidth, previewHeight);
+                },
+                [&]() {
+                    int32_t previewWidth, previewHeight;
+                    obj->getPreviewSize(&previewWidth, &previewHeight);
+                },
+                [&]() { obj->getSupportedVideoSizes(supportedVideoSizes); },
+                [&]() {
+                    int32_t videoWidth, videoHeight;
+                    if (supportedVideoSizes.size()) {
+                        int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(
+                                0, supportedVideoSizes.size() - 1);
+                        videoWidth = mFDP->ConsumeBool() ? supportedVideoSizes[idx].width
+                                                         : mFDP->ConsumeIntegral<int32_t>();
+                        videoHeight = mFDP->ConsumeBool() ? supportedVideoSizes[idx].height
+                                                          : mFDP->ConsumeIntegral<int32_t>();
+                        obj->setVideoSize(videoWidth, videoHeight);
+                    }
+                },
+                [&]() {
+                    int32_t videoWidth, videoHeight;
+                    obj->getVideoSize(&videoWidth, &videoHeight);
+                },
+                [&]() {
+                    int32_t preferredVideoWidth, preferredVideoHeight;
+                    obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth,
+                                                         &preferredVideoHeight);
+                },
+                [&]() {
+                    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPreviewFrameRate(fps);
+                },
+                [&]() { obj->getPreviewFrameRate(); },
+                [&]() {
+                    string previewFormat = mFDP->ConsumeBool()
+                                                   ? mFDP->PickValueInArray(kValidFormats)
+                                                   : mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->setPreviewFormat(previewFormat.c_str());
+                },
+                [&]() {
+                    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+                    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPictureSize(pictureWidth, pictureHeight);
+                },
+                [&]() {
+                    int32_t pictureWidth, pictureHeight;
+                    obj->getPictureSize(&pictureWidth, &pictureHeight);
+                },
+                [&]() {
+                    Vector<Size> supportedPictureSizes;
+                    obj->getSupportedPictureSizes(supportedPictureSizes);
+                },
+                [&]() {
+                    string pictureFormat = mFDP->ConsumeBool()
+                                                   ? mFDP->PickValueInArray(kValidFormats)
+                                                   : mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->setPictureFormat(pictureFormat.c_str());
+                },
+                [&]() { obj->getPictureFormat(); },
+                [&]() {
+                    if (mFDP->ConsumeBool()) {
+                        obj->dump();
+                    } else {
+                        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                        Vector<String16> args = {};
+                        obj->dump(fd, args);
+                        close(fd);
+                    }
+                },
+                [&]() { obj->flatten(); },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    float value = mFDP->ConsumeFloatingPoint<float>();
+                    obj->setFloat(key.c_str(), value);
+                },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->getFloat(key.c_str());
+                },
+                [&]() { obj->getPreviewFormat(); },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->remove(key.c_str());
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        string format = mFDP->ConsumeBool()
+                                                ? mFDP->ConsumeRandomLengthString(kMaxBytes)
+                                                : mFDP->PickValueInArray(kValidFormats);
+                        mCameraParameters->previewFormatToEnum(format.c_str());
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        mCameraParameters->isEmpty();
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        Vector<int32_t> formats;
+                        mCameraParameters->getSupportedPreviewFormats(formats);
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters2>) {
+                        string key1 = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        string key2 = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t order;
+                        mCameraParameters2->compareSetOrder(key1.c_str(), key2.c_str(), &order);
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters2>) {
+                        int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+                        int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+                        mCameraParameters2->setPreviewFpsRange(minFps, maxFps);
+                    }
+                },
+        });
+        callCameraUtilsAPIs();
     }
 }
 
 void CameraParametersFuzzer::invokeCameraParameters() {
-    initCameraParameters<CameraParameters>(&mCameraParameters);
-    cameraParametersCommon<CameraParameters>(mCameraParameters);
-    initCameraParameters<CameraParameters2>(&mCameraParameters2);
-    cameraParametersCommon<CameraParameters2>(mCameraParameters2);
-
-    int32_t minFPS, maxFPS;
-    mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
-    string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                        : mFDP->ConsumeRandomLengthString();
-    mCameraParameters->previewFormatToEnum(format.c_str());
-    mCameraParameters->isEmpty();
-    Vector<int32_t> formats;
-    mCameraParameters->getSupportedPreviewFormats(formats);
+    if (mFDP->ConsumeBool()) {
+        initCameraParameters<CameraParameters>(mCameraParameters);
+        callCameraParametersAPIs(mCameraParameters);
+    } else {
+        initCameraParameters<CameraParameters2>(mCameraParameters2);
+        callCameraParametersAPIs(mCameraParameters2);
+    }
 }
 
 void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
index 494ec1b..5ad9530 100644
--- a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -44,7 +44,7 @@
     }
 
     for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
-        string id = fdp.ConsumeRandomLengthString();
+        string id = fdp.ConsumeRandomLengthString(kMaxBytes);
         if (fdp.ConsumeBool()) {
             parcelCamCaptureReq.writeString16(toString16(id));
         }
@@ -120,7 +120,11 @@
         }
     }
 
-    invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
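+    // Randomly choose between the existing parcel round-trip helper and the fuzzed-data variant.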
+    if (fdp.ConsumeBool()) {
+        invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    } else {
+        invokeNewReadWriteParcelsp<CaptureRequest>(captureRequest, fdp);
+    }
     invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
     parcelCamCaptureReq.setDataPosition(0);
     captureRequest->readFromParcel(&parcelCamCaptureReq);
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 2fe9a94..7046075 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -26,85 +26,122 @@
 using namespace android;
 using namespace android::hardware::camera2::params;
 
+constexpr int8_t kMaxLoopIterations = 100;
 constexpr int32_t kSizeMin = 0;
 constexpr int32_t kSizeMax = 1000;
 
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+class C2OutputConfigurationFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
 
-    OutputConfiguration* outputConfiguration = nullptr;
+  private:
+    void invokeC2OutputConfigFuzzer();
+    unique_ptr<OutputConfiguration> getC2OutputConfig();
+    sp<IGraphicBufferProducer> createIGraphicBufferProducer();
+    FuzzedDataProvider* mFDP = nullptr;
+};
 
-    if (fdp.ConsumeBool()) {
-        outputConfiguration = new OutputConfiguration();
+sp<IGraphicBufferProducer> C2OutputConfigurationFuzzer::createIGraphicBufferProducer() {
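+    // Build a SurfaceControl with fuzzed name, dimensions, format and flags, and return its buffer producer (may be null).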
+    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+    sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+            static_cast<String8>(mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()) /* name */,
+            mFDP->ConsumeIntegral<uint32_t>() /* width */,
+            mFDP->ConsumeIntegral<uint32_t>() /* height */,
+            mFDP->ConsumeIntegral<int32_t>() /* format */,
+            mFDP->ConsumeIntegral<int32_t>() /* flags */);
+    if (surfaceControl) {
+        sp<Surface> surface = surfaceControl->getSurface();
+        return surface->getIGraphicBufferProducer();
     } else {
-        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
-        string physicalCameraId = fdp.ConsumeRandomLengthString();
-        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
-        bool isShared = fdp.ConsumeBool();
-
-        if (fdp.ConsumeBool()) {
-            sp<IGraphicBufferProducer> iGBP = nullptr;
-            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
-                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
-                    fdp.ConsumeIntegral<uint32_t>() /* width */,
-                    fdp.ConsumeIntegral<uint32_t>() /* height */,
-                    fdp.ConsumeIntegral<int32_t>() /* format */,
-                    fdp.ConsumeIntegral<int32_t>() /* flags */);
-            if (surfaceControl) {
-                sp<Surface> surface = surfaceControl->getSurface();
-                iGBP = surface->getIGraphicBufferProducer();
-            }
-            outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
-                                                          surfaceSetID, isShared);
-            iGBP.clear();
-            composerClient.clear();
-            surfaceControl.clear();
-        } else {
-            size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
-            vector<sp<IGraphicBufferProducer>> iGBPs;
-            for (size_t idx = 0; idx < iGBPSize; ++idx) {
-                sp<IGraphicBufferProducer> iGBP = nullptr;
-                sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-                sp<SurfaceControl> surfaceControl = composerClient->createSurface(
-                        static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
-                        fdp.ConsumeIntegral<uint32_t>() /* width */,
-                        fdp.ConsumeIntegral<uint32_t>() /* height */,
-                        fdp.ConsumeIntegral<int32_t>() /* format */,
-                        fdp.ConsumeIntegral<int32_t>() /* flags */);
-                if (surfaceControl) {
-                    sp<Surface> surface = surfaceControl->getSurface();
-                    iGBP = surface->getIGraphicBufferProducer();
-                    iGBPs.push_back(iGBP);
-                }
-                iGBP.clear();
-                composerClient.clear();
-                surfaceControl.clear();
-            }
-            outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
-                                                          surfaceSetID, isShared);
-        }
+        sp<IGraphicBufferProducer> gbp;
+        return gbp;
     }
+}
 
-    outputConfiguration->getRotation();
-    outputConfiguration->getSurfaceSetID();
-    outputConfiguration->getSurfaceType();
-    outputConfiguration->getWidth();
-    outputConfiguration->getHeight();
-    outputConfiguration->isDeferred();
-    outputConfiguration->isShared();
-    outputConfiguration->getPhysicalCameraId();
+unique_ptr<OutputConfiguration> C2OutputConfigurationFuzzer::getC2OutputConfig() {
+    unique_ptr<OutputConfiguration> outputConfiguration = nullptr;
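+    // Randomly pick one of the OutputConfiguration constructors: default, single producer, or a list of producers.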
+    auto selectOutputConfigurationConstructor =
+            mFDP->PickValueInArray<const std::function<void()>>({
+                    [&]() { outputConfiguration = make_unique<OutputConfiguration>(); },
 
-    OutputConfiguration outputConfiguration2;
-    outputConfiguration->gbpsEqual(outputConfiguration2);
-    outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
-    outputConfiguration->gbpsLessThan(outputConfiguration2);
-    outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
-    outputConfiguration->getGraphicBufferProducers();
-    sp<IGraphicBufferProducer> gbp;
-    outputConfiguration->addGraphicProducer(gbp);
-    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
-    invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
-    delete outputConfiguration;
+                    [&]() {
+                        int32_t rotation = mFDP->ConsumeIntegral<int32_t>();
+                        string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
+                        bool isShared = mFDP->ConsumeBool();
+                        sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
+                        outputConfiguration = make_unique<OutputConfiguration>(
+                                iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+                    },
+
+                    [&]() {
+                        int32_t rotation = mFDP->ConsumeIntegral<int32_t>();
+                        string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
+                        bool isShared = mFDP->ConsumeBool();
+                        size_t iGBPSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+                        vector<sp<IGraphicBufferProducer>> iGBPs;
+                        for (size_t idx = 0; idx < iGBPSize; ++idx) {
+                            sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
+                            iGBPs.push_back(iGBP);
+                        }
+                        outputConfiguration = make_unique<OutputConfiguration>(
+                                iGBPs, rotation, physicalCameraId, surfaceSetID, isShared);
+                    },
+            });
+    selectOutputConfigurationConstructor();
+    return outputConfiguration;
+}
+
+void C2OutputConfigurationFuzzer::invokeC2OutputConfigFuzzer() {
+    unique_ptr<OutputConfiguration> outputConfiguration = getC2OutputConfig();
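+    // Run a bounded loop; each iteration builds a second configuration and invokes one randomly selected API.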
+    int8_t count = kMaxLoopIterations;
+    while (--count > 0) {
+        unique_ptr<OutputConfiguration> outputConfiguration2 = getC2OutputConfig();
+        auto callC2OutputConfAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { outputConfiguration->getRotation(); },
+                [&]() { outputConfiguration->getSurfaceSetID(); },
+                [&]() { outputConfiguration->getSurfaceType(); },
+                [&]() { outputConfiguration->getWidth(); },
+                [&]() { outputConfiguration->getHeight(); },
+                [&]() { outputConfiguration->isDeferred(); },
+                [&]() { outputConfiguration->isShared(); },
+                [&]() { outputConfiguration->getPhysicalCameraId(); },
+                [&]() { outputConfiguration->gbpsEqual(*outputConfiguration2); },
+                [&]() { outputConfiguration->sensorPixelModesUsedEqual(*outputConfiguration2); },
+                [&]() { outputConfiguration->gbpsLessThan(*outputConfiguration2); },
+                [&]() { outputConfiguration->sensorPixelModesUsedLessThan(*outputConfiguration2); },
+                [&]() { outputConfiguration->getGraphicBufferProducers(); },
+                [&]() {
+                    sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
+                    outputConfiguration->addGraphicProducer(gbp);
+                },
+                [&]() { outputConfiguration->isMultiResolution(); },
+                [&]() { outputConfiguration->getColorSpace(); },
+                [&]() { outputConfiguration->getStreamUseCase(); },
+                [&]() { outputConfiguration->getTimestampBase(); },
+                [&]() { outputConfiguration->getMirrorMode(); },
+                [&]() { outputConfiguration->useReadoutTimestamp(); },
+        });
+        callC2OutputConfAPIs();
+    }
+    // Do not keep invokeReadWrite() APIs in while loop to avoid possible OOM.
+    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration.get());
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcel<OutputConfiguration>(outputConfiguration.get());
+    } else {
+        invokeNewReadWriteParcel<OutputConfiguration>(outputConfiguration.get(), *mFDP);
+    }
+}
+
+void C2OutputConfigurationFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeC2OutputConfigFuzzer();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    C2OutputConfigurationFuzzer c2OutputConfigurationFuzzer;
+    c2OutputConfigurationFuzzer.process(data, size);
     return 0;
 }
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
index dc40b0f..c588f11 100644
--- a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -27,6 +27,10 @@
     SubmitInfo submitInfo;
     submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
     submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
-    invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    if (fdp.ConsumeBool()) {
+        invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    } else {
+        invokeNewReadWriteParcel<SubmitInfo>(&submitInfo, fdp);
+    }
     return 0;
 }
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
index e14d9ce..3131f1d 100644
--- a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -29,6 +29,8 @@
 constexpr int32_t kRangeMin = 0;
 constexpr int32_t kRangeMax = 1000;
 constexpr int32_t kVendorTagDescriptorId = -1;
+constexpr int8_t kMinLoopIterations = 1;
+constexpr int8_t kMaxLoopIterations = 50;
 
 extern "C" {
 
@@ -95,39 +97,63 @@
     initVendorTagDescriptor();
 
     sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
-    vdesc->copyFrom(*mVendorTagDescriptor);
-    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
-    VendorTagDescriptor::getGlobalVendorTagDescriptor();
 
-    int32_t tagCount = mVendorTagDescriptor->getTagCount();
-    if (tagCount > 0) {
-        uint32_t tagArray[tagCount];
-        mVendorTagDescriptor->getTagArray(tagArray);
-        uint32_t tag;
-        for (int32_t i = 0; i < tagCount; ++i) {
-            tag = tagArray[i];
-            get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
-            get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
-            get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
-            mVendorTagDescriptor->getSectionIndex(tag);
-        }
-        mVendorTagDescriptor->getAllSectionNames();
+    int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
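+    // Each iteration invokes one randomly selected VendorTagDescriptor API.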
+    while (--count > 0) {
+        auto callVendorTagDescriptor = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    int32_t tagCount = mVendorTagDescriptor->getTagCount();
+                    if (tagCount > 0) {
+                        uint32_t tagArray[tagCount];
+                        mVendorTagDescriptor->getTagArray(tagArray);
+                        uint32_t tag;
+                        for (int32_t i = 0; i < tagCount; ++i) {
+                            tag = tagArray[i];
+                            get_local_camera_metadata_section_name_vendor_id(
+                                    tag, kVendorTagDescriptorId);
+                            get_local_camera_metadata_tag_name_vendor_id(tag,
+                                                                         kVendorTagDescriptorId);
+                            get_local_camera_metadata_tag_type_vendor_id(tag,
+                                                                         kVendorTagDescriptorId);
+                            mVendorTagDescriptor->getSectionIndex(tag);
+                        }
+                    }
+                },
+                [&]() {
+                    if (mVendorTagDescriptor->getTagCount() > 0) {
+                        mVendorTagDescriptor->getAllSectionNames();
+                    }
+                },
+                [&]() { vdesc->copyFrom(*mVendorTagDescriptor); },
+                [&]() {
+                    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+                },
+                [&]() { VendorTagDescriptor::getGlobalVendorTagDescriptor(); },
+                [&]() {
+                    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+                    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+                    uint32_t lookupTag;
+                    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+                },
+                [&]() {
+                    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+                    int32_t indentation =
+                            mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+                    mVendorTagDescriptor->dump(fd, verbosity, indentation);
+                    close(fd);
+                },
+        });
+        callVendorTagDescriptor();
     }
 
-    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
-    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
-    uint32_t lookupTag;
-    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
-
-    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
-    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
-    mVendorTagDescriptor->dump(fd, verbosity, indentation);
-
-    invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    // Do not keep invokeReadWrite() APIs in while loop to avoid possible OOM.
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    } else {
+        invokeNewReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor, *mFDP);
+    }
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
-    vdesc.clear();
-    close(fd);
 }
 
 void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
@@ -135,36 +161,52 @@
     uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
     initVendorTagDescriptor();
 
-    mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
-    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
-    VendorTagDescriptorCache::getGlobalVendorTagCache();
-    sp<VendorTagDescriptor> tagDesc;
-    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
-
-    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
-    if (tagCount > 0) {
-        uint32_t tagArray[tagCount];
-        mVendorTagDescriptorCache->getTagArray(tagArray, id);
-        uint32_t tag;
-        for (int32_t i = 0; i < tagCount; ++i) {
-            tag = tagArray[i];
-            get_local_camera_metadata_section_name_vendor_id(tag, id);
-            get_local_camera_metadata_tag_name_vendor_id(tag, id);
-            get_local_camera_metadata_tag_type_vendor_id(tag, id);
-        }
+    int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
+    while (--count > 0) {
+        auto callVendorTagDescriptorCache = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor); },
+                [&]() {
+                    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+                },
+                [&]() { VendorTagDescriptorCache::getGlobalVendorTagCache(); },
+                [&]() {
+                    sp<VendorTagDescriptor> tagDesc;
+                    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+                },
+                [&]() {
+                    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+                    if (tagCount > 0) {
+                        uint32_t tagArray[tagCount];
+                        mVendorTagDescriptorCache->getTagArray(tagArray, id);
+                        uint32_t tag;
+                        for (int32_t i = 0; i < tagCount; ++i) {
+                            tag = tagArray[i];
+                            get_local_camera_metadata_section_name_vendor_id(tag, id);
+                            get_local_camera_metadata_tag_name_vendor_id(tag, id);
+                            get_local_camera_metadata_tag_type_vendor_id(tag, id);
+                        }
+                    }
+                },
+                [&]() {
+                    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+                    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+                    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+                    close(fd);
+                },
+                [&]() { VendorTagDescriptorCache::isVendorCachePresent(id); },
+                [&]() { mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors(); },
+        });
+        callVendorTagDescriptorCache();
     }
 
-    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
-    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
-    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
-
-    invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
-    VendorTagDescriptorCache::isVendorCachePresent(id);
-    mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+    // Do not keep invokeReadWrite() APIs in while loop to avoid possible OOM.
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+    } else {
+        invokeNewReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache, *mFDP);
+    }
     mVendorTagDescriptorCache->clearGlobalVendorTagCache();
-    tagDesc.clear();
-    close(fd);
 }
 
 void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
@@ -177,26 +219,39 @@
         VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
     } else {
         VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
-        int32_t tagCount = vDesc->getTagCount();
-        uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
-        uint32_t badTagArray[tagCount + 1];
-        vDesc->getTagArray(badTagArray);
-        vDesc->getSectionName(badTag);
-        vDesc->getTagName(badTag);
-        vDesc->getTagType(badTag);
-        VendorTagDescriptor::clearGlobalVendorTagDescriptor();
-        VendorTagDescriptor::getGlobalVendorTagDescriptor();
-        VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+
+        int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
+        while (--count > 0) {
+            int32_t tagCount = vDesc->getTagCount();
+            uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+            uint32_t badTagArray[tagCount + 1];
+            auto callVendorTagErrorConditions =
+                    mFDP->PickValueInArray<const std::function<void()>>({
+                            [&]() { vDesc->getTagArray(badTagArray); },
+                            [&]() { vDesc->getSectionName(badTag); },
+                            [&]() { vDesc->getTagName(badTag); },
+                            [&]() { vDesc->getTagType(badTag); },
+                            [&]() { VendorTagDescriptor::clearGlobalVendorTagDescriptor(); },
+                            [&]() { VendorTagDescriptor::getGlobalVendorTagDescriptor(); },
+                            [&]() { VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc); },
+                    });
+            callVendorTagErrorConditions();
+        }
         invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
-        vDesc.clear();
     }
+    vDesc.clear();
 }
 
 void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
     mFDP = new FuzzedDataProvider(data, size);
-    invokeVendorTagDescriptor();
-    invokeVendorTagDescriptorCache();
-    invokeVendorTagErrorConditions();
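+    // Randomly interleave the three fuzzing entry points until the fuzzed input is exhausted.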
+    while (mFDP->remaining_bytes()) {
+        auto invokeVendorTagDescriptorFuzzer = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { invokeVendorTagDescriptor(); },
+                [&]() { invokeVendorTagDescriptorCache(); },
+                [&]() { invokeVendorTagErrorConditions(); },
+        });
+        invokeVendorTagDescriptorFuzzer();
+    }
     delete mFDP;
 }
 
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 0ee8779..e26290f 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -204,7 +204,7 @@
             mInterpolatorType = interpolatorType;
             return NO_ERROR;
         default:
-            ALOGE("invalid interpolatorType: %d", interpolatorType);
+            ALOGE("invalid interpolatorType: %d", static_cast<int>(interpolatorType));
             return BAD_VALUE;
         }
     }
diff --git a/media/Android.mk b/media/Android.mk
deleted file mode 100644
index 220a358..0000000
--- a/media/Android.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 96bf4f5..16beb28 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -1,22 +1,52 @@
+// deprecated
 aconfig_declarations {
     name: "aconfig_mediacodec_flags",
     package: "com.android.media.codec.flags",
+    container: "system",
     srcs: ["mediacodec_flags.aconfig"],
 }
 
+// deprecated
 java_aconfig_library {
     name: "aconfig_mediacodec_flags_java_lib",
     aconfig_declarations: "aconfig_mediacodec_flags",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
+// deprecated
 cc_aconfig_library {
     name: "aconfig_mediacodec_flags_c_lib",
     min_sdk_version: "30",
     vendor_available: true,
+    double_loadable: true,
     apex_available: [
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
     aconfig_declarations: "aconfig_mediacodec_flags",
 }
+
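+// Codec framework flags; new flags belong in codec_fwk.aconfig rather than the deprecated declarations above.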
+aconfig_declarations {
+    name: "aconfig_codec_fwk_flags",
+    package: "android.media.codec",
+    container: "system",
+    srcs: ["codec_fwk.aconfig"],
+}
+
+java_aconfig_library {
+    name: "android.media.codec-aconfig-java",
+    aconfig_declarations: "aconfig_codec_fwk_flags",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+cc_aconfig_library {
+    name: "android.media.codec-aconfig-cc",
+    min_sdk_version: "30",
+    vendor_available: true,
+    double_loadable: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+    aconfig_declarations: "aconfig_codec_fwk_flags",
+}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
new file mode 100644
index 0000000..3092091
--- /dev/null
+++ b/media/aconfig/codec_fwk.aconfig
@@ -0,0 +1,76 @@
+# Codec framework feature flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+
+package: "android.media.codec"
+container: "system"
+
+flag {
+  name: "aidl_hal_input_surface"
+  namespace: "codec_fwk"
+  description: "Feature flag for enabling AIDL HAL InputSurface handling"
+  bug: "201479783"
+}
+
+flag {
+  name: "dynamic_color_aspects"
+  namespace: "codec_fwk"
+  description: "Feature flag for dynamic color aspect support"
+  bug: "297914560"
+}
+
+flag {
+  name: "hlg_editing"
+  namespace: "codec_fwk"
+  description: "Feature flag for HLG editing support"
+  bug: "316397061"
+}
+
+flag {
+  name: "in_process_sw_audio_codec"
+  namespace: "codec_fwk"
+  description: "Feature flag for in-process software audio codec API"
+  bug: "297922713"
+}
+
+flag {
+  name: "in_process_sw_audio_codec_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for in-process software audio codec support"
+  bug: "325520135"
+}
+
+flag {
+  name: "large_audio_frame_finish"
+  namespace: "codec_fwk"
+  description: "Implementation flag for large audio frame finishing tasks"
+  bug: "325512893"
+}
+
+flag {
+  name: "null_output_surface"
+  namespace: "codec_fwk"
+  description: "Feature flag for null output Surface API"
+  bug: "297920102"
+}
+
+flag {
+  name: "null_output_surface_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for null output Surface support"
+  bug: "325550522"
+}
+
+flag {
+  name: "region_of_interest"
+  namespace: "codec_fwk"
+  description: "Feature flag for region of interest API"
+  bug: "299191092"
+}
+
+flag {
+  name: "region_of_interest_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for region of interest support"
+  bug: "325549730"
+}
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index c82ad4d..4d1e5ca 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -1,4 +1,10 @@
 package: "com.android.media.codec.flags"
+container: "system"
+
+# ******************************************************************
+#            !!! DO NOT ADD FURTHER FLAGS TO THIS FILE !!!
+#            !!!     USE codec_fwk.aconfig INSTEAD     !!!
+# ******************************************************************
 
 flag {
   name: "large_audio_frame"
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 1c1ac0e..6d21e97 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -8,18 +8,21 @@
 aconfig_declarations {
     name: "com.android.media.audioserver-aconfig",
     package: "com.android.media.audioserver",
+    container: "system",
     srcs: ["audioserver.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.audio-aconfig",
     package: "com.android.media.audio",
+    container: "system",
     srcs: ["audio.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.aaudio-aconfig",
     package: "com.android.media.aaudio",
+    container: "system",
     srcs: ["aaudio.aconfig"],
 }
 
@@ -43,6 +46,18 @@
     name: "com.android.media.audio-aconfig-cc",
     aconfig_declarations: "com.android.media.audio-aconfig",
     defaults: ["audio-aconfig-cc-defaults"],
+    double_loadable: true,
+    host_supported: true,
+    product_available: true,
+    vendor_available: true,
+    // TODO(b/316909431) native_bridge_supported: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+        "com.android.btservices",
+    ],
+    min_sdk_version: "29",
 }
 
 cc_aconfig_library {
@@ -56,6 +71,12 @@
     aconfig_declarations: "com.android.media.audio-aconfig",
 }
 
+// For CTS usage
+java_aconfig_library {
+    name: "com.android.media.audioserver-aconfig-java",
+    aconfig_declarations: "com.android.media.audioserver-aconfig",
+}
+
 // Framework available flags to follow
 // Care must be taken to avoid namespace conflicts.
 // These flags are accessible outside of the platform! Limit usage to @FlaggedApi wherever possible
@@ -63,22 +84,25 @@
 aconfig_declarations {
     name: "android.media.audio-aconfig",
     package: "android.media.audio",
+    container: "system",
     srcs: ["audio_framework.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 aconfig_declarations {
     name: "android.media.audiopolicy-aconfig",
     package: "android.media.audiopolicy",
+    container: "system",
     srcs: ["audiopolicy_framework.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 aconfig_declarations {
     name: "android.media.midi-aconfig",
     package: "android.media.midi",
+    container: "system",
     srcs: ["midi_flags.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 java_aconfig_library {
@@ -91,6 +115,11 @@
     name: "android.media.audiopolicy-aconfig-java",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
+    min_sdk_version: "VanillaIceCream",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.car.framework",
+    ],
 }
 
 java_aconfig_library {
@@ -99,11 +128,17 @@
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
-filegroup {
+cc_aconfig_library {
+    name: "android.media.audiopolicy-aconfig-cc",
+    aconfig_declarations: "android.media.audiopolicy-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
+aconfig_declarations_group {
     name: "audio-framework-aconfig",
-    srcs: [
-        ":android.media.audio-aconfig-java{.generated_srcjars}",
-        ":android.media.audiopolicy-aconfig-java{.generated_srcjars}",
-        ":android.media.midi-aconfig-java{.generated_srcjars}",
+    java_aconfig_libraries: [
+        "android.media.audio-aconfig-java",
+        "android.media.audiopolicy-aconfig-java",
+        "android.media.midi-aconfig-java",
     ],
 }
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.aaudio"
+container: "system"
 
 flag {
     name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 73cb8ca..cdbadc2 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audio"
+container: "system"
 
 flag {
     name: "alarm_min_volume_zero"
@@ -12,6 +13,13 @@
 }
 
 flag {
+    name: "as_device_connection_failure"
+    namespace: "media_audio"
+    description: "AudioService handles device connection failures."
+    bug: "326597760"
+}
+
+flag {
     name: "bluetooth_mac_address_anonymization"
     namespace: "media_audio"
     description:
@@ -37,8 +45,30 @@
 }
 
 flag {
+    name: "ringer_mode_affects_alarm"
+    namespace: "media_audio"
+    description:
+        "Support a configuration where ringer mode affects alarm stream"
+    bug: "312456558"
+}
+
+flag {
     name: "spatializer_offload"
     namespace: "media_audio"
     description: "Enable spatializer offload"
     bug: "307842941"
 }
+
+flag {
+    name: "stereo_spatialization"
+    namespace: "media_audio"
+    description: "Enable stereo channel mask for spatialization."
+    bug: "303920722"
+}
+
+flag {
+    name: "volume_refactoring"
+    namespace: "media_audio"
+    description: "Refactor the audio volume internal architecture logic"
+    bug: "324152869"
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 294e67d..cfdf1ab 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -21,6 +21,31 @@
     bug: "302323921"
 }
 
+flag {
+    name: "feature_spatial_audio_headtracking_low_latency"
+    namespace: "media_audio"
+    description: "Define feature for low latency headtracking for SA"
+    bug: "324291076"
+}
+
+flag {
+    name: "focus_exclusive_with_recording"
+    namespace: "media_audio"
+    description:
+        "Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
+        "notifications when the focus owner is also recording"
+    bug: "316414750"
+}
+
+flag {
+    name: "foreground_audio_control"
+    namespace: "media_audio"
+    description:
+        "Audio focus gain requires FGS or delegation to "
+        "take effect"
+    bug: "296232417"
+}
+
 # TODO remove
 flag {
     name: "focus_freeze_test_api"
@@ -42,5 +67,35 @@
 Enable the API for providing loudness metadata and CTA-2075 \
 support."
     bug: "298463873"
+    is_exported: true
 }
 
+flag {
+    name: "mute_background_audio"
+    namespace: "media_audio"
+    description: "mute audio playing in background"
+    bug: "296232417"
+}
+
+flag {
+    name: "sco_managed_by_audio"
+    namespace: "media_audio"
+    description: "\
+Enable new implementation of headset profile device connection and \
+SCO audio activation."
+    bug: "265057196"
+}
+
+flag {
+    name: "supported_device_types_api"
+    namespace: "media_audio"
+    description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
+    bug: "307537538"
+}
+
+flag {
+    name: "volume_ringer_api_hardening"
+    namespace: "media_audio"
+    description: "Limit access to volume and ringer SDK APIs in AudioManager"
+    bug: "296232417"
+}
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 833730a..72a1e6c 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,6 +4,31 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.audiopolicy"
+container: "system"
+
+flag {
+    name: "audio_mix_ownership"
+    namespace: "media_audio"
+    description: "Improves ownership model of AudioMixes and the relationship between AudioPolicy and AudioMix."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_policy_ordering"
+    namespace: "media_audio"
+    description: "Orders AudioMixes per registered AudioPolicy."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_test_api"
+    namespace: "media_audio"
+    description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
 
 flag {
     name: "audio_policy_update_mixing_rules_api"
@@ -11,3 +36,25 @@
     description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
     bug: "293874525"
 }
+
+flag {
+    name: "enable_fade_manager_configuration"
+    namespace: "media_audio"
+    description: "Enable Fade Manager Configuration support to determine fade properties"
+    bug: "307354764"
+}
+
+flag {
+    name: "multi_zone_audio"
+    namespace: "media_audio"
+    description: "Enable multi-zone audio support in audio product strategies."
+    bug: "316643994"
+}
+
+flag {
+    name: "record_audio_device_aware_permission"
+    namespace: "media_audio"
+    description: "Enable device-aware permission handling for RECORD_AUDIO permission"
+    bug: "291737188"
+    is_fixed_read_only: true
+}
\ No newline at end of file
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audioserver"
+container: "system"
 
 flag {
     name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..efb643f 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,6 +4,7 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.midi"
+container: "system"
 
 flag {
     name: "virtual_ump"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 60b814e..77418eb 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -28,7 +28,6 @@
 
 #include "media/AidlConversionCppNdk.h"
 
-#include <media/ShmemCompat.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -905,7 +904,7 @@
         case Tag::voiceMask:
             return convert(aidl, mVoice, __func__, "voice");
     }
-    ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
+    ALOGE("%s: unexpected tag value %d", __func__, static_cast<int>(aidl.getTag()));
     return unexpected(BAD_VALUE);
 }
 
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index d3a5755..07c59c7 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -56,6 +56,19 @@
 }
 
 cc_defaults {
+    name: "audio_aidl_conversion_common_default_cpp",
+    shared_libs: [
+        "libbinder",
+        "libshmemcompat",
+        "shared-file-region-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "shared-file-region-aidl-cpp",
+    ],
+}
+
+cc_defaults {
     name: "audio_aidl_conversion_common_default",
     export_include_dirs: ["include"],
     host_supported: true,
@@ -67,17 +80,12 @@
     ],
     shared_libs: [
         "libbase",
-        "libbinder",
         "liblog",
-        "libshmemcompat",
         "libstagefright_foundation",
         "libutils",
-        "shared-file-region-aidl-cpp",
-        "framework-permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "libbase",
-        "shared-file-region-aidl-cpp",
     ],
     cflags: [
         "-Wall",
@@ -113,6 +121,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "latest_android_media_audio_common_types_cpp_export_shared",
     ],
     min_sdk_version: "29",
@@ -223,6 +232,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "audio_aidl_conversion_common_util_default",
         "latest_android_media_audio_common_types_cpp_shared",
         "latest_android_media_audio_common_types_ndk_shared",
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
index 656d76a..7cba011 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -25,12 +25,12 @@
 #define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP
 #endif  // BACKEND_NDK_IMPL
 
+#include <functional>
 #include <limits>
 #include <type_traits>
 #include <utility>
 
 #include <android-base/expected.h>
-#include <binder/Status.h>
 
 #if defined(BACKEND_NDK_IMPL)
 #include <android/binder_auto_utils.h>
@@ -40,6 +40,7 @@
 namespace aidl {
 #else
 #include <binder/Enums.h>
+#include <binder/Status.h>
 #endif  // BACKEND_NDK_IMPL
 namespace android {
 
@@ -374,6 +375,30 @@
  * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
  * can be found from transactionError() or serviceSpecificErrorCode().
  */
+#if defined(BACKEND_NDK_IMPL)
+static inline ::android::status_t statusTFromExceptionCode(binder_exception_t exception) {
+    switch (exception) {
+        case EX_NONE:
+            return ::android::OK;
+        case EX_SECURITY:  // Java SecurityException, rethrows locally in Java
+            return ::android::PERMISSION_DENIED;
+        case EX_BAD_PARCELABLE:  // Java BadParcelableException, rethrows in Java
+        case EX_ILLEGAL_ARGUMENT:  // Java IllegalArgumentException, rethrows in Java
+        case EX_NULL_POINTER:  // Java NullPointerException, rethrows in Java
+            return ::android::BAD_VALUE;
+        case EX_ILLEGAL_STATE:  // Java IllegalStateException, rethrows in Java
+        case EX_UNSUPPORTED_OPERATION:  // Java UnsupportedOperationException, rethrows
+            return ::android::INVALID_OPERATION;
+        case EX_PARCELABLE:  // Java bootclass loader (not standard exception), rethrows
+        case EX_NETWORK_MAIN_THREAD:  // Java NetworkOnMainThreadException, rethrows
+        case EX_TRANSACTION_FAILED: // Native - see error code
+        case EX_SERVICE_SPECIFIC:   // Java ServiceSpecificException,
+                                            // rethrows in Java with integer error code
+            return ::android::UNKNOWN_ERROR;
+    }
+    return ::android::UNKNOWN_ERROR;
+}
+#else
 static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
     using namespace ::android::binder;
     switch (exceptionCode) {
@@ -398,6 +423,7 @@
     }
     return ::android::UNKNOWN_ERROR;
 }
+#endif  // BACKEND_NDK_IMPL
 
 /**
  * Return the equivalent Android ::android::status_t from a binder status.
@@ -410,6 +436,7 @@
  *
  * return_type method(type0 param0, ...)
  */
+#if !defined(BACKEND_NDK_IMPL)
 static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
     return status.isOk() ? ::android::OK // check ::android::OK,
         : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
@@ -418,6 +445,7 @@
         ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
                                                     // standard Java exception (fromExceptionCode)
 }
+#endif
 
 #if defined(BACKEND_NDK_IMPL)
 static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
@@ -443,6 +471,7 @@
  * This is used for methods not returning an explicit status_t,
  * where Java callers expect an exception, not an integer return value.
  */
+#if !defined(BACKEND_NDK_IMPL)
 static inline ::android::binder::Status binderStatusFromStatusT(
         ::android::status_t status, const char *optionalMessage = nullptr) {
     const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
@@ -470,6 +499,7 @@
     // throw a ServiceSpecificException.
     return Status::fromServiceSpecificError(status, emptyIfNull);
 }
+#endif
 
 } // namespace aidl_utils
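For context, a minimal sketch of how the NDK-backend overloads kept above are typically used when adapting an AIDL HAL call to a legacy status_t. IMyHal and setVolume() are assumed names for illustration only:

// Hypothetical adapter; statusTFromBinderStatus here resolves to the
// ::ndk::ScopedAStatus overload guarded by BACKEND_NDK_IMPL above.
android::status_t setVolumeOrError(const std::shared_ptr<IMyHal>& hal, float volume) {
    ndk::ScopedAStatus status = hal->setVolume(volume);
    return statusTFromBinderStatus(status);
}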
 
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
index 60727b4..f78243e 100644
--- a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+#define LOG_TAG "AidlConversionNdkTests"
 #include <iostream>
 #include <type_traits>
 
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 2030dc7..479e13a 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -71,7 +72,6 @@
         "frameworks/av/services/medialog",
         "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 
-
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index c7a1bfd..55847f4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -169,6 +169,11 @@
                 "%s: AudioSystem already has an AudioFlinger instance!", __func__);
         const auto aps = sp<AudioPolicyService>::make();
         ALOGD("%s: AudioPolicy created", __func__);
+        ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+                 "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+        // Start initialization of internally managed audio objects such as Device Effects.
+        aps->onAudioSystemReady();
 
         // Add AudioFlinger and AudioPolicy to ServiceManager.
         sp<IServiceManager> sm = defaultServiceManager();
diff --git a/media/codec2/Android.mk b/media/codec2/Android.mk
deleted file mode 100644
index 82d739f..0000000
--- a/media/codec2/Android.mk
+++ /dev/null
@@ -1,48 +0,0 @@
-# =============================================================================
-# DOCUMENTATION GENERATION
-# =============================================================================
-C2_ROOT := $(call my-dir)
-
-C2_DOCS_ROOT := $(OUT_DIR)/target/common/docs/codec2
-
-C2_OUT_TEMP := $(PRODUCT_OUT)/gen/ETC/Codec2-docs_intermediates
-
-C2_DOXY := $(or $(shell command -v doxygen),\
-		$(shell command -v /Applications/Doxygen.app/Contents/Resources/doxygen))
-
-.PHONY: check-doxygen
-check-doxygen:
-ifndef C2_DOXY
-	$(error 'doxygen is not available')
-endif
-
-$(C2_OUT_TEMP)/doxy-api.config: $(C2_ROOT)/docs/doxygen.config
-	# only document include directory, no internal sections
-	sed 's/\(^INPUT *=.*\)/\1include\//; \
-	s/\(^INTERNAL_DOCS *= *\).*/\1NO/; \
-	s/\(^ENABLED_SECTIONS *=.*\)INTERNAL\(.*\).*/\1\2/; \
-	s:\(^OUTPUT_DIRECTORY *= \)out:\1'$(OUT_DIR)':;' \
-		$(C2_ROOT)/docs/doxygen.config > $@
-
-$(C2_OUT_TEMP)/doxy-internal.config: $(C2_ROOT)/docs/doxygen.config
-	sed 's:\(^OUTPUT_DIRECTORY *= \)out\(.*\)api:\1'$(OUT_DIR)'\2internal:;' \
-		$(C2_ROOT)/docs/doxygen.config > $@
-
-.PHONY: docs-api
-docs-api: $(C2_OUT_TEMP)/doxy-api.config check-doxygen
-	echo API docs are building in $(C2_DOCS_ROOT)/api
-	rm -rf $(C2_DOCS_ROOT)/api
-	mkdir -p $(C2_DOCS_ROOT)/api
-	$(C2_DOXY) $(C2_OUT_TEMP)/doxy-api.config
-
-.PHONY: docs-internal
-docs-internal: $(C2_OUT_TEMP)/doxy-internal.config check-doxygen
-	echo Internal docs are building in $(C2_DOCS_ROOT)/internal
-	rm -rf $(C2_DOCS_ROOT)/internal
-	mkdir -p $(C2_DOCS_ROOT)/internal
-	$(C2_DOXY) $(C2_OUT_TEMP)/doxy-internal.config
-
-.PHONY: docs-all
-docs-all: docs-api docs-internal
-
-include $(call all-makefiles-under,$(call my-dir))
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 8a894f3..b911e11 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -25,5 +25,8 @@
         }
       ]
     }
+  ],
+  "postsubmit": [
+    { "name": "c2aidl_gtracker_test"}
   ]
 }
diff --git a/media/codec2/components/OWNERS b/media/codec2/components/OWNERS
new file mode 100644
index 0000000..453999a
--- /dev/null
+++ b/media/codec2/components/OWNERS
@@ -0,0 +1 @@
+kyslov@google.com
\ No newline at end of file
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index d1b08bd..c770d0c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -35,14 +35,14 @@
 
 #define FILEREAD_MAX_LAYERS 2
 
-#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_CUT   1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY   /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
 #define DRC_DEFAULT_MOBILE_DRC_EFFECT 3  /* MPEG-D DRC effect type; 3 => Limited playback range */
 #define DRC_DEFAULT_MOBILE_DRC_ALBUM  0  /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
 #define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS (0.25) /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
-#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
 #define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
 // names of properties that can be used to override the default DRC settings
 #define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
@@ -145,9 +145,13 @@
                 .withSetter(ProfileLevelSetter)
                 .build());
 
+        C2Config::drc_compression_mode_t defaultCompressionMode =
+                property_get_int32(PROP_DRC_OVERRIDE_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY) == 1
+                        ? C2Config::DRC_COMPRESSION_HEAVY
+                        : C2Config::DRC_COMPRESSION_LIGHT;
         addParameter(
                 DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE)
-                .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY))
+                .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, defaultCompressionMode))
                 .withFields({
                     C2F(mDrcCompressMode, value).oneOf({
                             C2Config::DRC_COMPRESSION_ODM_DEFAULT,
@@ -158,37 +162,48 @@
                 .withSetter(Setter<decltype(*mDrcCompressMode)>::StrictValueWithNoDeps)
                 .build());
 
+
+        float defaultRefLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_REF_LEVEL,
+                                                           DRC_DEFAULT_MOBILE_REF_LEVEL);
         addParameter(
                 DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL)
-                .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL))
+                .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, defaultRefLevel))
                 .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcTargetRefLevel)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultEncLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_ENC_LEVEL,
+                                                           DRC_DEFAULT_MOBILE_ENC_LEVEL);
         addParameter(
                 DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL)
-                .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL))
+                .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, defaultEncLevel))
                 .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcEncTargetLevel)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultDrcBoost =
+                property_get_int32(PROP_DRC_OVERRIDE_BOOST, DRC_DEFAULT_MOBILE_DRC_BOOST) / 127.;
         addParameter(
                 DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR)
-                .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST))
+                .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, defaultDrcBoost))
                 .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)})
                 .withSetter(Setter<decltype(*mDrcBoostFactor)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultDrcCut =
+                property_get_int32(PROP_DRC_OVERRIDE_CUT, DRC_DEFAULT_MOBILE_DRC_CUT) / 127.;
         addParameter(
                 DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR)
-                .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT))
+                .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, defaultDrcCut))
                 .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)})
                 .withSetter(Setter<decltype(*mDrcAttenuationFactor)>::StrictValueWithNoDeps)
                 .build());
 
+        C2Config::drc_effect_type_t defaultDrcEffectType = (C2Config::drc_effect_type_t)
+                property_get_int32(PROP_DRC_OVERRIDE_EFFECT, DRC_DEFAULT_MOBILE_DRC_EFFECT);
         addParameter(
                 DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE)
-                .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE))
+                .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, defaultDrcEffectType))
                 .withFields({
                     C2F(mDrcEffectType, value).oneOf({
                             C2Config::DRC_EFFECT_ODM_DEFAULT,
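As a quick sanity check of the property-to-tuning conversions introduced above, using the default macro values (a standalone sketch, not part of the change):

// Each property step is -0.25 dB, so the default ref level of 64 steps is -16 dB,
// and the default boost/cut value of 127 corresponds to a factor of 1.0.
static_assert(-0.25 * 64 == -16.0, "default ref level maps back to -16 dB");
static_assert(127 / 127. == 1.0, "default boost/cut maps to factor 1.0");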
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 71909e5..7c9d3e8 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -338,6 +338,19 @@
 }
 
 c2_status_t C2SoftAomEnc::onStop() {
+    IntfImpl::Lock lock = mIntf->lock();
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
+    lock.unlock();
+    if (requestSync != mRequestSync) {
+        // we can handle IDR immediately
+        if (requestSync->value) {
+            // unset request
+            C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
+        }
+        mRequestSync = requestSync;
+    }
     onRelease();
     return C2_OK;
 }
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 96a4c4a..3385b95 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -416,6 +416,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+    s_create_ip.u4_keep_threads_active = 1;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index e424860..80a5e67 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -152,6 +152,17 @@
                 .build());
 
         addParameter(
+                DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                .withDefault(new C2StreamBitrateModeTuning::output(0u, C2Config::BITRATE_VARIABLE))
+                .withFields({C2F(mBitrateMode, value).oneOf({
+                                        C2Config::BITRATE_CONST,
+                                        C2Config::BITRATE_VARIABLE,
+                                        C2Config::BITRATE_IGNORE})
+                        })
+                .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                 .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
                 .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
@@ -536,6 +547,9 @@
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
     std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
+    }
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
@@ -552,6 +566,7 @@
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
@@ -1154,6 +1169,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mSize = mIntf->getSize_l();
+        mBitrateMode = mIntf->getBitrateMode_l();
         mBitrate = mIntf->getBitrate_l();
         mFrameRate = mIntf->getFrameRate_l();
         mIntraRefresh = mIntf->getIntraRefresh_l();
@@ -1326,8 +1342,23 @@
         } else {
             ps_init_ip->u4_enable_recon = 0;
         }
+
+        switch (mBitrateMode->value) {
+            case C2Config::BITRATE_IGNORE:
+                ps_init_ip->e_rc_mode = IVE_RC_NONE;
+                break;
+            case C2Config::BITRATE_CONST:
+                ps_init_ip->e_rc_mode = IVE_RC_CBR_NON_LOW_DELAY;
+                break;
+            case C2Config::BITRATE_VARIABLE:
+                ps_init_ip->e_rc_mode = IVE_RC_STORAGE;
+                break;
+            default:
+                ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+                break;
+        }
         ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
         ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
         ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
         ps_init_ip->u4_num_bframes = mBframes;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index cde6604..33d166f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -191,6 +191,7 @@
     std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
 
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 3f96cb3..76680a3 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -42,6 +42,8 @@
 
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
 
+constexpr uint32_t kOutputDelay = 4;
+
 class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
   public:
     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
@@ -239,6 +241,13 @@
                              .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
                              .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                              .build());
+
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+                .build());
     }
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
@@ -450,13 +459,6 @@
     if (mDav1dCtx) {
         Dav1dPicture p;
 
-        while (mDecodedPictures.size() > 0) {
-            p = mDecodedPictures.front();
-            mDecodedPictures.pop_front();
-
-            dav1d_picture_unref(&p);
-        }
-
         int res = 0;
         while (true) {
             memset(&p, 0, sizeof(p));
@@ -527,6 +529,8 @@
             android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
     if (numThreads > 0) lib_settings.n_threads = numThreads;
 
+    lib_settings.max_frame_delay = kOutputDelay;
+
     int res = 0;
     if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
         ALOGE("dav1d_open failed. status: %d.", res);
@@ -540,15 +544,6 @@
 
 void C2SoftDav1dDec::destroyDecoder() {
     if (mDav1dCtx) {
-        Dav1dPicture p;
-        while (mDecodedPictures.size() > 0) {
-            memset(&p, 0, sizeof(p));
-            p = mDecodedPictures.front();
-            mDecodedPictures.pop_front();
-
-            dav1d_picture_unref(&p);
-        }
-
         dav1d_close(&mDav1dCtx);
         mDav1dCtx = nullptr;
         mOutputBufferIndex = 0;
@@ -572,19 +567,24 @@
 }
 
 void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
-                                const std::shared_ptr<C2GraphicBlock>& block) {
+                                const std::shared_ptr<C2GraphicBlock>& block,
+                                const Dav1dPicture &img) {
     std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
     {
         IntfImpl::Lock lock = mIntf->lock();
         buffer->setInfo(mIntf->getColorAspects_l());
     }
-    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+
+    auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
         uint32_t flags = 0;
         if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
             (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
             flags |= C2FrameData::FLAG_END_OF_STREAM;
             ALOGV("signalling end_of_stream.");
         }
+        getHDRStaticParams(&img, work);
+        getHDR10PlusInfoData(&img, work);
+
         work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
         work->worklets.front()->output.buffers.clear();
         work->worklets.front()->output.buffers.push_back(buffer);
@@ -598,10 +598,6 @@
     }
 }
 
-static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
-    return;
-}
-
 void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                              const std::shared_ptr<C2BlockPool>& pool) {
     work->result = C2_OK;
@@ -652,19 +648,60 @@
             if (res == 0) {
                 ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
                       seq.max_height, (long)in_frameIndex);
+                if (seq.max_width != mWidth || seq.max_height != mHeight) {
+                    drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+                    mWidth = seq.max_width;
+                    mHeight = seq.max_height;
+
+                    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+                    std::vector<std::unique_ptr<C2SettingResult>> failures;
+                    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+                    if (err == C2_OK) {
+                        work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+                    } else {
+                        ALOGE("Config update size failed");
+                        mSignalledError = true;
+                        work->result = C2_CORRUPTED;
+                        work->workletsProcessed = 1u;
+                        return;
+                    }
+                }
             }
 
+            // insert OBU TD if it is not present.
+            // TODO: b/286852962
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
             Dav1dData data;
 
-            res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
-            if (res != 0) {
-                ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
                 i_ret = -1;
+
             } else {
                 data.m.timestamp = in_frameIndex;
-                // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
-                //       inSize, frameIndex, data.m.timestamp);
 
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
 
                 // Dump the bitstream data (inputBuffer) if dumping is enabled.
 #ifdef FILE_DUMP_ENABLE
@@ -672,6 +709,7 @@
 #endif
 
                 bool b_draining = false;
+                int res;
 
                 do {
                     res = dav1d_send_data(mDav1dCtx, &data);
@@ -685,39 +723,9 @@
                         break;
                     }
 
-                    bool b_output_error = false;
+                    outputBuffer(pool, work);
 
-                    do {
-                        Dav1dPicture img;
-                        memset(&img, 0, sizeof(img));
-
-                        res = dav1d_get_picture(mDav1dCtx, &img);
-                        if (res == 0) {
-                            mDecodedPictures.push_back(img);
-
-                            if (!end_of_stream) break;
-                        } else if (res == DAV1D_ERR(EAGAIN)) {
-                            /* the decoder needs more data to be able to output something.
-                             * if there is more data pending, continue the loop below or
-                             * otherwise break */
-                            if (data.sz != 0) res = 0;
-                            break;
-                        } else {
-                            ALOGE("warning! Decoder error %d!", res);
-                            b_output_error = true;
-                            break;
-                        }
-                    } while (res == 0);
-
-                    if (b_output_error) break;
-
-                    /* on drain, we must ignore the 1st EAGAIN */
-                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
-                        (end_of_stream)) {
-                        b_draining = true;
-                        res = 0;
-                    }
-                } while (res == 0 && ((data.sz != 0) || b_draining));
+                } while (res == DAV1D_ERR(EAGAIN));
 
                 if (data.sz > 0) {
                     ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
@@ -739,8 +747,6 @@
         }
     }
 
-    (void)outputBuffer(pool, work);
-
     if (end_of_stream) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
         mSignalledOutputEos = true;
@@ -749,7 +755,7 @@
     }
 }
 
-void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
                                         const std::unique_ptr<C2Work>& work) {
     C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
     bool infoPresent = false;
@@ -813,7 +819,7 @@
     }
 }
 
-void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
                                           const std::unique_ptr<C2Work>& work) {
     if (picture != nullptr) {
         if (picture->itut_t35 != nullptr) {
@@ -853,7 +859,7 @@
     }
 }
 
-void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
     VuiColorAspects vuiColorAspects;
 
     if (picture) {
@@ -924,53 +930,16 @@
     memset(&img, 0, sizeof(img));
 
     int res = 0;
-    if (mDecodedPictures.size() > 0) {
-        img = mDecodedPictures.front();
-        mDecodedPictures.pop_front();
-        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
-        // outputBuffer.",img.m.timestamp,img.m.timestamp);
-    } else {
-        res = dav1d_get_picture(mDav1dCtx, &img);
-        if (res == 0) {
-            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
-            // outputBuffer.",img.m.timestamp,img.m.timestamp);
-        } else {
-            ALOGE("failed to get a picture from dav1d for outputBuffer.");
-        }
-    }
-
+    res = dav1d_get_picture(mDav1dCtx, &img);
     if (res == DAV1D_ERR(EAGAIN)) {
-        ALOGD("Not enough data to output a picture.");
+        ALOGV("Not enough data to output a picture.");
         return false;
-    }
-    if (res != 0) {
+    } else if (res != 0) {
         ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
         return false;
     }
 
-    const int width = img.p.w;
-    const int height = img.p.h;
-    if (width != mWidth || height != mHeight) {
-        mWidth = width;
-        mHeight = height;
-
-        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
-        std::vector<std::unique_ptr<C2SettingResult>> failures;
-        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
-        if (err == C2_OK) {
-            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
-        } else {
-            ALOGE("Config update size failed");
-            mSignalledError = true;
-            work->result = C2_CORRUPTED;
-            work->workletsProcessed = 1u;
-            return false;
-        }
-    }
-
     getVuiParams(&img);
-    getHDRStaticParams(&img, work);
-    getHDR10PlusInfoData(&img, work);
 
     // out_frameIndex that the decoded picture returns from dav1d.
     int64_t out_frameIndex = img.m.timestamp;
@@ -1156,9 +1125,8 @@
                              convFormat);
     }
 
+    finishWork(out_frameIndex, work, std::move(block), img);
     dav1d_picture_unref(&img);
-
-    finishWork(out_frameIndex, work, std::move(block));
     block = nullptr;
     return true;
 }
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index e3d2a93..5d2a725 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -58,7 +58,6 @@
     int mOutputBufferIndex = 0;
 
     Dav1dContext* mDav1dCtx = nullptr;
-    std::deque<Dav1dPicture> mDecodedPictures;
 
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
@@ -101,12 +100,13 @@
     nsecs_t mTimeEnd = 0;    // Time at the end of decode()
 
     bool initDecoder();
-    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
-    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
-    void getVuiParams(Dav1dPicture* picture);
+    void getHDRStaticParams(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(const Dav1dPicture* picture);
     void destroyDecoder();
     void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
-                    const std::shared_ptr<C2GraphicBlock>& block);
+                    const std::shared_ptr<C2GraphicBlock>& block,
+                    const Dav1dPicture &img);
     // Sets |work->result| and mSignalledError. Returns false.
     void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
     bool allocTmpFrameBuffer(size_t size);
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 15d6dcd..81db2a1 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -407,6 +407,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+    s_create_ip.u4_keep_threads_active = 1;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 439323c..491098d 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -433,6 +433,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
+    s_fill_mem_ip.u4_keep_threads_active = 1;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -474,6 +475,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
+    s_init_ip.u4_keep_threads_active = 1;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index a03d4e2..ea13071 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -65,7 +65,7 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::output(0u, 2))
-                .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+                .withFields({C2F(mChannelCount, value).inRange(1, 12)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index e903069..0384e2e 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -29,6 +29,12 @@
 #define INT32_MAX   2147483647
 #endif
 
+/* Quantization param values defined by the spec */
+#define VPX_QP_MIN 0
+#define VPX_QP_MAX 63
+#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
+#define VPX_QP_DEFAULT_MAX VPX_QP_MAX
+
 namespace android {
 
 C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
@@ -197,6 +203,20 @@
             })
             .withSetter(CodedColorAspectsSetter, mColorAspects)
             .build());
+
+    addParameter(
+            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                    0 /* flexCount */, 0u /* stream */))
+            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                            {C2Config::I_FRAME, C2Config::P_FRAME}),
+                         C2F(mPictureQuantization, m.values[0].min).inRange(
+                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
+                         C2F(mPictureQuantization, m.values[0].max).inRange(
+                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
+            .withSetter(PictureQuantizationSetter)
+            .build());
+
 }
 
 C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
@@ -330,6 +350,58 @@
     double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
     return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
 }
+
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
+                                                     C2P<C2StreamPictureQuantizationTuning::output>
+                                                     &me) {
+    (void)mayBlock;
+    // these are the values we are going to set, so default them
+    // to the codec's DEFAULT values
+    int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
+    int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+        // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
+        // cases where layer.min > layer.max
+        int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
+        int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layerMax;
+            iMin = layerMin;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layerMax;
+            pMin = layerMin;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        }
+    }
+    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+          iMin, iMax, pMin, pMax);
+
+    // vpx library takes same range for I/P picture type
+    int32_t maxFrameQP = std::min({iMax, pMax});
+    int32_t minFrameQP = std::max({iMin, pMin});
+    if (minFrameQP > maxFrameQP) {
+        minFrameQP = maxFrameQP;
+    }
+    // put them back into the structure
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+    }
+    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+          minFrameQP, maxFrameQP);
+    return C2R::Ok();
+}
+
 C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                                C2P<C2StreamColorAspectsInfo::input>& me) {
     (void)mayBlock;
@@ -453,6 +525,7 @@
         mRequestSync = mIntf->getRequestSync_l();
         mLayering = mIntf->getTemporalLayers_l();
         mTemporalLayers = mLayering->m.layerCount;
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
     switch (mBitrateMode->value) {
@@ -466,6 +539,18 @@
             break;
     }
 
+    if (mQpBounds->flexCount() > 0) {
+        // read min max qp for sequence
+        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                mMaxQuantizer = layer.max;
+                mMinQuantizer = layer.min;
+                break;
+            }
+        }
+    }
+
     setCodecSpecificInterface();
     if (!mCodecInterface) goto CleanUp;
 
@@ -964,7 +1049,7 @@
             memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
             ++mNumInputFrames;
 
-            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+            ALOGV("bytes generated %zu", encoded_packet->data.frame.sz);
             uint32_t flags = 0;
             if (eos) {
                 flags |= C2FrameData::FLAG_END_OF_STREAM;
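To make the collapsing behavior of PictureQuantizationSetter above concrete, a short sketch; the requested QP values are assumptions for illustration:

std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
        C2StreamPictureQuantizationTuning::output::AllocShared(2 /* flexCount */, 0u /* stream */);
qp->m.values[0].type_ = C2Config::I_FRAME;
qp->m.values[0].min = 10;
qp->m.values[0].max = 40;
qp->m.values[1].type_ = C2Config::P_FRAME;
qp->m.values[1].min = 20;
qp->m.values[1].max = 50;
// After the setter runs: maxFrameQP = min(40, 50) = 40 and minFrameQP = max(10, 20) = 20,
// so both entries end up as [20, 40] because libvpx takes a single QP range.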
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index bfb4444..980de04 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -219,6 +219,7 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamTemporalLayeringTuning::output> mLayering;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 
      C2_DO_NOT_COPY(C2SoftVpxEnc);
 };
@@ -250,6 +251,9 @@
 
     static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me);
+
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
     std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
@@ -269,6 +273,9 @@
     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
         return mCodedColorAspects;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
     uint32_t getSyncFramePeriod() const;
     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
@@ -287,6 +294,7 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 }  // namespace android
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9a3399d..785cdf2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -160,6 +160,10 @@
     kParamIndexSecureMode,
     kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
 
+    /* multiple access unit support */
+    kParamIndexLargeFrame,
+    kParamIndexAccessUnitInfos, // struct
+
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
 
@@ -1114,6 +1118,36 @@
 constexpr char C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE[] = "input.buffers.max-size";
 constexpr char C2_PARAMKEY_OUTPUT_MAX_BUFFER_SIZE[] = "output.buffers.max-size";
 
+/**
+ * Large frame struct
+ *
+ * This structure describes the size limits for large frames (frames with multiple
+ * access units.)
+ */
+struct C2LargeFrameStruct {
+    uint32_t maxSize;         ///< maximum size of the buffer in bytes
+    uint32_t thresholdSize;   ///< size threshold for the buffer in bytes. The buffer is considered
+                              ///< full as soon as its size reaches or surpasses this limit.
+    C2LargeFrameStruct()
+        : maxSize(0),
+          thresholdSize(0) {}
+
+    C2LargeFrameStruct(uint32_t maxSize_, uint32_t thresholdSize_)
+        : maxSize(maxSize_), thresholdSize(thresholdSize_) {}
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(LargeFrame)
+    C2FIELD(maxSize, "max-size")
+    C2FIELD(thresholdSize, "threshold-size")
+};
+
+/**
+ * This tuning controls the size limits for large output frames for the component.
+ * The default value for this tuning is platform specific.
+ */
+typedef C2StreamParam<C2Tuning, C2LargeFrameStruct, kParamIndexLargeFrame>
+        C2LargeFrame;
+constexpr char C2_PARAMKEY_OUTPUT_LARGE_FRAME[] = "output.large-frame";
+
 /* ---------------------------------------- misc. state ---------------------------------------- */
 
 /**
@@ -2146,6 +2180,49 @@
         C2StreamAudioFrameSizeInfo;
 constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
 
+/**
+ * Information for an access unit in a large frame (containing multiple access units)
+ */
+struct C2AccessUnitInfosStruct {
+
+    inline C2AccessUnitInfosStruct() {
+        memset(this, 0, sizeof(*this));
+    }
+
+    inline C2AccessUnitInfosStruct(
+            uint32_t flags_,
+            uint32_t size_,
+            int64_t timestamp_)
+        : flags(flags_),
+          size(size_),
+          timestamp(timestamp_) { }
+
+    uint32_t flags; ///< flags for the access-unit
+    uint32_t size; ///< size of the access-unit
+    int64_t timestamp; ///< timestamp in us for the access-unit
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(AccessUnitInfos)
+    C2FIELD(flags, "flags")
+    C2FIELD(size, "size")
+    C2FIELD(timestamp, "timestamp")
+};
+
+/**
+ * Multiple access unit support (e.g. large audio frames)
+ *
+ * If supported by a component, multiple access units may be contained
+ * in a single work item. For now this is only defined for linear buffers.
+ * The metadata indicates the access-unit boundaries in a single buffer.
+ * The boundary of each access-unit is marked by its size; the next
+ * access-unit immediately follows.
+ */
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2AccessUnitInfosStruct>,
+                kParamIndexAccessUnitInfos>
+        C2AccessUnitInfos;
+
+constexpr char C2_PARAMKEY_INPUT_ACCESS_UNIT_INFOS[] = "input.access-unit-infos";
+constexpr char C2_PARAMKEY_OUTPUT_ACCESS_UNIT_INFOS[] = "output.access-unit-infos";
+
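A brief sketch of how the two new params fit together; the sizes and timestamps below are illustrative assumptions:

// Configure output large-frame limits on a component.
C2LargeFrame::output largeFrame(0u /* stream */, 512 * 1024 /* maxSize */,
                                256 * 1024 /* thresholdSize */);

// Describe two access units packed back-to-back in one linear buffer.
std::shared_ptr<C2AccessUnitInfos::input> infos =
        C2AccessUnitInfos::input::AllocShared(2 /* flexCount */, 0u /* stream */);
infos->m.values[0] = C2AccessUnitInfosStruct(0 /* flags */, 4096 /* size */, 0 /* timestamp us */);
infos->m.values[1] = C2AccessUnitInfosStruct(0, 4096, 21333);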
 /* --------------------------------------- AAC components --------------------------------------- */
 
 /**
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index e938f96..387d2b8 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -427,7 +427,9 @@
     inline bool operator==(const C2Param &o) const {
         return equals(o) && memcmp(this, &o, _mSize) == 0;
     }
+#if __cplusplus < 202002
     inline bool operator!=(const C2Param &o) const { return !operator==(o); }
+#endif
 
     /// safe(r) type cast from pointer and size
     inline static C2Param* From(void *addr, size_t len) {
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index d578820..1805464 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -212,6 +212,26 @@
     }
 };
 
+/// Define equality (and inequality) operators for params.
+#if __cplusplus < 202002
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+    inline bool operator==(const _Type &o) const { \
+        return this->T::operator==(o); \
+    } \
+    inline bool operator!=(const _Type &o) const { \
+        return !operator==(o); \
+    }
+
+#else
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+    inline bool operator==(const _Type &o) const { \
+        return this->T::operator==(o); \
+    }
+
+#endif
+
 /// Define From() cast operators for params.
 #define DEFINE_CAST_OPERATORS(_Type) \
     inline static _Type* From(C2Param *other) { \
@@ -409,6 +429,7 @@
         template<typename ...Args>
         inline input(const Args(&... args)) : T(sizeof(_Type), input::PARAM_TYPE), S(args...) { }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_CAST_OPERATORS(input)
 
     };
@@ -421,6 +442,7 @@
         template<typename ...Args>
         inline output(const Args(&... args)) : T(sizeof(_Type), output::PARAM_TYPE), S(args...) { }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_CAST_OPERATORS(output)
     };
 };
@@ -479,6 +501,7 @@
     public:
         S m; ///< wrapped flexible structure
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_FLEXIBLE_METHODS(input, S)
         DEFINE_CAST_OPERATORS(input)
     };
@@ -495,6 +518,7 @@
     public:
         S m; ///< wrapped flexible structure
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_FLEXIBLE_METHODS(output, S)
         DEFINE_CAST_OPERATORS(output)
     };
@@ -560,6 +584,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_CAST_OPERATORS(input)
     };
 
@@ -578,6 +603,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_CAST_OPERATORS(output)
     };
 };
@@ -648,6 +674,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_FLEXIBLE_METHODS(input, S)
         DEFINE_CAST_OPERATORS(input)
     };
@@ -670,6 +697,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_FLEXIBLE_METHODS(output, S)
         DEFINE_CAST_OPERATORS(output)
     };
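
For reference, a short sketch of how the generated operators behave on a concrete parameter type; the comparison forwards to `C2Param::operator==`, i.e. a matching header plus byte-wise payload equality. The values are illustrative:

    // Illustrative comparison of two typed params through the generated operators.
    #include <C2Config.h>

    static void compareParams() {
        C2LargeFrame::output a(0u /* stream */, 4096u, 2048u);
        C2LargeFrame::output b(0u /* stream */, 4096u, 2048u);
        bool same = (a == b);    // true: same index, size and payload
        b.thresholdSize = 1024u;
        bool differ = (a != b);  // true: explicit operator!= pre-C++20, rewritten in C++20
        (void)same; (void)differ;
    }
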
diff --git a/media/codec2/docs/doxygen.config b/media/codec2/docs/doxygen.config
index 5c3bea3..ab8b53b 100644
--- a/media/codec2/docs/doxygen.config
+++ b/media/codec2/docs/doxygen.config
@@ -162,7 +162,7 @@
 # will be relative from the directory where doxygen is started.
 # This tag requires that the tag FULL_PATH_NAMES is set to YES.
 
-STRIP_FROM_PATH        = frameworks/av/media/libstagefright/codec2
+STRIP_FROM_PATH        = frameworks/av/media/codec2
 
 # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
 # path mentioned in the documentation of a class, which tells the reader which
@@ -781,7 +781,7 @@
 # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
 # Note: If this tag is empty the current directory is searched.
 
-INPUT                  = frameworks/av/media/libstagefright/codec2/
+INPUT                  = frameworks/av/media/codec2/
 
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
@@ -897,7 +897,7 @@
 # need to set EXTENSION_MAPPING for the extension otherwise the files are not
 # properly processed by doxygen.
 
-INPUT_FILTER           = frameworks/av/media/libstagefright/codec2/docs/doxyfilter.sh
+INPUT_FILTER           = frameworks/av/media/codec2/docs/doxyfilter.sh
 
 # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
 # basis. Doxygen will compare the file name with each pattern and apply the
diff --git a/media/codec2/doxygen.sh b/media/codec2/doxygen.sh
new file mode 100755
index 0000000..ca5aeed
--- /dev/null
+++ b/media/codec2/doxygen.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# =============================================================================
+# DOCUMENTATION GENERATION
+# =============================================================================
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+  echo "error: Android build is not set up. Run this command after lunch." >&2
+  exit 2
+fi
+
+OUT_DIR=$ANDROID_BUILD_TOP/out
+
+# Codec 2.0 source and target paths
+C2_ROOT=$(dirname "$0")
+C2_DOCS_ROOT=$OUT_DIR/target/common/docs/codec2
+C2_OUT_TEMP=$ANDROID_PRODUCT_OUT/gen/ETC/Codec2-docs_intermediates
+
+# Doxygen path
+DOXY=$(which doxygen)
+DOXY_MAC="/Applications/Doxygen.app/Contents/Resources/doxygen"
+if [ -z "$DOXY" -a -x "$DOXY_MAC" ]; then
+  DOXY=$DOXY_MAC
+fi
+
+if [ -z "$DOXY" ]; then
+  echo "error: doxygen is not available" >&2
+  exit 2
+fi
+
+# Create doxygen config
+# ---------------------
+gen_doxy() {
+  local variant=$1
+  local variant_lc=$(echo $variant | tr A-Z a-z)
+  mkdir -p $C2_OUT_TEMP
+  if [ "$variant_lc" == "api" ]; then
+    # only document include directory, no internal sections
+    sed 's/\(^INPUT *=.*\)/\1core\/include\//;
+      s/\(^INTERNAL_DOCS *= *\).*/\1NO/;
+      s/\(^ENABLED_SECTIONS *=.*\)INTERNAL\(.*\).*/\1\2/;
+      s:\(^OUTPUT_DIRECTORY *= \)out\(.*\)api:\1'$OUT_DIR'\2'$variant_lc':;' \
+      $C2_ROOT/docs/doxygen.config > $C2_OUT_TEMP/doxy-$variant_lc.config
+
+    ls -la $C2_OUT_TEMP/doxy-$variant_lc.config
+  else
+    sed 's:\(^OUTPUT_DIRECTORY *= \)out\(.*\)api:\1'$OUT_DIR'\2'$variant_lc':;' \
+      $C2_ROOT/docs/doxygen.config > $C2_OUT_TEMP/doxy-$variant_lc.config
+  fi
+
+  echo $variant docs are building in $C2_DOCS_ROOT/$variant_lc
+  rm -rf $C2_DOCS_ROOT/$variant_lc
+  mkdir -p $C2_DOCS_ROOT/$variant_lc
+  pushd $ANDROID_BUILD_TOP
+  $DOXY $C2_OUT_TEMP/doxy-$variant_lc.config
+  popd
+}
+
+usage() {
+  echo "usage: $(basename "$0") [target]"
+  echo "  where target can be one of:"
+  echo "    all:      build both API and internal docs (default)"
+  echo "    api:      build API docs only"
+  echo "    internal: build internal docs which include implementation details"
+}
+
+TARGET=${1:-all}
+case "$TARGET" in
+  api) gen_doxy API;;
+  internal) gen_doxy Internal;;
+  all) gen_doxy API; gen_doxy Internal;;
+  -h) usage; exit 0;;
+  *) echo "unknown target '$TARGET'" >&2; usage; exit 2;;
+esac
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 7a9af18..48b6e21 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -24,7 +24,7 @@
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbinder_ndk",
         "libbase",
@@ -84,7 +84,7 @@
 
     shared_libs: [
         "android.hardware.common-V2-ndk",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder_ndk",
diff --git a/media/codec2/hal/aidl/BufferTypes.cpp b/media/codec2/hal/aidl/BufferTypes.cpp
index bc4948b..a0e6aa5 100644
--- a/media/codec2/hal/aidl/BufferTypes.cpp
+++ b/media/codec2/hal/aidl/BufferTypes.cpp
@@ -302,7 +302,7 @@
                             "invalid receiver connection id (0).";
             return bufferpool2::ResultStatus::CRITICAL_ERROR;
         } else {
-            if (isNewConnection) {
+            if (foundConnection == mConnections.end()) {
                 foundConnection = mConnections.try_emplace(
                         connectionId, receiverConnectionId, now).first;
             } else {
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 4605af3..eb64a4a 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -46,13 +46,17 @@
 using ::aidl::android::hardware::common::NativeHandle;
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 using ::ndk::ScopedAStatus;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
 
     Listener(const std::shared_ptr<Component>& component) :
-        mComponent(component),
-        mListener(component->mListener) {
+            mComponent(component),
+            mListener(component->mListener) {
+        CHECK(component);
+        CHECK(component->mListener);
     }
 
     virtual void onError_nb(
@@ -137,6 +141,52 @@
     std::weak_ptr<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const std::shared_ptr<Component>& component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+protected:
+    std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::DeathContext
 struct Component::DeathContext {
     std::weak_ptr<Component> mWeakComp;
@@ -149,14 +199,15 @@
         const std::shared_ptr<ComponentStore>& store,
         const std::shared_ptr<IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{SharedRefBase::make<ComponentInterface>(
-                component->intf(), store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager},
         mDeathContext(nullptr) {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = SharedRefBase::make<ComponentInterface>(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -179,8 +230,21 @@
                     registerFrameData(mListener, work->input);
         }
     }
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error Queuing to component.";
+                return ScopedAStatus::fromServiceSpecificError(err);
+            }
+        }
+        return ScopedAStatus::ok();
+    }
 
-    c2_status_t err = mComponent->queue_nb(&c2works);
+    err = mComponent->queue_nb(&c2works);
     if (err == C2_OK) {
         return ScopedAStatus::ok();
     }
@@ -192,7 +256,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
-
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
         if (work) {
@@ -289,30 +355,22 @@
         const IComponent::BlockPoolAllocator &allocator,
         IComponent::BlockPool *blockPool) {
     std::shared_ptr<C2BlockPool> c2BlockPool;
-    static constexpr IComponent::BlockPoolAllocator::Tag ALLOCATOR_ID =
-        IComponent::BlockPoolAllocator::allocatorId;
-    static constexpr IComponent::BlockPoolAllocator::Tag IGBA =
-        IComponent::BlockPoolAllocator::allocator;
     c2_status_t status = C2_OK;
     ::android::C2PlatformAllocatorDesc allocatorParam;
-    switch (allocator.getTag()) {
-        case ALLOCATOR_ID: {
-            allocatorParam.allocatorId =
-                    allocator.get<IComponent::BlockPoolAllocator::allocatorId>();
-        }
-        break;
-        case IGBA: {
-            allocatorParam.allocatorId = ::android::C2PlatformAllocatorStore::IGBA;
-            allocatorParam.igba =
-                    allocator.get<IComponent::BlockPoolAllocator::allocator>().igba;
+    allocatorParam.allocatorId = allocator.allocatorId;
+    switch (allocator.allocatorId) {
+        case ::android::C2PlatformAllocatorStore::IGBA: {
+            allocatorParam.igba = allocator.gbAllocator->igba;
             allocatorParam.waitableFd.reset(
-                    allocator.get<IComponent::BlockPoolAllocator::allocator>()
-                    .waitableFd.dup().release());
+                    allocator.gbAllocator->waitableFd.dup().release());
         }
         break;
-        default:
-            return ScopedAStatus::fromServiceSpecificError(C2_CORRUPTED);
+        default: {
+            // no-op
+        }
+        break;
     }
+
 #ifdef __ANDROID_APEX__
     status = ::android::CreateCodec2BlockPool(
             allocatorParam,
@@ -370,6 +428,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     if (status == C2_OK) {
         return ScopedAStatus::ok();
@@ -383,6 +444,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     if (status == C2_OK) {
         return ScopedAStatus::ok();
@@ -403,15 +467,38 @@
     return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
 }
 
+ScopedAStatus Component::connectToInputSurface(
+        const std::shared_ptr<IInputSurface>& inputSurface,
+        std::shared_ptr<IInputSurfaceConnection> *connection) {
+    // TODO
+    (void)inputSurface;
+    (void)connection;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
+ScopedAStatus Component::asInputSink(
+        std::shared_ptr<IInputSink> *sink) {
+    // TODO
+    (void)sink;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
 void Component::initListener(const std::shared_ptr<Component>& self) {
     if (__builtin_available(android __ANDROID_API_T__, *)) {
-        std::shared_ptr<C2Component::Listener> c2listener =
+        std::shared_ptr<C2Component::Listener> c2listener;
+        if (mMultiAccessUnitIntf) {
+            mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+        }
+        c2listener = mMultiAccessUnitHelper ?
+                std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
                 std::make_shared<Listener>(self);
         c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
         if (res != C2_OK) {
             mInit = res;
         }
 
+        // b/321902635, mListener should not be null.
+        CHECK(mListener);
         mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
                 AIBinder_DeathRecipient_new(OnBinderDied));
         mDeathContext = new DeathContext{ref<Component>()};
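
A condensed, hedged sketch of the queueing path above (the helper calls are the ones shown in the hunks; the free function itself is illustrative): incoming work is scattered into per-access-unit lists before it reaches the component, and the listener's gather() reassembles completed work before reporting it upstream.

    // Illustrative only; mirrors the queue path above when a helper is attached.
    #include <codec2/common/MultiAccessUnitHelper.h>
    #include <C2Component.h>

    using ::android::MultiAccessUnitHelper;

    static c2_status_t queueWithHelper(
            const std::shared_ptr<C2Component> &comp,
            const std::shared_ptr<MultiAccessUnitHelper> &helper,
            std::list<std::unique_ptr<C2Work>> &works) {
        if (!helper) {
            return comp->queue_nb(&works);       // plain path, no large-frame handling
        }
        std::list<std::list<std::unique_ptr<C2Work>>> split;
        helper->scatter(works, &split);          // one work list per access unit
        for (auto &sub : split) {
            c2_status_t err = comp->queue_nb(&sub);
            if (err != C2_OK) {
                return err;                      // stop at the first queueing failure
            }
        }
        return C2_OK;
    }
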
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 2d812c9..8ae9fa8 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -24,6 +24,8 @@
 
 #include <utils/Timers.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -43,9 +45,10 @@
 
 // Implementation of ConfigurableC2Intf based on C2ComponentInterface
 struct CompIntf : public ConfigurableC2Intf {
-    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
         ConfigurableC2Intf{intf->getName(), intf->getId()},
-        mIntf{intf} {
+        mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
     }
 
     virtual c2_status_t config(
@@ -53,7 +56,34 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures
             ) override {
-        return mIntf->config_vb(params, mayBlock, failures);
+        std::vector<C2Param*> paramsToIntf;
+        std::vector<C2Param*> paramsToLargeFrameIntf;
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->config_vb(params, mayBlock, failures);
+            return err;
+        }
+        for (auto &p : params) {
+            if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+                paramsToLargeFrameIntf.push_back(p);
+            } else {
+                paramsToIntf.push_back(p);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+        }
+        if (err1 != C2_OK) {
+            LOG(ERROR) << "We have a failed config";
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->config(
+                    paramsToLargeFrameIntf, mayBlock, failures);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t query(
@@ -61,23 +91,82 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2Param>>* const params
             ) const override {
-        return mIntf->query_vb({}, indices, mayBlock, params);
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->query_vb({}, indices, mayBlock, params);
+            return err;
+        }
+        std::vector<C2Param::Index> paramsToIntf;
+        std::vector<C2Param::Index> paramsToLargeFrameIntf;
+        for (auto &i : indices) {
+            if (mMultiAccessUnitIntf->isParamSupported(i)) {
+                paramsToLargeFrameIntf.push_back(i);
+            } else {
+                paramsToIntf.push_back(i);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->query(
+                    {}, paramsToLargeFrameIntf, mayBlock, params);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t querySupportedParams(
             std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
             ) const override {
-        return mIntf->querySupportedParams_nb(params);
+        c2_status_t err = mIntf->querySupportedParams_nb(params);
+        if (mMultiAccessUnitIntf != nullptr) {
+            err = mMultiAccessUnitIntf->querySupportedParams(params);
+        }
+        return err;
     }
 
     virtual c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const override {
-        return mIntf->querySupportedValues_vb(fields, mayBlock);
+        if (mMultiAccessUnitIntf == nullptr) {
+            return mIntf->querySupportedValues_vb(fields, mayBlock);
+        }
+        std::vector<C2FieldSupportedValuesQuery> dup = fields;
+        std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+        std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+        c2_status_t err = C2_OK;
+        for (int i = 0 ; i < fields.size(); i++) {
+            const C2ParamField &field = fields[i].field();
+            uint32_t queryArrayIdx = 1;
+            if (mMultiAccessUnitIntf->isValidField(fields[i].field())) {
+                queryArrayIdx = 0;
+            }
+            queryMap[field] = std::make_pair(
+                    queryArrayIdx, queryArray[queryArrayIdx].size());
+            queryArray[queryArrayIdx].push_back(fields[i]);
+        }
+        if (queryArray[0].size() > 0) {
+            err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+        }
+        if (queryArray[1].size() > 0) {
+            err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+        }
+        for (int i = 0 ; i < dup.size(); i++) {
+            auto it = queryMap.find(dup[i].field());
+            if (it != queryMap.end()) {
+                std::pair<uint32_t, size_t> queryid = it->second;
+                fields[i] = queryArray[queryid.first][queryid.second];
+            }
+        }
+        return err;
     }
 
 protected:
     std::shared_ptr<C2ComponentInterface> mIntf;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
 };
 
 } // unnamed namespace
@@ -85,10 +174,16 @@
 // ComponentInterface
 ComponentInterface::ComponentInterface(
         const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
         const std::shared_ptr<ParameterCache>& cache)
       : mInterface{intf},
         mConfigurable{SharedRefBase::make<CachedConfigurable>(
-                std::make_unique<CompIntf>(intf))} {
+                std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
     mInit = mConfigurable->init(cache);
 }
 
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 58407d1..b95c09e 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -199,6 +199,36 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::aidl::android::hardware::media::c2::IComponentStore
 ScopedAStatus ComponentStore::createComponent(
         const std::string& name,
@@ -206,6 +236,15 @@
         const std::shared_ptr<IClientManager>& pool,
         std::shared_ptr<IComponent> *component) {
 
+    if (!listener) {
+        ALOGE("createComponent(): listener is null");
+        return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+    }
+    if (!pool) {
+        ALOGE("createComponent(): pool is null");
+        return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+    }
+
     std::shared_ptr<C2Component> c2component;
     c2_status_t status =
             mStore->createComponent(name, &c2component);
@@ -218,7 +257,8 @@
         std::shared_ptr<Component> comp =
             SharedRefBase::make<Component>(c2component, listener, ref<ComponentStore>(), pool);
         *component = comp;
-        if (!component) {
+        if (!component || !comp) {
+            ALOGE("createComponent(): component cannot be returned");
             status = C2_CORRUPTED;
         } else {
             reportComponentBirth(comp.get());
@@ -248,7 +288,10 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        *intf = SharedRefBase::make<ComponentInterface>(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        *intf = SharedRefBase::make<ComponentInterface>(
+                c2interface, multiAccessUnitIntf, mParameterCache);
         return ScopedAStatus::ok();
     }
     return ScopedAStatus::fromServiceSpecificError(res);
@@ -273,6 +316,13 @@
     return ScopedAStatus::ok();
 }
 
+ScopedAStatus ComponentStore::createInputSurface(
+        std::shared_ptr<IInputSurface> *inputSurface) {
+    // TODO
+    (void)inputSurface;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
 void ComponentStore::onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf) {
     // invalidate unsupported struct descriptors if a new interface is loaded as it may have
     // exposed new descriptors
diff --git a/media/codec2/hal/aidl/Configurable.cpp b/media/codec2/hal/aidl/Configurable.cpp
index 0326263..2daaac2 100644
--- a/media/codec2/hal/aidl/Configurable.cpp
+++ b/media/codec2/hal/aidl/Configurable.cpp
@@ -19,6 +19,7 @@
 #include <android-base/logging.h>
 
 #include <android/binder_auto_utils.h>
+#include <android-base/hex.h>
 #include <codec2/aidl/Configurable.h>
 #include <codec2/aidl/ParamTypes.h>
 
@@ -61,7 +62,7 @@
 ScopedAStatus CachedConfigurable::query(
         const std::vector<int32_t>& indices,
         bool mayBlock,
-        Params* params) {
+        QueryResult *queryResult) {
     typedef C2Param::Index Index;
     std::vector<Index> c2heapParamIndices(
             (Index*)indices.data(),
@@ -72,13 +73,11 @@
             mayBlock ? C2_MAY_BLOCK : C2_DONT_BLOCK,
             &c2heapParams);
 
-    if (!CreateParamsBlob(params, c2heapParams)) {
+    if (!CreateParamsBlob(&(queryResult->params), c2heapParams)) {
         LOG(WARNING) << "query -- invalid output params.";
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    queryResult->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 ScopedAStatus CachedConfigurable::config(
@@ -115,10 +114,8 @@
     if (!CreateParamsBlob(&result->params, c2params)) {
         LOG(DEBUG) << "config -- invalid output params.";
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    result->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 ScopedAStatus CachedConfigurable::querySupportedParams(
@@ -139,8 +136,6 @@
                 LOG(WARNING) << "querySupportedParams -- invalid output params.";
                 break;
             }
-        } else {
-            res = Status::BAD_INDEX;
         }
     }
     paramDesc->resize(dstIx);
@@ -153,7 +148,7 @@
 ScopedAStatus CachedConfigurable::querySupportedValues(
         const std::vector<FieldSupportedValuesQuery>& fields,
         bool mayBlock,
-        std::vector<FieldSupportedValuesQueryResult>* result) {
+        QuerySupportedValuesResult *queryValues) {
     std::vector<C2FieldSupportedValuesQuery> c2fields;
     {
         // C2FieldSupportedValuesQuery objects are restricted in that some
@@ -173,22 +168,20 @@
     c2_status_t c2res = mIntf->querySupportedValues(
             c2fields,
             mayBlock ? C2_MAY_BLOCK : C2_DONT_BLOCK);
-    result->resize(fields.size());
+    queryValues->values.resize(fields.size());
     size_t dstIx = 0;
     for (const C2FieldSupportedValuesQuery &res : c2fields) {
-        if (ToAidl(&(*result)[dstIx], res)) {
+        if (ToAidl(&(queryValues->values[dstIx]), res)) {
             ++dstIx;
         } else {
-            result->resize(dstIx);
+            queryValues->values.resize(dstIx);
             c2res = C2_CORRUPTED;
             LOG(WARNING) << "querySupportedValues -- invalid output params.";
             break;
         }
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    queryValues->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 }  // namespace utils
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index 4a090e9..9725bcf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -27,6 +27,11 @@
 #include <aidl/android/hardware/media/c2/IComponentInterface.h>
 #include <aidl/android/hardware/media/c2/IComponentListener.h>
 #include <aidl/android/hardware/media/c2/IComponentStore.h>
+#include <aidl/android/hardware/media/c2/IInputSink.h>
+#include <aidl/android/hardware/media/c2/IInputSurface.h>
+#include <aidl/android/hardware/media/c2/IInputSurfaceConnection.h>
+
+#include <codec2/common/MultiAccessUnitHelper.h>
 
 #include <C2Component.h>
 #include <C2Buffer.h>
@@ -43,6 +48,8 @@
 namespace c2 {
 namespace utils {
 
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -71,12 +78,19 @@
     ::ndk::ScopedAStatus configureVideoTunnel(
             int32_t avSyncHwId,
             common::NativeHandle* handle) override;
+    ::ndk::ScopedAStatus connectToInputSurface(
+            const std::shared_ptr<IInputSurface>& inputSurface,
+            std::shared_ptr<IInputSurfaceConnection> *connection) override;
+    ::ndk::ScopedAStatus asInputSink(
+            std::shared_ptr<IInputSink> *sink) override;
 
 protected:
     c2_status_t mInit;
     std::shared_ptr<C2Component> mComponent;
     std::shared_ptr<ComponentInterface> mInterface;
     std::shared_ptr<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     std::shared_ptr<ComponentStore> mStore;
     DefaultBufferPoolSender mBufferPoolSender;
 
@@ -94,6 +108,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
     static void OnBinderDied(void *cookie);
     static void OnBinderUnlinked(void *cookie);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
index 7723bee..bd19cd6 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
@@ -22,11 +22,14 @@
 
 #include <aidl/android/hardware/media/c2/BnComponentInterface.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
 
 #include <memory>
+#include <set>
 
 namespace aidl {
 namespace android {
@@ -35,12 +38,16 @@
 namespace c2 {
 namespace utils {
 
-struct ComponentStore;
+using ::android::MultiAccessUnitInterface;
 
 struct ComponentInterface : public BnComponentInterface {
     ComponentInterface(
             const std::shared_ptr<C2ComponentInterface>& interface,
             const std::shared_ptr<ParameterCache>& cache);
+    ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& interface,
+        const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+        const std::shared_ptr<ParameterCache>& cache);
     c2_status_t status() const;
     ::ndk::ScopedAStatus getConfigurable(
             std::shared_ptr<IConfigurable> *intf) override;
@@ -51,7 +58,6 @@
     c2_status_t mInit;
 };
 
-
 }  // namespace utils
 }  // namespace c2
 }  // namespace media
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index b3c97d5..746e1bf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -23,6 +23,7 @@
 
 #include <aidl/android/hardware/media/bufferpool2/IClientManager.h>
 #include <aidl/android/hardware/media/c2/BnComponentStore.h>
+#include <aidl/android/hardware/media/c2/IInputSurface.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
@@ -74,6 +75,9 @@
 
     static std::shared_ptr<::android::FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::aidl::android::hardware::media::c2::IComponentStore.
     virtual ::ndk::ScopedAStatus createComponent(
             const std::string& name,
@@ -85,6 +89,8 @@
             std::shared_ptr<IComponentInterface> *intf) override;
     virtual ::ndk::ScopedAStatus listComponents(
             std::vector<IComponentStore::ComponentTraits>* traits) override;
+    virtual ::ndk::ScopedAStatus createInputSurface(
+            std::shared_ptr<IInputSurface> *inputSurface) override;
     virtual ::ndk::ScopedAStatus getStructDescriptors(
             const std::vector<int32_t>& indices,
             std::vector<StructDescriptor> *descs) override;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h b/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
index 6cc2c1b..96d3516 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
@@ -111,7 +111,7 @@
     virtual ::ndk::ScopedAStatus query(
             const std::vector<int32_t>& indices,
             bool mayBlock,
-            Params* params) override;
+            QueryResult* result) override;
 
     virtual ::ndk::ScopedAStatus config(
             const ::aidl::android::hardware::media::c2::Params& params,
@@ -126,7 +126,7 @@
     virtual ::ndk::ScopedAStatus querySupportedValues(
             const std::vector<FieldSupportedValuesQuery>& fields,
             bool mayBlock,
-            std::vector<FieldSupportedValuesQueryResult>* result) override;
+            QuerySupportedValuesResult* result) override;
 
 protected:
     // Common Codec2.0 interface wrapper
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 0b5b940..af6f4ae 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -43,7 +43,7 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
         "android.hardware.media.c2@1.2",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder",
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 0848fc6..01b0678 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -174,20 +174,16 @@
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
     : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
-    mMaxDequeueRequested{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
-    mMaxDequeueRequestedSeqId{0UL}, mMaxDequeueCommittedSeqId{0ULL},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
     mInConfig{false}, mStopped{false} {
     if (maxDequeueCount < kMaxDequeueMin) {
         mMaxDequeue = kMaxDequeueMin;
-        mMaxDequeueRequested = kMaxDequeueMin;
         mMaxDequeueCommitted = kMaxDequeueMin;
         mDequeueable = kMaxDequeueMin;
     } else if(maxDequeueCount > kMaxDequeueMax) {
         mMaxDequeue = kMaxDequeueMax;
-        mMaxDequeueRequested = kMaxDequeueMax;
         mMaxDequeueCommitted = kMaxDequeueMax;
         mDequeueable = kMaxDequeueMax;
     }
@@ -197,34 +193,36 @@
     mReadPipeFd.reset(pipefd[0]);
     mWritePipeFd.reset(pipefd[1]);
 
-    mEventQueueThread = std::thread([this](){processEvent();});
-    writeIncDequeueable(mDequeueable);
+    // ctor does not require lock to be held.
+    writeIncDequeueableLocked(mDequeueable);
 
     CHECK(ret >= 0);
-    CHECK(mEventQueueThread.joinable());
 }
 
 GraphicsTracker::~GraphicsTracker() {
     stop();
-    if (mEventQueueThread.joinable()) {
-        mEventQueueThread.join();
-    }
 }
 
 bool GraphicsTracker::adjustDequeueConfLocked(bool *updateDequeue) {
     // TODO: can't we adjust during config? Not committing it may be safer.
     *updateDequeue = false;
-    if (!mInConfig && mMaxDequeueRequested < mMaxDequeue) {
-        int delta = mMaxDequeue - mMaxDequeueRequested;
+    if (!mInConfig && mMaxDequeueRequested.has_value() && mMaxDequeueRequested < mMaxDequeue) {
+        int delta = mMaxDequeue - mMaxDequeueRequested.value();
+        int drained = 0;
         // Since we are supposed to increase mDequeuable by one already
         int adjustable = mDequeueable + 1;
         if (adjustable >= delta) {
-            mMaxDequeue = mMaxDequeueRequested;
+            mMaxDequeue = mMaxDequeueRequested.value();
             mDequeueable -= (delta - 1);
+            drained = delta - 1;
         } else {
             mMaxDequeue -= adjustable;
+            drained = mDequeueable;
             mDequeueable = 0;
         }
+        if (drained > 0) {
+            drainDequeueableLocked(drained);
+        }
         if (mMaxDequeueRequested == mMaxDequeue && mMaxDequeueRequested != mMaxDequeueCommitted) {
             *updateDequeue = true;
         }
@@ -235,6 +233,7 @@
 
 c2_status_t GraphicsTracker::configureGraphics(
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
+    // TODO: wait until operations on the previous IGBP are completed.
     std::shared_ptr<BufferCache> prevCache;
     int prevDequeueCommitted;
 
@@ -254,14 +253,28 @@
     if (igbp) {
         ret = igbp->getUniqueId(&bqId);
     }
-    if (ret != ::android::OK || prevCache->mGeneration == generation || prevCache->mBqId == bqId) {
+    if (ret != ::android::OK ||
+            prevCache->mGeneration == generation) {
+        ALOGE("new surface configure fail due to wrong or same bqId or same generation:"
+              "igbp(%d:%llu -> %llu), gen(%lu -> %lu)", (bool)igbp,
+              (unsigned long long)prevCache->mBqId, (unsigned long long)bqId,
+              (unsigned long)prevCache->mGeneration, (unsigned long)generation);
+        std::unique_lock<std::mutex> l(mLock);
+        mInConfig = false;
         return C2_BAD_VALUE;
     }
-    ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
-    if (ret != ::android::OK) {
-        // TODO: sort out the error from igbp and return an error accordingly.
-        return C2_CORRUPTED;
+    if (igbp) {
+        ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
+        if (ret != ::android::OK) {
+            ALOGE("new surface maxDequeueBufferCount configure fail");
+            // TODO: sort out the error from igbp and return an error accordingly.
+            std::unique_lock<std::mutex> l(mLock);
+            mInConfig = false;
+            return C2_CORRUPTED;
+        }
     }
+    ALOGD("new surface configured with id:%llu gen:%lu maxDequeue:%d",
+          (unsigned long long)bqId, (unsigned long)generation, prevDequeueCommitted);
     std::shared_ptr<BufferCache> newCache = std::make_shared<BufferCache>(bqId, generation, igbp);
     {
         std::unique_lock<std::mutex> l(mLock);
@@ -283,59 +296,74 @@
     // (Sometimes maxDequeueCount cannot be committed if the number of
     // dequeued buffer count is bigger.)
     int maxDequeueToCommit;
-    // max dequeue count which is committed to IGBP currently
-    // (actually mMaxDequeueCommitted, but needs to be read outside lock.)
-    int curMaxDequeueCommitted;
     std::unique_lock<std::mutex> cl(mConfigLock);
     {
         std::unique_lock<std::mutex> l(mLock);
-        if (mMaxDequeueRequested == maxDequeueCount) {
+        if (mMaxDequeueRequested.has_value()) {
+            if (mMaxDequeueRequested == maxDequeueCount) {
+                ALOGD("maxDequeueCount requested with %d already", maxDequeueCount);
+                return C2_OK;
+            }
+        } else if (mMaxDequeue == maxDequeueCount) {
+            ALOGD("maxDequeueCount is already %d", maxDequeueCount);
             return C2_OK;
         }
         mInConfig = true;
         mMaxDequeueRequested = maxDequeueCount;
         cache = mBufferCache;
-        curMaxDequeueCommitted = mMaxDequeueCommitted;
         if (mMaxDequeue <= maxDequeueCount) {
             maxDequeueToCommit = maxDequeueCount;
         } else {
             // Since mDequeueable is decreasing,
             // a delivered ready-to-allocate event may not be fulfilled.
             // Another wait via a waitable object may be necessary in that case.
-            int delta = mMaxDequeue - maxDequeueCount;
-            if (delta <= mDequeueable) {
-                maxDequeueToCommit = maxDequeueCount;
-                mDequeueable -= delta;
-            } else {
-                maxDequeueToCommit = mMaxDequeue - mDequeueable;
-                mDequeueable = 0;
+            int delta = std::min(mMaxDequeue - maxDequeueCount, mDequeueable);
+            maxDequeueToCommit = mMaxDequeue - delta;
+            mDequeueable -= delta;
+            if (delta > 0) {
+                drainDequeueableLocked(delta);
             }
         }
     }
 
     bool committed = true;
-    if (cache->mIgbp && maxDequeueToCommit != curMaxDequeueCommitted) {
+    if (cache->mIgbp && maxDequeueToCommit != mMaxDequeueCommitted) {
         ::android::status_t ret = cache->mIgbp->setMaxDequeuedBufferCount(maxDequeueToCommit);
         committed = (ret == ::android::OK);
-        if (!committed) {
+        if (committed) {
+            ALOGD("maxDequeueCount committed to IGBP: %d", maxDequeueToCommit);
+        } else {
             // This should not happen.
-            ALOGE("dequeueCount failed with error(%d)", (int)ret);
+            ALOGE("maxdequeueCount update to IGBP failed with error(%d)", (int)ret);
         }
     }
 
+    int oldMaxDequeue = 0;
+    int requested = 0;
     {
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
+        oldMaxDequeue = mMaxDequeue;
+        mMaxDequeue = maxDequeueToCommit; // we already drained dequeueable
         if (committed) {
+            clearCacheIfNecessaryLocked(cache, maxDequeueToCommit);
             mMaxDequeueCommitted = maxDequeueToCommit;
-            int delta = mMaxDequeueCommitted - mMaxDequeue;
+            if (mMaxDequeueRequested == mMaxDequeueCommitted &&
+                  mMaxDequeueRequested == mMaxDequeue) {
+                mMaxDequeueRequested.reset();
+            }
+            if (mMaxDequeueRequested.has_value()) {
+                requested = mMaxDequeueRequested.value();
+            }
+            int delta = mMaxDequeueCommitted - oldMaxDequeue;
             if (delta > 0) {
                 mDequeueable += delta;
-                l.unlock();
-                writeIncDequeueable(delta);
+                writeIncDequeueableLocked(delta);
             }
         }
     }
+    ALOGD("maxDequeueCount change %d -> %d: pending: %d",
+          oldMaxDequeue, maxDequeueToCommit, requested);
 
     if (!committed) {
         return C2_CORRUPTED;
@@ -350,48 +378,60 @@
     std::unique_lock<std::mutex> cl(mConfigLock);
     {
         std::unique_lock<std::mutex> l(mLock);
-        if (mMaxDequeue == mMaxDequeueRequested && mMaxDequeueCommitted != mMaxDequeueRequested) {
-            dequeueCommit = mMaxDequeue;
-            mInConfig = true;
-            cache = mBufferCache;
-        } else {
+        if (!mMaxDequeueRequested.has_value() || mMaxDequeue != mMaxDequeueRequested) {
             return;
         }
+        if (mMaxDequeueCommitted == mMaxDequeueRequested) {
+            // already committed. may not happen.
+            mMaxDequeueRequested.reset();
+            return;
+        }
+        dequeueCommit = mMaxDequeue;
+        mInConfig = true;
+        cache = mBufferCache;
     }
     bool committed = true;
     if (cache->mIgbp) {
         ::android::status_t ret = cache->mIgbp->setMaxDequeuedBufferCount(dequeueCommit);
         committed = (ret == ::android::OK);
-        if (!committed) {
+        if (committed) {
+            ALOGD("delayed maxDequeueCount update to IGBP: %d", dequeueCommit);
+        } else {
             // This should not happen.
-            ALOGE("dequeueCount failed with error(%d)", (int)ret);
+            ALOGE("delayed maxdequeueCount update to IGBP failed with error(%d)", (int)ret);
         }
     }
-    int cleared = 0;
     {
         // cache == mCache here, since we locked config.
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         if (committed) {
-            if (cache->mIgbp && dequeueCommit < mMaxDequeueCommitted) {
-                // we are shrinking # of buffers, so clearing the cache.
-                for (auto it = cache->mBuffers.begin(); it != cache->mBuffers.end();) {
-                    uint64_t bid = it->second->mId;
-                    if (mDequeued.count(bid) == 0 || mDeallocating.count(bid) > 0) {
-                        ++cleared;
-                        it = cache->mBuffers.erase(it);
-                    } else {
-                        ++it;
-                    }
-                }
-            }
+            clearCacheIfNecessaryLocked(cache, dequeueCommit);
             mMaxDequeueCommitted = dequeueCommit;
         }
+        mMaxDequeueRequested.reset();
     }
-    if (cleared > 0) {
-        ALOGD("%d buffers are cleared from cache, due to IGBP capacity change", cleared);
-    }
+}
 
+void GraphicsTracker::clearCacheIfNecessaryLocked(const std::shared_ptr<BufferCache> &cache,
+                                            int maxDequeueCommitted) {
+    int cleared = 0;
+    size_t origCacheSize = cache->mBuffers.size();
+    if (cache->mIgbp && maxDequeueCommitted < mMaxDequeueCommitted) {
+        // we are shrinking # of buffers in the case, so evict the previous
+        // cached buffers.
+        for (auto it = cache->mBuffers.begin(); it != cache->mBuffers.end();) {
+            uint64_t bid = it->second->mId;
+            if (mDequeued.count(bid) == 0 || mDeallocating.count(bid) > 0) {
+                ++cleared;
+                it = cache->mBuffers.erase(it);
+            } else {
+                ++it;
+            }
+        }
+    }
+    ALOGD("Cache size %zu -> %zu: maybe_cleared(%d), dequeued(%zu)",
+          origCacheSize, cache->mBuffers.size(), cleared, mDequeued.size());
 }
 
 int GraphicsTracker::getCurDequeueable() {
@@ -400,70 +440,58 @@
 }
 
 void GraphicsTracker::stop() {
-    bool expected = false;
-    std::unique_lock<std::mutex> l(mEventLock);
-    bool updated = mStopped.compare_exchange_strong(expected, true);
-    if (updated) {
-        int writeFd = mWritePipeFd.release();
+    // TODO: wait until all operations on the current IGBP
+    // are completed.
+    std::unique_lock<std::mutex> l(mLock);
+    if (mStopped) {
+        return;
+    }
+    mStopped = true;
+    int writeFd = mWritePipeFd.release();
+    if (writeFd >= 0) {
         ::close(writeFd);
-        int readFd = mReadPipeFd.release();
-        ::close(readFd);
-        mEventCv.notify_one();
     }
 }
 
-void GraphicsTracker::writeIncDequeueable(int inc) {
+void GraphicsTracker::writeIncDequeueableLocked(int inc) {
     CHECK(inc > 0 && inc < kMaxDequeueMax);
     thread_local char buf[kMaxDequeueMax];
-    int diff = 0;
-    {
-        std::unique_lock<std::mutex> l(mEventLock);
-        if (mStopped) {
-            return;
-        }
-        CHECK(mWritePipeFd.get() >= 0);
-        int ret = ::write(mWritePipeFd.get(), buf, inc);
-        if (ret == inc) {
-            return;
-        }
-        diff = ret < 0 ? inc : inc - ret;
-
-        // Partial write or EINTR. This will not happen in a real scenario.
-        mIncDequeueable += diff;
-        if (mIncDequeueable > 0) {
-            l.unlock();
-            mEventCv.notify_one();
-            ALOGW("updating dequeueable to pipefd pending");
-        }
+    if (mStopped) { // reading end closed
+        return;
     }
+    int writeFd = mWritePipeFd.get();
+    if (writeFd < 0) {
+        // initialization failed; the fd is not valid.
+        return;
+    }
+    int ret = ::write(writeFd, buf, inc);
+    // Since this is non-blocking I/O, it never returns EINTR.
+    //
+    // A ::write() to a pipe is guaranteed to succeed atomically if it writes
+    // less than PIPE_BUF bytes. The pipe/FIFO buffer is at least 4K and our
+    // total max pending buffer count is 64, so it never returns EAGAIN here either.
+    // See pipe(7) for further information.
+    //
+    // Other errors are serious errors and we cannot synchronize mDequeueable to
+    // length of pending buffer in pipe/fifo anymore. So better to abort here.
+    // TODO: do not abort here. (b/318717399)
+    CHECK(ret == inc);
 }
 
-void GraphicsTracker::processEvent() {
-    // This is for partial/failed writes to the writing end.
-    // This may not happen in the real scenario.
+void GraphicsTracker::drainDequeueableLocked(int dec) {
+    CHECK(dec > 0 && dec < kMaxDequeueMax);
     thread_local char buf[kMaxDequeueMax];
-    while (true) {
-        std::unique_lock<std::mutex> l(mEventLock);
-        if (mStopped) {
-            break;
-        }
-        if (mIncDequeueable > 0) {
-            int inc = mIncDequeueable > kMaxDequeueMax ? kMaxDequeueMax : mIncDequeueable;
-            int ret = ::write(mWritePipeFd.get(), buf, inc);
-            int written = ret <= 0 ? 0 : ret;
-            mIncDequeueable -= written;
-            if (mIncDequeueable > 0) {
-                l.unlock();
-                if (ret < 0) {
-                    ALOGE("write to writing end failed %d", errno);
-                } else {
-                    ALOGW("partial write %d(%d)", inc, written);
-                }
-                continue;
-            }
-        }
-        mEventCv.wait(l);
+    if (mStopped) {
+        return;
     }
+    int readFd = mReadPipeFd.get();
+    if (readFd < 0) {
+        // initialization failed; the fd is not valid.
+        return;
+    }
+    int ret = ::read(readFd, buf, dec);
+    // TODO: do not abort here. (b/318717399)
+    CHECK(ret == dec);
 }
 
 c2_status_t GraphicsTracker::getWaitableFd(int *pipeFd) {
@@ -539,8 +567,7 @@
             return;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
     }
 }
 
@@ -715,14 +742,13 @@
             return C2_OK;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
     }
     return C2_OK;
 }
 
 void GraphicsTracker::commitDeallocate(
-        std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid) {
+        std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid, bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
     size_t del1 = mDequeued.erase(bid);
     size_t del2 = mDeallocating.erase(bid);
@@ -730,9 +756,11 @@
     if (cache) {
         cache->unblockSlot(slotId);
     }
+    if (adjustDequeueConfLocked(updateDequeue)) {
+        return;
+    }
     mDequeueable++;
-    l.unlock();
-    writeIncDequeueable(1);
+    writeIncDequeueableLocked(1);
 }
 
 
@@ -758,7 +786,10 @@
     // cache->mIgbp is not null, if completed is false.
     (void)cache->mIgbp->cancelBuffer(slotId, rFence);
 
-    commitDeallocate(cache, slotId, bid);
+    commitDeallocate(cache, slotId, bid, &updateDequeue);
+    if (updateDequeue) {
+        updateDequeueConf();
+    }
     return C2_OK;
 }
 
@@ -785,8 +816,7 @@
             return C2_BAD_STATE;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
         return C2_BAD_STATE;
     }
     std::shared_ptr<BufferItem> buffer = it->second;
@@ -828,8 +858,7 @@
             return;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
         return;
     }
 }
@@ -843,6 +872,9 @@
         ALOGE("retrieving AHB-ID for GraphicBlock failed");
         return C2_CORRUPTED;
     }
+    std::shared_ptr<_C2BlockPoolData> poolData =
+            _C2BlockFactory::GetGraphicBlockPoolData(blk);
+    _C2BlockFactory::DisownIgbaBlock(poolData);
     std::shared_ptr<BufferCache> cache;
     std::shared_ptr<BufferItem> buffer;
     std::shared_ptr<BufferItem> oldBuffer;
@@ -870,13 +902,19 @@
         if (!gb) {
             ALOGE("render: realloc-ing a new buffer for migration failed");
             std::shared_ptr<BufferCache> nullCache;
-            commitDeallocate(nullCache, -1, bid);
+            commitDeallocate(nullCache, -1, bid, &updateDequeue);
+            if (updateDequeue) {
+                updateDequeueConf();
+            }
             return C2_REFUSED;
         }
         if (cache->mIgbp->attachBuffer(&(newBuffer->mSlot), gb) != ::android::OK) {
             ALOGE("render: attaching a new buffer to IGBP failed");
             std::shared_ptr<BufferCache> nullCache;
-            commitDeallocate(nullCache, -1, bid);
+            commitDeallocate(nullCache, -1, bid, &updateDequeue);
+            if (updateDequeue) {
+                updateDequeueConf();
+            }
             return C2_REFUSED;
         }
         cache->waitOnSlot(newBuffer->mSlot);
@@ -890,11 +928,13 @@
         CHECK(renderRes != ::android::BAD_VALUE);
         ALOGE("render: failed to queueBuffer() err = %d", renderRes);
         (void) cache->mIgbp->cancelBuffer(buffer->mSlot, input.fence);
-        commitDeallocate(cache, buffer->mSlot, bid);
+        commitDeallocate(cache, buffer->mSlot, bid, &updateDequeue);
+        if (updateDequeue) {
+            updateDequeueConf();
+        }
         return C2_REFUSED;
     }
 
-    updateDequeue = false;
     commitRender(cache, buffer, oldBuffer, output->bufferReplaced, &updateDequeue);
     if (updateDequeue) {
         updateDequeueConf();
@@ -909,8 +949,7 @@
         if (mBufferCache->mGeneration == generation) {
             if (!adjustDequeueConfLocked(&updateDequeue)) {
                 mDequeueable++;
-                l.unlock();
-                writeIncDequeueable(1);
+                writeIncDequeueableLocked(1);
             }
         }
     }
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 2d19ecc..b3ae514 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -635,21 +635,22 @@
     if (heapParams) {
         heapParams->reserve(heapParams->size() + numIndices);
     }
-    c2_aidl::Params result;
+    c2_aidl::IConfigurable::QueryResult result;
     ndk::ScopedAStatus transStatus = mBase->query(indices, (mayBlock == C2_MAY_BLOCK), &result);
     c2_status_t status = GetC2Status(transStatus, "query");
     if (status != C2_OK) {
         return status;
     }
+    status = static_cast<c2_status_t>(result.status.status);
 
     std::vector<C2Param*> paramPointers;
-    if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, result)) {
+    if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, result.params)) {
         LOG(ERROR) << "query -- error while parsing params.";
         return C2_CORRUPTED;
     }
     size_t i = 0;
-    for (auto it = paramPointers.begin();
-            it != paramPointers.end(); ) {
+    size_t numQueried = 0;
+    for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
         C2Param* paramPointer = *it;
         if (numStackIndices > 0) {
             --numStackIndices;
@@ -676,7 +677,9 @@
                 status = C2_BAD_INDEX;
                 continue;
             }
-            if (!stackParams[i++]->updateFrom(*paramPointer)) {
+            if (stackParams[i++]->updateFrom(*paramPointer)) {
+                ++numQueried;
+            } else {
                 LOG(WARNING) << "query -- param update failed: "
                                 "index = "
                              << paramPointer->index() << ".";
@@ -692,10 +695,14 @@
                                 "unexpected extra stack param.";
             } else {
                 heapParams->emplace_back(C2Param::Copy(*paramPointer));
+                ++numQueried;
             }
         }
         ++it;
     }
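+    // If any requested param was not returned or failed to update, report C2_BAD_INDEX.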
+    if (status == C2_OK && indices.size() != numQueried) {
+        status = C2_BAD_INDEX;
+    }
     return status;
 }
 
@@ -714,6 +721,7 @@
     if (status != C2_OK) {
         return status;
     }
+    status = static_cast<c2_status_t>(result.status.status);
     size_t i = failures->size();
     failures->resize(i + result.failures.size());
     for (const c2_aidl::SettingResult& sf : result.failures) {
@@ -764,21 +772,23 @@
         }
     }
 
-    std::vector<c2_aidl::FieldSupportedValuesQueryResult> result;
+    c2_aidl::IConfigurable::QuerySupportedValuesResult result;
+
     ndk::ScopedAStatus transStatus = mBase->querySupportedValues(
             inFields, (mayBlock == C2_MAY_BLOCK), &result);
     c2_status_t status = GetC2Status(transStatus, "querySupportedValues");
     if (status != C2_OK) {
         return status;
     }
-    if (result.size() != fields.size()) {
+    status = static_cast<c2_status_t>(result.status.status);
+    if (result.values.size() != fields.size()) {
         LOG(ERROR) << "querySupportedValues -- "
                       "input and output lists "
                       "have different sizes.";
         return C2_CORRUPTED;
     }
     for (size_t i = 0; i < fields.size(); ++i) {
-        if (!c2_aidl::utils::FromAidl(&fields[i], inFields[i], result[i])) {
+        if (!c2_aidl::utils::FromAidl(&fields[i], inFields[i], result.values[i])) {
             LOG(ERROR) << "querySupportedValues -- "
                           "invalid returned value.";
             return C2_CORRUPTED;
@@ -2059,6 +2069,8 @@
         id = id == C2PlatformAllocatorStore::BUFFERQUEUE ?
                 C2PlatformAllocatorStore::IGBA : id;
 
+        c2_aidl::IComponent::BlockPoolAllocator allocator;
+        allocator.allocatorId = id;
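+        // For graphic (IGBA) pools the allocator also carries the
+        // IGraphicBufferAllocator binder and its waitable fd; other pool types
+        // only need the allocator id.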
         if (id == C2PlatformAllocatorStore::IGBA)  {
             std::shared_ptr<AidlGraphicBufferAllocator> gba =
                     mGraphicBufferAllocators->create();
@@ -2068,12 +2080,11 @@
             if (status != C2_OK) {
                 return status;
             }
-            c2_aidl::IComponent::BlockPoolAllocator allocator;
-            allocator.set<c2_aidl::IComponent::BlockPoolAllocator::allocator>();
-            allocator.get<c2_aidl::IComponent::BlockPoolAllocator::allocator>().igba =
+            c2_aidl::IComponent::GbAllocator gbAllocator;
+            gbAllocator.waitableFd = std::move(waitableFd);
+            gbAllocator.igba =
                     c2_aidl::IGraphicBufferAllocator::fromBinder(gba->asBinder());
-            allocator.get<c2_aidl::IComponent::BlockPoolAllocator::allocator>().waitableFd =
-                    std::move(waitableFd);
+            allocator.gbAllocator = std::move(gbAllocator);
             ::ndk::ScopedAStatus transStatus = mAidlBase->createBlockPool(
                     allocator, &aidlBlockPool);
             status = GetC2Status(transStatus, "createBlockPool");
@@ -2083,7 +2094,7 @@
             mGraphicBufferAllocators->setCurrentId(aidlBlockPool.blockPoolId);
         } else {
             ::ndk::ScopedAStatus transStatus = mAidlBase->createBlockPool(
-                    static_cast<int32_t>(id), &aidlBlockPool);
+                    allocator, &aidlBlockPool);
             status = GetC2Status(transStatus, "createBlockPool");
             if (status != C2_OK) {
                 return status;
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 4640243..dd6c869 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -27,6 +27,7 @@
 #include <mutex>
 #include <set>
 #include <thread>
+#include <optional>
 
 #include <C2Buffer.h>
 
@@ -234,12 +235,14 @@
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
 
+    // These member variables are read and modified only in one of the
+    // following states:
+    // 1. mConfigLock is held:
+    //    mInConfig is set to true (with mLock held) at the beginning and
+    //    cleared (with mLock held) at the end.
+    // 2. mLock is held and mInConfig is false.
     int mMaxDequeue;
-    int mMaxDequeueRequested;
     int mMaxDequeueCommitted;
-
-    uint32_t mMaxDequeueRequestedSeqId;
-    uint32_t mMaxDequeueCommittedSeqId;
+    std::optional<int> mMaxDequeueRequested;
 
     int mDequeueable;
 
@@ -271,13 +274,6 @@
     ::android::base::unique_fd mWritePipeFd;  // The writing end file descriptor
 
     std::atomic<bool> mStopped;
-    std::thread mEventQueueThread; // Thread to handle interrupted
-                                   // writes to the writing end.
-    std::mutex mEventLock;
-    std::condition_variable mEventCv;
-
-    bool mStopEventThread;
-    int mIncDequeueable; // pending # of write to increase dequeueable eventfd
 
 private:
     explicit GraphicsTracker(int maxDequeueCount);
@@ -289,6 +285,9 @@
     bool adjustDequeueConfLocked(bool *updateDequeueConf);
 
     void updateDequeueConf();
+    void clearCacheIfNecessaryLocked(
+            const std::shared_ptr<BufferCache> &cache,
+            int maxDequeueCommitted);
 
     c2_status_t requestAllocate(std::shared_ptr<BufferCache> *cache);
     c2_status_t requestDeallocate(uint64_t bid, const sp<Fence> &fence,
@@ -305,7 +304,9 @@
                         bool cached, int slotId, const sp<Fence> &fence,
                         std::shared_ptr<BufferItem> *buffer,
                         bool *updateDequeue);
-    void commitDeallocate(std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid);
+    void commitDeallocate(std::shared_ptr<BufferCache> &cache,
+                          int slotId, uint64_t bid,
+                          bool *updateDequeue);
     void commitRender(const std::shared_ptr<BufferCache> &cache,
                       const std::shared_ptr<BufferItem> &buffer,
                       const std::shared_ptr<BufferItem> &oldBuffer,
@@ -318,8 +319,8 @@
             bool *cached, int *rSlotId, sp<Fence> *rFence,
             std::shared_ptr<BufferItem> *buffer);
 
-    void writeIncDequeueable(int inc);
-    void processEvent();
+    void writeIncDequeueableLocked(int inc);
+    void drainDequeueableLocked(int dec);
 };
 
 } // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 2aedd8b..7d7b285 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -11,6 +11,7 @@
 
     srcs: [
         "BufferTypes.cpp",
+        "MultiAccessUnitHelper.cpp",
     ],
 
     export_include_dirs: ["include/"],
@@ -26,7 +27,10 @@
         "libcodec2_vndk",
         "liblog",
         "libstagefright_foundation",
+        "server_configurable_flags",
     ],
+
+    static_libs: ["aconfig_mediacodec_flags_c_lib"],
 }
 
 cc_library_static {
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index 761a409..d3ea181 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -28,7 +28,12 @@
 namespace android {
 
 bool IsCodec2AidlHalSelected() {
-    if (!com::android::media::codec::flags::provider_->aidl_hal()) {
+    // For new devices with vendor software targeting 202404, we always want to
+    // use AIDL if it exists
+    constexpr int kAndroidApi202404 = 202404;
+    int vendorVersion = ::android::base::GetIntProperty("ro.vendor.api_level", -1);
+    if (!com::android::media::codec::flags::provider_->aidl_hal() &&
+        vendorVersion < kAndroidApi202404) {
         // Cannot select AIDL if not enabled
         return false;
     }
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
new file mode 100644
index 0000000..3a71520
--- /dev/null
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -0,0 +1,745 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-MultiAccessUnitHelper"
+#include <android-base/logging.h>
+
+#include <com_android_media_codec_flags.h>
+
+#include <codec2/common/MultiAccessUnitHelper.h>
+#include <android-base/properties.h>
+
+#include <C2BufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+namespace android {
+
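+// Setter for C2LargeFrame::output: validates maxSize/thresholdSize against the
+// supported ranges and keeps the pair consistent (maxSize >= thresholdSize);
+// on failure both values are reset to 0.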
+static C2R MultiAccessUnitParamsSetter(
+        bool mayBlock, C2InterfaceHelper::C2P<C2LargeFrame::output> &me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.maxSize).supportsAtAll(me.v.maxSize)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.maxSize)));
+    } else if (!me.F(me.v.thresholdSize).supportsAtAll(me.v.thresholdSize)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
+    } else if (me.v.maxSize < me.v.thresholdSize) {
+        me.set().maxSize = me.v.thresholdSize;
+    } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
+        me.set().thresholdSize = me.v.maxSize;
+    }
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    res.retrieveFailures(&failures);
+    if (!failures.empty()) {
+        me.set().maxSize = 0;
+        me.set().thresholdSize = 0;
+    }
+    return res;
+}
+
+MultiAccessUnitInterface::MultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface>& interface,
+        std::shared_ptr<C2ReflectorHelper> helper)
+        : C2InterfaceHelper(helper), mC2ComponentIntf(interface) {
+    setDerivedInstance(this);
+    addParameter(
+            DefineParam(mLargeFrameParams, C2_PARAMKEY_OUTPUT_LARGE_FRAME)
+            .withDefault(new C2LargeFrame::output(0u, 0, 0))
+            .withFields({
+                C2F(mLargeFrameParams, maxSize).inRange(
+                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+                C2F(mLargeFrameParams, thresholdSize).inRange(
+                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+            })
+            .withSetter(MultiAccessUnitParamsSetter)
+            .build());
+    std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
+    querySupportedParams(&supportedParams);
+    // Add to the set to do interface separation in query/config.
+    for (std::shared_ptr<C2ParamDescriptor> &desc : supportedParams) {
+        mSupportedParamIndexSet.insert(desc->index());
+    }
+    mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->maxSize));
+    mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->thresholdSize));
+
+    if (mC2ComponentIntf) {
+        c2_status_t err = mC2ComponentIntf->query_vb({&mKind}, {}, C2_MAY_BLOCK, nullptr);
+    }
+}
+
+bool MultiAccessUnitInterface::isValidField(const C2ParamField &field) const {
+    return (std::find(mParamFields.begin(), mParamFields.end(), field) != mParamFields.end());
+}
+bool MultiAccessUnitInterface::isParamSupported(C2Param::Index index) {
+    return (mSupportedParamIndexSet.count(index) != 0);
+}
+
+C2LargeFrame::output MultiAccessUnitInterface::getLargeFrameParam() const {
+    return *mLargeFrameParams;
+}
+
+C2Component::kind_t MultiAccessUnitInterface::kind() const {
+    return (C2Component::kind_t)(mKind.value);
+}
+
+void MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+        uint32_t &sampleRate_, uint32_t &channelCount_) const {
+    if (mC2ComponentIntf) {
+        C2StreamSampleRateInfo::output sampleRate;
+        C2StreamChannelCountInfo::output channelCount;
+        c2_status_t res = mC2ComponentIntf->query_vb(
+                {&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
+        if (res == C2_OK) {
+            sampleRate_ = sampleRate.value;
+            channelCount_ = channelCount.value;
+        }
+    }
+}
+
+//C2MultiAccessUnitBuffer
+class C2MultiAccessUnitBuffer : public C2Buffer {
+    public:
+        explicit C2MultiAccessUnitBuffer(
+                const std::vector<C2ConstLinearBlock> &blocks):
+                C2Buffer(blocks) {
+        }
+};
+
+//MultiAccessUnitHelper
+MultiAccessUnitHelper::MultiAccessUnitHelper(
+        const std::shared_ptr<MultiAccessUnitInterface>& intf):
+        mInit(false),
+        mInterface(intf) {
+    std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
+    if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator) == C2_OK) {
+        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, ++mBlockPoolId);
+        mInit = true;
+    }
+}
+
+MultiAccessUnitHelper::~MultiAccessUnitHelper() {
+    std::unique_lock<std::mutex> l(mLock);
+    mFrameHolder.clear();
+}
+
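+// Multi access-unit support is gated by the large_audio_frame aconfig flag and,
+// for now, by the "debug.media.c2.large.audio.frame" debug sysprop.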
+bool MultiAccessUnitHelper::isEnabledOnPlatform() {
+    bool result = com::android::media::codec::flags::provider_->large_audio_frame();
+    if (!result) {
+        return false;
+    }
+    //TODO: remove this before launch
+    result = ::android::base::GetBoolProperty("debug.media.c2.large.audio.frame", true);
+    LOG(DEBUG) << "MultiAccessUnitHelper " << (result ? "enabled" : "disabled");
+    return result;
+}
+
+std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
+    return mInterface;
+}
+
+bool MultiAccessUnitHelper::getStatus() {
+    return mInit;
+}
+
+void MultiAccessUnitHelper::reset() {
+    std::lock_guard<std::mutex> l(mLock);
+    mFrameHolder.clear();
+}
+
+c2_status_t MultiAccessUnitHelper::error(
+        std::list<std::unique_ptr<C2Work>> * const worklist) {
+    if (worklist == nullptr) {
+        LOG(ERROR) << "Provided null worklist for error()";
+        return C2_OK;
+    }
+    std::unique_lock<std::mutex> l(mLock);
+    for (auto frame = mFrameHolder.begin(); frame != mFrameHolder.end(); frame++) {
+        if (frame->mLargeWork) {
+            finalizeWork(*frame, 0, true);
+            worklist->push_back(std::move(frame->mLargeWork));
+            frame->reset();
+        }
+    }
+    mFrameHolder.clear();
+    return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::flush(
+        std::list<std::unique_ptr<C2Work>>* const c2flushedWorks) {
+    c2_status_t c2res = C2_OK;
+    std::lock_guard<std::mutex> l(mLock);
+    for (auto iterWork = c2flushedWorks->begin() ; iterWork != c2flushedWorks->end(); ) {
+        bool foundFlushedFrame = false;
+        std::list<MultiAccessUnitInfo>::iterator frame =
+                mFrameHolder.begin();
+        while (frame != mFrameHolder.end() && !foundFlushedFrame) {
+            auto it = frame->mComponentFrameIds.find(
+                    (*iterWork)->input.ordinal.frameIndex.peekull());
+            if (it != frame->mComponentFrameIds.end()) {
+                LOG(DEBUG) << "Multi access-unit flush "
+                        << (*iterWork)->input.ordinal.frameIndex.peekull()
+                        << " with " << frame->inOrdinal.frameIndex.peekull();
+                (*iterWork)->input.ordinal.frameIndex = frame->inOrdinal.frameIndex;
+                frame = mFrameHolder.erase(frame);
+                foundFlushedFrame = true;
+            } else {
+                ++frame;
+            }
+        }
+        if (!foundFlushedFrame) {
+            iterWork = c2flushedWorks->erase(iterWork);
+        } else {
+            ++iterWork;
+        }
+    }
+    return c2res;
+}
+
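+// Splits each incoming work item into per-access-unit work items using the
+// attached C2AccessUnitInfos; the generated frame indexes are tracked in
+// mFrameHolder so that the outputs can be gathered back later.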
+c2_status_t MultiAccessUnitHelper::scatter(
+        std::list<std::unique_ptr<C2Work>> &largeWork,
+        std::list<std::list<std::unique_ptr<C2Work>>>* const processedWork) {
+    LOG(DEBUG) << "Multiple access-unit: scatter";
+    if (processedWork == nullptr) {
+        LOG(ERROR) << "MultiAccessUnitHelper provided with no work list";
+        return C2_CORRUPTED;
+    }
+    for (std::unique_ptr<C2Work>& w : largeWork) {
+        std::list<std::unique_ptr<C2Work>> sliceWork;
+        C2WorkOrdinalStruct inputOrdinal = w->input.ordinal;
+        // To hold the correspondence and processing bits between input and output
+        MultiAccessUnitInfo frameInfo(inputOrdinal);
+        std::set<uint64_t>& frameSet = frameInfo.mComponentFrameIds;
+        uint64_t newFrameIdx = mFrameIndex++;
+        // TODO: Do not split buffers if the component inherently supports multiple frames;
+        // if that is the case, only replace the frame index.
+        auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+            std::unique_ptr<C2Work> newWork(new C2Work);
+            newWork->input.flags = (C2FrameData::flags_t)flags;
+            newWork->input.ordinal = inWork->input.ordinal;
+            newWork->input.ordinal.frameIndex = newFrameIdx;
+            if (!inWork->input.configUpdate.empty()) {
+                for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
+                    newWork->input.configUpdate.push_back(
+                            std::move(C2Param::Copy(*(param.get()))));
+                }
+            }
+            newWork->input.infoBuffers = (inWork->input.infoBuffers);
+            if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
+                newWork->worklets.emplace_back(new C2Worklet);
+                newWork->worklets.front()->component = inWork->worklets.front()->component;
+                std::vector<std::unique_ptr<C2Tuning>> tunings;
+                for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
+                    tunings.push_back(std::move(
+                            std::unique_ptr<C2Tuning>(
+                                    static_cast<C2Tuning*>(
+                                            C2Param::Copy(*(tuning.get())).release()))));
+                }
+                newWork->worklets.front()->tunings = std::move(tunings);
+            }
+            return newWork;
+        };
+        if (w->input.buffers.empty()
+                || (w->input.buffers.front() == nullptr)
+                || (!w->input.buffers.front()->hasInfo(
+                        C2AccessUnitInfos::input::PARAM_TYPE))) {
+            LOG(DEBUG) << "Empty or MultiAU info buffer scatter frames with frameIndex "
+                    << inputOrdinal.frameIndex.peekull()
+                    << ") -> newFrameIndex " << newFrameIdx
+                    <<" : input ts " << inputOrdinal.timestamp.peekull();
+            sliceWork.push_back(std::move(cloneInputWork(w, w->input.flags)));
+            if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
+                sliceWork.back()->input.buffers = std::move(w->input.buffers);
+            }
+            frameSet.insert(newFrameIdx);
+            processedWork->push_back(std::move(sliceWork));
+        }  else {
+            const std::vector<std::shared_ptr<C2Buffer>>& inBuffers = w->input.buffers;
+            if (inBuffers.front()->data().linearBlocks().size() == 0) {
+                LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
+                return C2_CORRUPTED;
+            }
+            const std::vector<C2ConstLinearBlock>& multiAU =
+                    inBuffers.front()->data().linearBlocks();
+            std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
+                    std::static_pointer_cast<const C2AccessUnitInfos::input>(
+                    w->input.buffers.front()->getInfo(C2AccessUnitInfos::input::PARAM_TYPE));
+            uint32_t offset = 0;
+            uint32_t multiAUSize = multiAU.front().size();
+            bool sendEos = false;
+            for (int idx = 0; idx < auInfo->flexCount(); ++idx) {
+                std::vector<C2ConstLinearBlock> au;
+                const C2AccessUnitInfosStruct &info = auInfo->m.values[idx];
+                sendEos |= (info.flags & C2FrameData::FLAG_END_OF_STREAM);
+                std::unique_ptr<C2Work> newWork = cloneInputWork(w, info.flags);
+                frameSet.insert(newFrameIdx);
+                newFrameIdx = mFrameIndex++;
+                newWork->input.ordinal.timestamp = info.timestamp;
+                au.push_back(multiAU.front().subBlock(offset, info.size));
+                if ((offset + info.size) > multiAUSize) {
+                    LOG(ERROR) << "ERROR: access-unit offset > buffer size"
+                            << " current offset " << (offset + info.size)
+                            << " buffer size " << multiAUSize;
+                    return C2_CORRUPTED;
+                }
+                newWork->input.buffers.push_back(
+                        std::shared_ptr<C2Buffer>(new C2MultiAccessUnitBuffer(au)));
+                LOG(DEBUG) << "Frame scatter queuing frames WITH info in ordinal "
+                        << inputOrdinal.frameIndex.peekull()
+                        << " info.size " << info.size
+                        << " : TS " << newWork->input.ordinal.timestamp.peekull()
+                        << " with index " << newFrameIdx - 1;
+                // add to worklist
+                sliceWork.push_back(std::move(newWork));
+                processedWork->push_back(std::move(sliceWork));
+                offset += info.size;
+            }
+            mFrameIndex--;
+            if (!sendEos && (w->input.flags & C2FrameData::FLAG_END_OF_STREAM)) {
+                if (!processedWork->empty()) {
+                    std::list<std::unique_ptr<C2Work>> &sliceWork = processedWork->back();
+                    if (!sliceWork.empty()) {
+                        std::unique_ptr<C2Work> &work = sliceWork.back();
+                        if (work) {
+                            work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+                        }
+                    }
+                }
+            }
+        }
+        if (!processedWork->empty()) {
+            {
+                C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+                if (mInterface->kind() == C2Component::KIND_DECODER) {
+                    uint32_t sampleRate = 0;
+                    uint32_t channelCount = 0;
+                    uint32_t frameSize = 0;
+                    mInterface->getDecoderSampleRateAndChannelCount(
+                            sampleRate, channelCount);
+                    if (sampleRate > 0 && channelCount > 0) {
+                        frameSize = channelCount * 2;
+                        multiAccessParams.maxSize =
+                                (multiAccessParams.maxSize / frameSize) * frameSize;
+                        multiAccessParams.thresholdSize =
+                                (multiAccessParams.thresholdSize / frameSize) * frameSize;
+                    }
+                }
+                frameInfo.mLargeFrameTuning = multiAccessParams;
+                std::lock_guard<std::mutex> l(mLock);
+                mFrameHolder.push_back(std::move(frameInfo));
+            }
+        }
+    }
+    return C2_OK;
+}
+
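+// Collects the per-access-unit outputs coming back from the component and
+// appends them to the large output buffer tracked in mFrameHolder, emitting a
+// finalized work item once the threshold is reached or the frame completes.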
+c2_status_t MultiAccessUnitHelper::gather(
+        std::list<std::unique_ptr<C2Work>> &c2workItems,
+        std::list<std::unique_ptr<C2Work>>* const processedWork) {
+    LOG(DEBUG) << "Multi access-unit gather process";
+    if (processedWork == nullptr) {
+        LOG(ERROR) << "Nothing provided for processed work";
+        return C2_CORRUPTED;
+    }
+    auto addOutWork = [&processedWork](std::unique_ptr<C2Work>& work) {
+        processedWork->push_back(std::move(work));
+    };
+    {
+        std::lock_guard<std::mutex> l(mLock);
+        for (auto& work : c2workItems) {
+            LOG(DEBUG) << "FrameHolder Size: " << mFrameHolder.size();
+            uint64_t thisFrameIndex = work->input.ordinal.frameIndex.peekull();
+            bool removeEntry = work->worklets.empty()
+                    || !work->worklets.front()
+                    || (work->worklets.front()->output.flags
+                        & C2FrameData::FLAG_INCOMPLETE) == 0;
+            bool foundFrame = false;
+            std::list<MultiAccessUnitInfo>::iterator frame =
+                    mFrameHolder.begin();
+            while (!foundFrame && frame != mFrameHolder.end()) {
+                auto it = frame->mComponentFrameIds.find(thisFrameIndex);
+                if (it != frame->mComponentFrameIds.end()) {
+                    foundFrame = true;
+                    LOG(DEBUG) << "onWorkDone (frameIndex " << thisFrameIndex
+                            << " worklstsSze " << work->worklets.size()
+                            << ") -> frameIndex " << frame->inOrdinal.frameIndex.peekull();
+                    if (work->result != C2_OK
+                            || work->worklets.empty()
+                            || !work->worklets.front()
+                            || (frame->mLargeFrameTuning.thresholdSize == 0
+                            || frame->mLargeFrameTuning.maxSize == 0)) {
+                        if (removeEntry) {
+                            frame->mComponentFrameIds.erase(it);
+                            removeEntry = false;
+                        }
+                        if (frame->mLargeWork) {
+                            finalizeWork(*frame);
+                            addOutWork(frame->mLargeWork);
+                            frame->reset();
+                        }
+                        c2_status_t workResult = work->result;
+                        frame->mLargeWork = std::move(work);
+                        frame->mLargeWork->input.ordinal.frameIndex =
+                                frame->inOrdinal.frameIndex;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        if (workResult != C2_OK) {
+                            frame->mAccessUnitInfos.clear();
+                        }
+                    } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
+                        LOG(DEBUG) << "Error while processing work";
+                    }
+                    if (removeEntry) {
+                        LOG(DEBUG) << "Removing entry: " << thisFrameIndex
+                                << " -> " << frame->inOrdinal.frameIndex.peekull();
+                        frame->mComponentFrameIds.erase(it);
+                    }
+                    // This is to take care of the last bytes and to decide to send with
+                    // FLAG_INCOMPLETE or not.
+                    if ((frame->mWview
+                            && (frame->mWview->offset() > frame->mLargeFrameTuning.thresholdSize))
+                            || frame->mComponentFrameIds.empty()) {
+                        if (frame->mLargeWork) {
+                            finalizeWork(*frame);
+                            addOutWork(frame->mLargeWork);
+                            frame->reset();
+                        }
+                    }
+                    if (frame->mComponentFrameIds.empty()) {
+                        LOG(DEBUG) << "This frame is finished ID " << thisFrameIndex;
+                        frame = mFrameHolder.erase(frame);
+                        continue;
+                    }
+                } else {
+                    LOG(DEBUG) << "Received an out-of-order output " << thisFrameIndex
+                            << " expected: " << mFrameHolder.front().inOrdinal.frameIndex.peekull();
+                }
+                frame++;
+            }
+            if (!foundFrame) {
+                LOG(ERROR) << "Error: Frame Holder reports no frame " << thisFrameIndex;
+            }
+        }
+    }
+    return C2_OK;
+}
+
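+// Allocates the output linear block (of size maxSize) and its write view for
+// the given frame.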
+c2_status_t MultiAccessUnitHelper::createLinearBlock(MultiAccessUnitInfo &frame) {
+    if (!mInit) {
+        LOG(ERROR) << "Large buffer allocator failed";
+        return C2_NO_MEMORY;
+    }
+    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    uint32_t maxOutSize = frame.mLargeFrameTuning.maxSize;
+    c2_status_t err = mLinearPool->fetchLinearBlock(maxOutSize, usage, &frame.mBlock);
+    if (err != C2_OK) {
+        LOG(ERROR) << "Error allocating Multi access-unit Buffer";
+        return err;
+    }
+    LOG(DEBUG) << "Allocated block with offset : " << frame.mBlock->offset()
+            << " size " << frame.mBlock->size() << " Capacity " << frame.mBlock->capacity();
+    frame.mWview = std::make_shared<C2WriteView>(frame.mBlock->map().get());
+    LOG(DEBUG) << "Allocated buffer : requested size : " <<
+            frame.mLargeFrameTuning.maxSize
+            << " alloc size " << frame.mWview->size();
+    return C2_OK;
+}
+
+/*
+ * For every work item received from the component, we try to aggregate the work here.
+ */
+c2_status_t MultiAccessUnitHelper::processWorklets(MultiAccessUnitInfo &frame,
+        std::unique_ptr<C2Work>& work,
+        const std::function <void(std::unique_ptr<C2Work>&)>& addWork) {
+    // This will allocate work, worklet, c2Block
+    auto allocateWork = [&](MultiAccessUnitInfo &frame,
+            bool allocateWorklet = false,
+            bool allocateBuffer = false) {
+        c2_status_t ret = C2_OK;
+        if (frame.mLargeWork == nullptr) {
+            frame.mLargeWork.reset(new C2Work);
+            frame.mLargeWork->input.ordinal = frame.inOrdinal;
+            frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+        }
+        if (allocateWorklet) {
+            if (frame.mLargeWork->worklets.size() == 0) {
+                frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+            }
+        }
+        if (allocateBuffer) {
+            if (frame.mWview == nullptr) {
+                ret = createLinearBlock(frame);
+            }
+        }
+        return ret;
+    };
+    // we will only have one worklet.
+    bool foundEndOfStream = false;
+    for (auto worklet = work->worklets.begin();
+             worklet != work->worklets.end() && (*worklet) != nullptr; ++worklet) {
+        uint32_t flagsForNoCopy = C2FrameData::FLAG_DROP_FRAME
+                | C2FrameData::FLAG_DISCARD_FRAME
+                | C2FrameData::FLAG_CORRUPT;
+        if ((*worklet)->output.flags & flagsForNoCopy) {
+            if (frame.mLargeWork) {
+                finalizeWork(frame);
+                addWork(frame.mLargeWork);
+                frame.reset();
+            }
+            frame.mLargeWork = std::move(work);
+            frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+            finalizeWork(frame, (*worklet)->output.flags, true);
+            addWork(frame.mLargeWork);
+            frame.reset();
+            return C2_OK;
+        }
+        c2_status_t c2ret = allocateWork(frame, true);
+        if (c2ret != C2_OK) {
+            return c2ret;
+        }
+        C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
+        if (!(*worklet)->output.configUpdate.empty()) {
+            for (auto& configUpdate : (*worklet)->output.configUpdate) {
+                outputFramedata.configUpdate.push_back(std::move(configUpdate));
+            }
+            (*worklet)->output.configUpdate.clear();
+        }
+        outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
+                (*worklet)->output.infoBuffers.begin(),
+                (*worklet)->output.infoBuffers.end());
+        int64_t sampleTimeUs = 0;
+        uint32_t frameSize = 0;
+        uint32_t sampleRate = 0;
+        uint32_t channelCount = 0;
+        mInterface->getDecoderSampleRateAndChannelCount(sampleRate, channelCount);
+        if (sampleRate > 0 && channelCount > 0) {
+            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+            frameSize = channelCount * 2;
+        }
+        LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
+                << " threshold " << frame.mLargeFrameTuning.thresholdSize;
+        if ((*worklet)->output.buffers.size() > 0) {
+            allocateWork(frame, true, true);
+        }
+        LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
+                << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
+        int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
+        int64_t timestamp = workletTimestamp;
+        uint32_t flagsForCopy =  ((*worklet)->output.flags) & C2FrameData::FLAG_CODEC_CONFIG;
+        for (int bufIdx = 0; bufIdx < (*worklet)->output.buffers.size(); ++bufIdx) {
+            std::shared_ptr<C2Buffer>& buffer = (*worklet)->output.buffers[bufIdx];
+            if (!buffer || buffer->data().linearBlocks().empty()) {
+                continue;
+            }
+            const std::vector<C2ConstLinearBlock>& blocks = buffer->data().linearBlocks();
+            if (blocks.size() > 0) {
+                uint32_t inputOffset = 0;
+                uint32_t inputSize = blocks.front().size();
+                frame.mInfos.insert(frame.mInfos.end(),
+                        buffer->info().begin(), buffer->info().end());
+                if (frameSize != 0 && (mInterface->kind() == C2Component::KIND_DECODER)) {
+                    // For decoders we only split into multiples of the frame size
+                    // (channelCount * 2 bytes, assuming 16-bit samples).
+                    inputSize -= (inputSize % frameSize);
+                }
+                while (inputOffset < inputSize) {
+                    if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+                        frame.mLargeWork->result = C2_OK;
+                        finalizeWork(frame, flagsForCopy);
+                        addWork(frame.mLargeWork);
+                        frame.reset();
+                        allocateWork(frame, true, true);
+                    }
+                    if (mInterface->kind() == C2Component::KIND_ENCODER) {
+                        if (inputSize > frame.mLargeFrameTuning.maxSize) {
+                            LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
+                                    << frame.mLargeFrameTuning.maxSize
+                                    << " block size: " << blocks.front().size()
+                                    << "alloc size " << frame.mWview->size();
+                            if (frame.mLargeWork
+                                    && frame.mWview && frame.mWview->offset() > 0) {
+                                finalizeWork(frame, flagsForCopy);
+                                addWork(frame.mLargeWork);
+                                frame.reset();
+                                allocateWork(frame, true, false);
+                            }
+                            frame.mLargeWork->result = C2_NO_MEMORY;
+                            finalizeWork(frame, 0, true);
+                            addWork(frame.mLargeWork);
+                            frame.reset();
+                            return C2_NO_MEMORY;
+                        } else if (inputSize > frame.mWview->size()) {
+                            LOG(DEBUG) << "Enc: Large frame hitting buffer limit, current size "
+                                << frame.mWview->offset();
+                            if (frame.mLargeWork
+                                    && frame.mWview && frame.mWview->offset() > 0) {
+                                finalizeWork(frame, flagsForCopy);
+                                addWork(frame.mLargeWork);
+                                frame.reset();
+                                allocateWork(frame, true, true);
+                            }
+                        }
+                    }
+                    C2ReadView rView = blocks.front().map().get();
+                    if (rView.error()) {
+                        LOG(ERROR) << "Buffer read view error";
+                        frame.mLargeWork->result = rView.error();
+                        frame.mLargeWork->worklets.clear();
+                        finalizeWork(frame, 0, true);
+                        addWork(frame.mLargeWork);
+                        frame.reset();
+                        return C2_NO_MEMORY;
+                    }
+                    uint32_t toCopy = 0;
+                    if (mInterface->kind() == C2Component::KIND_ENCODER) {
+                        toCopy = inputSize;
+                    } else {
+                        toCopy = c2_min(frame.mWview->size(), (inputSize - inputOffset));
+                        timestamp = workletTimestamp + inputOffset * sampleTimeUs;
+                        LOG(DEBUG) << "ts " << timestamp
+                                << " copiedOutput " << inputOffset
+                                << " sampleTimeUs " << sampleTimeUs;
+                    }
+                    LOG(DEBUG) << " Copy size " << toCopy
+                            << " ts " << timestamp;
+                    memcpy(frame.mWview->data(), rView.data() + inputOffset, toCopy);
+                    frame.mWview->setOffset(frame.mWview->offset() + toCopy);
+                    inputOffset += toCopy;
+                    mergeAccessUnitInfo(frame, flagsForCopy, toCopy, timestamp);
+                }
+            } else {
+                frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(buffer));
+                LOG(DEBUG) << "Copying worklets without linear buffer";
+            }
+        }
+        uint32_t flagsForCsdOrEnd = (*worklet)->output.flags
+                & (C2FrameData::FLAG_END_OF_STREAM | C2FrameData::FLAG_CODEC_CONFIG);
+        if (flagsForCsdOrEnd) {
+            LOG(DEBUG) << "Output worklet has CSD/EOS data";
+            frame.mLargeWork->result = C2_OK;
+            // we can assign timestamp as this will be evaluated in finalizeWork
+            frame.mLargeWork->worklets.front()->output.ordinal.timestamp = timestamp;
+            finalizeWork(frame, flagsForCsdOrEnd, true);
+            addWork(frame.mLargeWork);
+            frame.reset();
+        }
+    }
+    return C2_OK;
+}
+
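+// Prepares frame.mLargeWork for delivery: restores the original input ordinal,
+// sets FLAG_INCOMPLETE when more outputs are still pending, attaches the
+// aggregated linear buffer along with its C2AccessUnitInfos, and releases the
+// staging block and write view.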
+c2_status_t MultiAccessUnitHelper::finalizeWork(
+        MultiAccessUnitInfo& frame, uint32_t inFlags, bool forceComplete) {
+    if (frame.mLargeWork == nullptr) {
+        return C2_OK;
+    }
+    //prepare input ordinal
+    frame.mLargeWork->input.ordinal = frame.inOrdinal;
+    // remove this
+    int64_t timeStampUs = frame.inOrdinal.timestamp.peekull();
+    if (!frame.mAccessUnitInfos.empty()) {
+        timeStampUs = frame.mAccessUnitInfos.front().timestamp;
+    } else if (!frame.mLargeWork->worklets.empty()) {
+        std::unique_ptr<C2Worklet> &worklet = frame.mLargeWork->worklets.front();
+        if (worklet) {
+            timeStampUs = worklet->output.ordinal.timestamp.peekull();
+        }
+    }
+    LOG(DEBUG) << "Finalizing work with input Idx "
+            << frame.mLargeWork->input.ordinal.frameIndex.peekull()
+            << " timestamp " << timeStampUs;
+    uint32_t finalFlags = 0;
+    if ((!forceComplete)
+            && (frame.mLargeWork->result == C2_OK)
+            && (!frame.mComponentFrameIds.empty())) {
+        finalFlags |= C2FrameData::FLAG_INCOMPLETE;
+    }
+    if (frame.mLargeWork->result == C2_OK) {
+        finalFlags |= inFlags;
+    }
+    // update worklet if present
+    if (!frame.mLargeWork->worklets.empty() &&
+            frame.mLargeWork->worklets.front() != nullptr) {
+        frame.mLargeWork->workletsProcessed = 1;
+        C2FrameData& outFrameData = frame.mLargeWork->worklets.front()->output;
+        outFrameData.ordinal.frameIndex = frame.inOrdinal.frameIndex.peekull();
+        outFrameData.ordinal.timestamp = timeStampUs;
+        finalFlags |= frame.mLargeWork->worklets.front()->output.flags;
+        outFrameData.flags = (C2FrameData::flags_t)finalFlags;
+        // update buffers
+        if (frame.mBlock && (frame.mWview->offset() > 0)) {
+            size_t size = frame.mWview->offset();
+            LOG(DEBUG) << "Finalize : Block: Large frame size set as " << size
+                    << " timestamp as " << timeStampUs
+                    << "frameIndex " << outFrameData.ordinal.frameIndex.peekull();
+            frame.mWview->setOffset(0);
+            std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
+                    frame.mBlock->share(0, size, ::C2Fence()));
+            if (frame.mAccessUnitInfos.size() > 0) {
+                if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+                    frame.mAccessUnitInfos.back().flags |=
+                            C2FrameData::FLAG_END_OF_STREAM;
+                }
+                std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+                        C2AccessUnitInfos::output::AllocShared(
+                        frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+                frame.mInfos.push_back(largeFrame);
+                frame.mAccessUnitInfos.clear();
+            }
+            for (auto &info : frame.mInfos) {
+                c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+            }
+            frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
+            frame.mInfos.clear();
+            frame.mBlock.reset();
+            frame.mWview.reset();
+        }
+    }
+    LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
+    return C2_OK;
+}
+
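+// Records access-unit info for the bytes just copied; for decoders, consecutive
+// entries with identical flags are merged by extending the size of the last entry.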
+void MultiAccessUnitHelper::mergeAccessUnitInfo(
+        MultiAccessUnitInfo &frame,
+        uint32_t flags_,
+        uint32_t size,
+        int64_t timestamp) {
+    // Remove flags that are not part of Access unit info
+    uint32_t flags = flags_ & ~(C2FrameData::FLAG_INCOMPLETE
+            | C2FrameData::FLAG_DISCARD_FRAME
+            | C2FrameData::FLAG_CORRUPT
+            | C2FrameData::FLAG_CORRECTED);
+    if (frame.mAccessUnitInfos.empty()) {
+        frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+        return;
+    }
+    if ((mInterface->kind() == C2Component::KIND_DECODER) &&
+            (frame.mAccessUnitInfos.back().flags == flags)) {
+        // merge access units here
+        C2AccessUnitInfosStruct &s = frame.mAccessUnitInfos.back();
+        s.size += size; // don't have to update timestamp
+    } else {
+        frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+    }
+}
+
+void MultiAccessUnitHelper::MultiAccessUnitInfo::reset() {
+    mBlock.reset();
+    mWview.reset();
+    mInfos.clear();
+    mAccessUnitInfos.clear();
+    mLargeWork.reset();
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
new file mode 100644
index 0000000..a90ae56
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+#define CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <set>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+struct MultiAccessUnitHelper;
+
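+/*
+ * Helper interface that exposes and validates the C2LargeFrame (multi access-unit)
+ * parameters on top of a component's C2ComponentInterface.
+ */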
+struct MultiAccessUnitInterface : public C2InterfaceHelper {
+    explicit MultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface>& interface,
+            std::shared_ptr<C2ReflectorHelper> helper);
+
+    bool isParamSupported(C2Param::Index index);
+    C2LargeFrame::output getLargeFrameParam() const;
+    C2Component::kind_t kind() const;
+    bool isValidField(const C2ParamField &field) const;
+
+protected:
+    void getDecoderSampleRateAndChannelCount(
+            uint32_t &sampleRate_, uint32_t &channelCount_) const;
+    const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
+    std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
+    C2ComponentKindSetting mKind;
+    std::set<C2Param::Index> mSupportedParamIndexSet;
+    std::vector<C2ParamField> mParamFields;
+
+    friend struct MultiAccessUnitHelper;
+};
+
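+/*
+ * Helper that scatters large input buffers into individual access units before
+ * queueing and gathers the resulting outputs back into large frames, based on
+ * the configured C2LargeFrame parameters.
+ */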
+struct MultiAccessUnitHelper {
+public:
+    MultiAccessUnitHelper(
+            const std::shared_ptr<MultiAccessUnitInterface>& intf);
+
+    virtual ~MultiAccessUnitHelper();
+
+    static bool isEnabledOnPlatform();
+
+    /*
+     * Scatters the incoming linear buffer into access-unit sized buffers
+     * based on the access-unit info.
+     */
+    c2_status_t scatter(
+            std::list<std::unique_ptr<C2Work>> &c2workItems,
+            std::list<std::list<std::unique_ptr<C2Work>>> * const processedWork);
+
+    /*
+     * Gathers different access-units into a single buffer based on the scatter list
+     * and the configured max and threshold sizes. This also generates the associated
+     * access-unit information and attaches it to the final result.
+     */
+    c2_status_t gather(
+            std::list<std::unique_ptr<C2Work>> &c2workItems,
+            std::list<std::unique_ptr<C2Work>> * const processedWork);
+
+    /*
+     * Flushes the codec and generates the list of flushed buffers.
+     */
+    c2_status_t flush(
+            std::list<std::unique_ptr<C2Work>> * const c2flushedWorks);
+
+    /*
+     * Gets all the pending buffers under generation in c2workItems.
+     */
+    c2_status_t error(std::list<std::unique_ptr<C2Work>> * const c2workItems);
+
+    /*
+     * Get the interface object of this handler.
+     */
+    std::shared_ptr<MultiAccessUnitInterface> getInterface();
+
+    /*
+     * Gets the status of the object. This really is to make sure that
+     * all the allocators are configured properly within the handler.
+     */
+    bool getStatus();
+
+    /*
+     * Resets the structures inside the handler.
+     */
+    void reset();
+
+protected:
+
+    struct MultiAccessUnitInfo {
+        /*
+         * From the input
+         * Ordinal of the input frame
+         */
+        C2WorkOrdinalStruct inOrdinal;
+
+        /*
+         * Frame indexes of the scattered buffers
+         */
+        std::set<uint64_t> mComponentFrameIds;
+
+        /*
+         * For the output
+         * Current output block.
+         */
+        std::shared_ptr<C2LinearBlock> mBlock;
+
+        /*
+         * Write view of current block
+         */
+        std::shared_ptr<C2WriteView> mWview;
+
+        /*
+         * C2Info related to the current mBlock
+         */
+        std::vector<std::shared_ptr<const C2Info>> mInfos;
+
+        /*
+         * C2AccessUnitInfos for the current buffer
+         */
+        std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
+
+        /*
+         * Current tuning used to process this input work
+         */
+        C2LargeFrame::output mLargeFrameTuning;
+
+        /*
+         * Current output C2Work being processed
+         */
+        std::unique_ptr<C2Work> mLargeWork;
+
+        MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal) : inOrdinal(ordinal) {
+        }
+
+        /*
+         * Resets this frame
+         */
+        void reset();
+    };
+
+    /*
+     * Creates a linear block to be used with work
+     */
+    c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
+
+    /*
+     * Processes worklets from the component
+     */
+    c2_status_t processWorklets(MultiAccessUnitInfo &frame,
+                std::unique_ptr<C2Work> &work,
+                const std::function <void(std::unique_ptr<C2Work>&)> &addWork);
+
+    /*
+     * Finalizes the work to be send out.
+     */
+    c2_status_t finalizeWork(MultiAccessUnitInfo &frame,
+            uint32_t flags = 0, bool forceComplete = false);
+
+    /*
+     * Merges different access unit infos if possible
+     */
+    void mergeAccessUnitInfo(MultiAccessUnitInfo &frame,
+            uint32_t flags,
+            uint32_t size,
+            int64_t timestamp);
+
+    bool mInit;
+
+    // Interface of this module
+    std::shared_ptr<MultiAccessUnitInterface> mInterface;
+    // Local pool id used for output buffer allocation
+    C2BlockPool::local_id_t mBlockPoolId;
+    // C2Blockpool for output buffer allocation
+    std::shared_ptr<C2BlockPool> mLinearPool;
+    // Allocator for output buffer allocation
+    std::shared_ptr<C2Allocator> mLinearAllocator;
+    // FrameIndex for the current outgoing work
+    std::atomic_uint64_t mFrameIndex;
+    // Mutex to protect mFrameHolder
+    std::mutex mLock;
+    // List of Infos that contains the input and
+    // output work and buffer objects
+    std::list<MultiAccessUnitInfo> mFrameHolder;
+};
+
+}  // namespace android
+
+#endif  // CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
diff --git a/media/codec2/hal/hidl/1.0/utils/Android.bp b/media/codec2/hal/hidl/1.0/utils/Android.bp
index 2f2ecd1..9646a0b 100644
--- a/media/codec2/hal/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.0/utils/Android.bp
@@ -52,7 +52,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 0aeed08..e32e6ae 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -135,6 +135,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &handler):
+        Listener(component), mHandler(handler) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHandler) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHandler->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHandler) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHandler->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+    protected:
+        std::shared_ptr<MultiAccessUnitHelper> mHandler;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +254,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -240,7 +287,6 @@
 // Methods from ::android::hardware::media::c2::V1_0::IComponent
 Return<Status> Component::queue(const WorkBundle& workBundle) {
     std::list<std::unique_ptr<C2Work>> c2works;
-
     if (!objcpy(&c2works, workBundle)) {
         return Status::CORRUPTED;
     }
@@ -252,7 +298,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
-
+    c2_status_t err = C2_OK;
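+    // With multi access-unit handling enabled, split the incoming works into
+    // per-access-unit work lists and queue each list to the component.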
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error Queuing to component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
 
@@ -261,6 +319,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
 
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -469,6 +530,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +543,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -501,8 +568,14 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
+
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
         mInit = res;
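
The queue()/listener changes above funnel multi-access-unit work through MultiAccessUnitHelper:
scatter() splits one aggregated input into per-frame work lists before they reach the component,
and gather() in the wrapped listener reassembles the per-frame outputs before they are returned to
the client. A minimal sketch of that pairing, assuming the codec2 common headers; the two
free-function names are illustrative and not part of this change:

    #include <codec2/common/MultiAccessUnitHelper.h>  // helper used by the patch above
    #include <C2Component.h>
    #include <C2Work.h>

    #include <list>
    #include <memory>

    // Input side: split one multi-access-unit work list into per-frame lists and queue each.
    c2_status_t queueLargeFrameInput(const std::shared_ptr<android::MultiAccessUnitHelper>& helper,
                                     const std::shared_ptr<C2Component>& component,
                                     std::list<std::unique_ptr<C2Work>>& inputWorks) {
        std::list<std::list<std::unique_ptr<C2Work>>> perFrameLists;
        helper->scatter(inputWorks, &perFrameLists);
        for (auto& worklist : perFrameLists) {
            c2_status_t err = component->queue_nb(&worklist);
            if (err != C2_OK) {
                return err;
            }
        }
        return C2_OK;
    }

    // Output side: what MultiAccessUnitListener::onWorkDone_nb() does with finished work --
    // stitch the per-frame outputs back into large frames before forwarding them.
    void gatherLargeFrameOutput(const std::shared_ptr<android::MultiAccessUnitHelper>& helper,
                                std::list<std::unique_ptr<C2Work>> finishedWorks,
                                std::list<std::unique_ptr<C2Work>>* reassembledWorks) {
        helper->gather(finishedWorks, reassembledWorks);
    }
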
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 12078e0..41a8904 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -25,7 +25,6 @@
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
-#include <C2BqBufferPriv.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -45,9 +44,10 @@
 
 // Implementation of ConfigurableC2Intf based on C2ComponentInterface
 struct CompIntf : public ConfigurableC2Intf {
-    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
         ConfigurableC2Intf{intf->getName(), intf->getId()},
-        mIntf{intf} {
+        mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
     }
 
     virtual c2_status_t config(
@@ -55,7 +55,34 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures
             ) override {
-        return mIntf->config_vb(params, mayBlock, failures);
+        std::vector<C2Param*> paramsToIntf;
+        std::vector<C2Param*> paramsToLargeFrameIntf;
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->config_vb(params, mayBlock, failures);
+            return err;
+        }
+        for (auto &p : params) {
+            if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+                paramsToLargeFrameIntf.push_back(p);
+            } else {
+                paramsToIntf.push_back(p);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+        }
+        if (err1 != C2_OK) {
+            LOG(ERROR) << "Failed to config params on the component interface";
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->config(
+                    paramsToLargeFrameIntf, mayBlock, failures);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t query(
@@ -63,33 +90,100 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2Param>>* const params
             ) const override {
-        return mIntf->query_vb({}, indices, mayBlock, params);
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->query_vb({}, indices, mayBlock, params);
+            return err;
+        }
+        std::vector<C2Param::Index> paramsToIntf;
+        std::vector<C2Param::Index> paramsToLargeFrameIntf;
+        for (auto &i : indices) {
+            if (mMultiAccessUnitIntf->isParamSupported(i)) {
+                paramsToLargeFrameIntf.push_back(i);
+            } else {
+                paramsToIntf.push_back(i);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->query(
+                    {}, paramsToLargeFrameIntf, mayBlock, params);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t querySupportedParams(
             std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
             ) const override {
-        return mIntf->querySupportedParams_nb(params);
+        c2_status_t err = mIntf->querySupportedParams_nb(params);
+        if (mMultiAccessUnitIntf != nullptr) {
+            err = mMultiAccessUnitIntf->querySupportedParams(params);
+        }
+        return err;
     }
 
     virtual c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const override {
-        return mIntf->querySupportedValues_vb(fields, mayBlock);
+        if (mMultiAccessUnitIntf == nullptr) {
+            return mIntf->querySupportedValues_vb(fields, mayBlock);
+        }
+        std::vector<C2FieldSupportedValuesQuery> dup = fields;
+        std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+        std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+        c2_status_t err = C2_OK;
+        for (int i = 0; i < fields.size(); i++) {
+            const C2ParamField &field = fields[i].field();
+            uint32_t queryArrayIdx = 1;
+            if (mMultiAccessUnitIntf->isValidField(field)) {
+                queryArrayIdx = 0;
+            }
+            queryMap[field] = std::make_pair(
+                    queryArrayIdx, queryArray[queryArrayIdx].size());
+            queryArray[queryArrayIdx].push_back(fields[i]);
+        }
+        if (queryArray[0].size() > 0) {
+            err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+        }
+        if (queryArray[1].size() > 0) {
+             err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+        }
+        for (int i = 0; i < dup.size(); i++) {
+            auto it = queryMap.find(dup[i].field());
+            if (it != queryMap.end()) {
+                std::pair<uint32_t, size_t> queryid = it->second;
+                fields[i] = queryArray[queryid.first][queryid.second];
+            }
+        }
+        return err;
     }
 
 protected:
     std::shared_ptr<C2ComponentInterface> mIntf;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
 };
 
 } // unnamed namespace
 
+
 // ComponentInterface
 ComponentInterface::ComponentInterface(
         const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
         const std::shared_ptr<ParameterCache>& cache)
       : mInterface{intf},
-        mConfigurable{new CachedConfigurable(std::make_unique<CompIntf>(intf))} {
+        mConfigurable{new CachedConfigurable(
+                std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
     mInit = mConfigurable->init(cache);
 }
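
With the CompIntf wrapper above, parameter traffic is split per index: parameters that
MultiAccessUnitInterface::isParamSupported() claims (the large-frame knobs such as
C2LargeFrame::output) are handled by the multi-access-unit interface, while everything else still
reaches the component's own interface. A hedged, caller-side sketch of configuring those knobs,
mirroring the VTS helper later in this change; the function name and the 2 MiB / 1 MiB values are
illustrative only:

    #include <C2Config.h>            // C2LargeFrame
    #include <codec2/hidl/client.h>  // android::Codec2Client

    #include <memory>
    #include <vector>

    c2_status_t configureLargeFrame(const std::shared_ptr<android::Codec2Client::Component>& comp) {
        C2LargeFrame::output largeFrameParams(0u /* stream */,
                                              2 * 1024 * 1024 /* maxSize */,
                                              1 * 1024 * 1024 /* thresholdSize */);
        std::vector<C2Param*> configParam{&largeFrameParams};
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        // On the service side, the CompIntf routing above is expected to send this parameter to
        // MultiAccessUnitInterface::config(); all other parameters still reach the component.
        return comp->config(configParam, C2_DONT_BLOCK, &failures);
    }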
 
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 1c0d5b0..988ab6f 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -242,7 +272,9 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
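
tryCreateMultiAccessUnitInterface() above only wraps a component when three conditions line up:
the platform opt-in reported by MultiAccessUnitHelper::isEnabledOnPlatform(), an audio-domain
component, and no native support for C2_PARAMKEY_OUTPUT_LARGE_FRAME advertised by the component
itself. A condensed, purely illustrative restatement of that gate:

    // Illustrative restatement of the gating above; not part of the patch.
    bool shouldWrapWithMultiAccessUnit(bool platformEnabled,
                                       bool isAudioDomain,
                                       bool componentSupportsLargeFramesNatively) {
        return platformEnabled && isAudioDomain && !componentSupportsLargeFramesNatively;
    }
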
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index 3f55618..aed94ec 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -30,6 +30,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -54,6 +56,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -113,6 +117,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -135,6 +141,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
index 9102f92..2995faf 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
@@ -23,10 +23,15 @@
 #include <android/hardware/media/c2/1.0/IComponentInterface.h>
 #include <hidl/Status.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
 #include <C2.h>
 
+#include <set>
 #include <memory>
 
 namespace android {
@@ -39,6 +44,7 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::sp;
+using ::android::MultiAccessUnitInterface;
 
 struct ComponentStore;
 
@@ -46,6 +52,11 @@
     ComponentInterface(
             const std::shared_ptr<C2ComponentInterface>& interface,
             const std::shared_ptr<ParameterCache>& cache);
+
+    ComponentInterface(
+            const std::shared_ptr<C2ComponentInterface>& interface,
+            const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+            const std::shared_ptr<ParameterCache>& cache);
     c2_status_t status() const;
     virtual Return<sp<IConfigurable>> getConfigurable() override;
 
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index 27e2a05..b5d85da 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -78,6 +78,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
index 1c5c7d6..ccdde5e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -44,6 +45,7 @@
         "res/bbb_opus_stereo_128kbps_48000hz.info",
         "res/bbb_opus_stereo_128kbps_48000hz.opus",
         "res/bbb_raw_1ch_8khz_s32le.info",
+        "res/bbb_raw_1ch_8khz_s32le_largeframe.info",
         "res/bbb_raw_1ch_8khz_s32le.raw",
         "res/bbb_vorbis_stereo_128kbps_48000hz.info",
         "res/bbb_vorbis_stereo_128kbps_48000hz.vorbis",
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
index 624aad2..2b1bca0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index c7c04c5..0c30d95 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -32,7 +32,11 @@
 #include <codec2/hidl/client.h>
 
 #include "media_c2_hidl_test_common.h"
-using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+
+using DecodeTestParameters = std::tuple<std::string /*instance_name*/,
+        std::string /*component_name*/,
+        uint32_t /*stream_index*/,
+        bool /*signal end-of-stream or not*/>;
 static std::vector<DecodeTestParameters> gDecodeTestParameters;
 
 using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
@@ -56,6 +60,7 @@
         {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
         {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
         {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+        {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le_largeframe.info"},
         {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
 };
 
@@ -137,6 +142,9 @@
     struct outputMetaData {
         uint64_t timestampUs;
         uint32_t rangeLength;
+        // The following is used only if C2AccessUnitInfos::output
+        // is present as part of C2Buffer.
+        std::vector<C2AccessUnitInfosStruct> largeFrameInfo;
     };
     // callback function to process onWorkDone received by Listener
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -161,8 +169,18 @@
                                                    .capacity();
                     // List of timestamp values and output size to calculate timestamp
                     if (mTimestampDevTest) {
-                        outputMetaData meta = {mTimestampUs, rangeLength};
+                        outputMetaData meta = {mTimestampUs, rangeLength, {}};
                         oBufferMetaData.push_back(meta);
+                        std::shared_ptr<const C2AccessUnitInfos::output> inBufferInfo =
+                                std::static_pointer_cast<const C2AccessUnitInfos::output>(
+                                work->worklets.front()->output.buffers[0]->getInfo(
+                                C2AccessUnitInfos::output::PARAM_TYPE));
+                        if (inBufferInfo) {
+                            for (int nMeta = 0; nMeta < inBufferInfo->flexCount(); nMeta++) {
+                                oBufferMetaData.back().largeFrameInfo.push_back(
+                                        inBufferInfo->m.values[nMeta]);
+                            }
+                        }
                     }
                 }
                 bool mCsd = false;
@@ -203,6 +221,12 @@
     std::string mInfoFile;
     size_t mStreamIndex = 0;
 
+    // These are used only with large audio frame codecs.
+    // Specifies the maximum output size in bytes.
+    uint32_t mMaxOutputSize;
+    // Specifies the threshold output size in bytes.
+    uint32_t mOutputThresholdSize;
+
   protected:
     static void description(const std::string& description) {
         RecordProperty("description", description);
@@ -249,6 +273,96 @@
     ALOGV("Component Valid");
 }
 
+bool isLargeAudioFrameSupported(const std::shared_ptr<android::Codec2Client::Component> &comp,
+        std::vector<C2FieldSupportedValues>& supportedValues) {
+    C2LargeFrame::output largeFrameParams;
+    std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+            C2FieldSupportedValuesQuery::Current(
+                    C2ParamField(&largeFrameParams, &C2LargeFrame::maxSize)),
+            C2FieldSupportedValuesQuery::Current(
+                    C2ParamField(&largeFrameParams,
+                            &C2LargeFrame::thresholdSize))};
+    c2_status_t c2err = comp->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
+    if (c2err != C2_OK || validValueInfos.size() != 2) {
+        return false;
+    }
+    supportedValues.clear();
+    for (int i = 0; i < 2; i++) {
+        if (validValueInfos[i].values.type == C2FieldSupportedValues::EMPTY) {
+            return false;
+        }
+        supportedValues.push_back(validValueInfos[i].values);
+    }
+    return true;
+}
+
+c2_status_t configureLargeFrameParams(const std::shared_ptr<android::Codec2Client::Component> &comp,
+        uint32_t& maxOutput, uint32_t& outputThreshold,
+        const std::vector<C2FieldSupportedValues>& supportedValues) {
+
+    if (supportedValues.empty()) {
+        ALOGE("Error: No supported values in large audio frame params");
+        return C2_BAD_VALUE;
+    }
+
+    auto boundBySupportedValues = [](const C2FieldSupportedValues& supportedValues, uint32_t& value)
+            -> c2_status_t {
+        uint32_t oBufMin = 0, oBufMax = 0;
+        switch (supportedValues.type) {
+            case C2FieldSupportedValues::type_t::RANGE:
+            {
+                const auto& range = supportedValues.range;
+                oBufMax = (uint32_t)(range.max).ref<uint32_t>();
+                oBufMin = (uint32_t)(range.min).ref<uint32_t>();
+                value = (value > oBufMax) ? oBufMax :
+                        (value < oBufMin) ? oBufMin : value;
+                break;
+            }
+
+            case C2FieldSupportedValues::type_t::VALUES:
+            {
+                uint32_t lastValue = 0;  // guard against an empty supported-values list
+                for (const C2Value::Primitive& prim : supportedValues.values) {
+                    lastValue = (uint32_t)prim.ref<uint32_t>();
+                    if (lastValue > value) {
+                        value = lastValue;
+                        break;
+                    }
+                }
+                if (value > lastValue) {
+                    value = lastValue;
+                }
+                break;
+            }
+
+            default:
+                return C2_BAD_VALUE;
+        }
+        return C2_OK;
+    };
+    c2_status_t c2_err = boundBySupportedValues(supportedValues[0], maxOutput);
+    if (c2_err != C2_OK) {
+        return c2_err;
+    }
+    c2_err = boundBySupportedValues(supportedValues[1], outputThreshold);
+    if (c2_err != C2_OK) {
+        return c2_err;
+    }
+    if (outputThreshold > maxOutput) {
+        outputThreshold = maxOutput;
+    }
+    ALOGV("Setting large frame format : Max: %d - Threshold: %d", maxOutput, outputThreshold);
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    C2LargeFrame::output largeFrameParams(0u, maxOutput, outputThreshold);
+    std::vector<C2Param*> configParam{&largeFrameParams};
+    c2_status_t status = comp->config(configParam, C2_DONT_BLOCK, &failures);
+    if (status != C2_OK || failures.size() != 0u) {
+        ALOGE("Large frame Audio configuration failed for maxSize: %d, thresholdSize: %d",
+                maxOutput, outputThreshold);
+    }
+    return status;
+}
+
 // Set Default config param.
 bool setupConfigParam(const std::shared_ptr<android::Codec2Client::Component>& component,
                       int32_t* bitStreamInfo) {
@@ -317,6 +431,10 @@
     typedef std::unique_lock<std::mutex> ULock;
     int frameID = offset;
     int maxRetry = 0;
+    std::shared_ptr<C2Buffer> buffer;
+    std::vector<C2FieldSupportedValues> largeFrameValues;
+    bool isComponentSupportsLargeAudioFrame = isLargeAudioFrameSupported(component,
+            largeFrameValues);
     while (1) {
         if (frameID == (int)Info->size() || frameID == (offset + range)) break;
         uint32_t flags = 0;
@@ -376,7 +494,17 @@
 
             memcpy(view.base(), data, size);
 
-            work->input.buffers.emplace_back(new LinearBuffer(block));
+            buffer.reset(new LinearBuffer(block));
+            if (!(*Info)[frameID].largeFrameInfo.empty() && isComponentSupportsLargeAudioFrame) {
+                const std::vector<C2AccessUnitInfosStruct>& meta =
+                        (*Info)[frameID].largeFrameInfo;
+                ALOGV("Large Audio frame supported for %s, frameID: %d, size: %zu",
+                        component->getName().c_str(), frameID, meta.size());
+                const std::shared_ptr<C2AccessUnitInfos::input> largeFrame =
+                        C2AccessUnitInfos::input::AllocShared(meta.size(), 0u, meta);
+                buffer->setInfo(largeFrame);
+            }
+            work->input.buffers.push_back(buffer);
             free(data);
         }
         work->worklets.clear();
@@ -403,9 +531,37 @@
     auto itOut = oBufferMetaData.begin();
     EXPECT_EQ(*itIn, itOut->timestampUs);
     uint64_t expectedTimeStamp = *itIn;
-    while (itOut != oBufferMetaData.end()) {
+    bool err = false;
+    while (!err && itOut != oBufferMetaData.end()) {
         EXPECT_EQ(expectedTimeStamp, itOut->timestampUs);
         if (expectedTimeStamp != itOut->timestampUs) break;
+        if (!itOut->largeFrameInfo.empty()) {
+            // checking large audio frame metadata
+            if (itOut->largeFrameInfo[0].timestamp != itOut->timestampUs) {
+                ALOGE("Metadata first timestamp doesn't match");
+                err = true;
+                break;
+            }
+            uint64_t totalSize = 0;
+            uint64_t sampleSize = 0;
+            int64_t nextTimestamp = itOut->timestampUs;
+            for (auto& meta : itOut->largeFrameInfo) {
+                if (nextTimestamp != meta.timestamp) {
+                    ALOGE("Metadata timestamp error: expected: %lld, got: %lld",
+                            (long long)nextTimestamp, (long long)meta.timestamp);
+                    err = true;
+                    break;
+                }
+                totalSize += meta.size;
+                sampleSize = (meta.size / (nChannels * 2));
+                nextTimestamp += sampleSize * 1000000ll / nSampleRate;
+            }
+            if (totalSize != itOut->rangeLength) {
+                ALOGE("Metadata size error: expected: %lld, got: %u",
+                        (long long)totalSize, itOut->rangeLength);
+                err = true;
+            }
+        }
         // buffer samples = ((total bytes) / (ac * (bits per sample / 8))
         samplesReceived += ((itOut->rangeLength) / (nChannels * 2));
         expectedTimeStamp = samplesReceived * 1000000ll / nSampleRate;
@@ -453,7 +609,8 @@
     android::Vector<FrameInfo> Info;
 
     int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile <<
+            " #CSD " << numCsds;
 
     // Reset total no of frames received
     mFramesReceived = 0;
@@ -474,10 +631,23 @@
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
+    getInputChannelInfo(mComponent, mMime, bitStreamInfo);
+    std::vector<C2FieldSupportedValues> supportedValues;
+    if (!Info.top().largeFrameInfo.empty()) {
+        if (!isLargeAudioFrameSupported(mComponent, supportedValues)) {
+            GTEST_SKIP() << "Component does not support large audio frames";
+        }
+        // time_sec * sample_rate * channel_count * 2 (bytes_per_channel)
+        mMaxOutputSize = 60 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        mOutputThresholdSize = 50 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        ASSERT_EQ(configureLargeFrameParams(mComponent, mMaxOutputSize,
+                mOutputThresholdSize, supportedValues), C2_OK);
+    }
     ASSERT_EQ(mComponent->start(), C2_OK);
     std::ifstream eleStream;
     eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
+
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, &Info, 0,
                                           (int)Info.size(), signalEOS));
@@ -529,6 +699,18 @@
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
+    getInputChannelInfo(mComponent, mMime, bitStreamInfo);
+    std::vector<C2FieldSupportedValues> supportedValues;
+    if (!Info.top().largeFrameInfo.empty()) {
+        if (!isLargeAudioFrameSupported(mComponent, supportedValues)) {
+            GTEST_SKIP() << "Component does not support large audio frames";
+        }
+        // time_sec * sample_rate * channel_count * 2 (bytes_per_channel)
+        mMaxOutputSize = 60 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        mOutputThresholdSize = 50 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        ASSERT_EQ(configureLargeFrameParams(mComponent, mMaxOutputSize,
+                mOutputThresholdSize, supportedValues), C2_OK);
+    }
     ASSERT_EQ(mComponent->start(), C2_OK);
 
     // request EOS for thumbnail
@@ -611,6 +793,18 @@
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
+    getInputChannelInfo(mComponent, mMime, bitStreamInfo);
+    std::vector<C2FieldSupportedValues> supportedValues;
+    if (!Info.top().largeFrameInfo.empty()) {
+        if (!isLargeAudioFrameSupported(mComponent, supportedValues)) {
+            GTEST_SKIP() << "Component does not support large audio frames";
+        }
+        // time_sec * sample_rate * channel_count * 2 (bytes_per_channel)
+        mMaxOutputSize = 60 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        mOutputThresholdSize = 50 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        ASSERT_EQ(configureLargeFrameParams(mComponent, mMaxOutputSize,
+                mOutputThresholdSize, supportedValues), C2_OK);
+    }
     ASSERT_EQ(mComponent->start(), C2_OK);
     // flush
     std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -681,6 +875,7 @@
     uint32_t flags = 0;
     uint32_t vtsFlags = 0;
     uint32_t timestamp = 0;
+    uint32_t nLargeFrames = 0;
     bool codecConfig = false;
     // This test introduces empty CSD after every 20th frame
     // and empty input frames at an interval of 5 frames.
@@ -688,6 +883,7 @@
         if (!(frameId % 5)) {
             vtsFlags = !(frameId % 20) ? (1 << VTS_BIT_FLAG_CSD_FRAME) : 0;
             bytesCount = 0;
+            Info.push_back({bytesCount, vtsFlags, timestamp, {}});
         } else {
             if (!(eleInfo >> bytesCount)) break;
             eleInfo >> flags;
@@ -695,8 +891,20 @@
             ASSERT_NE(vtsFlags, 0xFF) << "unrecognized flag entry in info file: " << mInfoFile;
             eleInfo >> timestamp;
             codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
+            Info.push_back({bytesCount, vtsFlags, timestamp, {}});
+            if ((vtsFlags & (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME)) != 0) {
+                eleInfo >> nLargeFrames;
+                // this is a large audio frame.
+                while (nLargeFrames-- > 0) {
+                    eleInfo >> bytesCount;
+                    eleInfo >> flags;
+                    eleInfo >> timestamp;
+                    vtsFlags = mapInfoFlagstoVtsFlags(flags);
+                    Info.editItemAt(Info.size() - 1).largeFrameInfo.push_back(
+                            {(uint32_t)bytesCount, vtsFlags, timestamp});
+                }
+            }
         }
-        Info.push_back({bytesCount, vtsFlags, timestamp});
         frameId++;
     }
     eleInfo.close();
@@ -711,6 +919,18 @@
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
+    getInputChannelInfo(mComponent, mMime, bitStreamInfo);
+    std::vector<C2FieldSupportedValues> supportedValues;
+    if (!Info.top().largeFrameInfo.empty()) {
+        if (!isLargeAudioFrameSupported(mComponent, supportedValues)) {
+            GTEST_SKIP() << "Component does not support large audio frames";
+        }
+        // time_sec * sample_rate * channel_count * 2 (bytes_per_channel)
+        mMaxOutputSize = 60 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        mOutputThresholdSize = 50 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        ASSERT_EQ(configureLargeFrameParams(mComponent, mMaxOutputSize,
+                mOutputThresholdSize, supportedValues), C2_OK);
+    }
     ASSERT_EQ(mComponent->start(), C2_OK);
     eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
@@ -766,7 +986,18 @@
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
-
+    getInputChannelInfo(mComponent, mMime, bitStreamInfo);
+    std::vector<C2FieldSupportedValues> supportedValues;
+    if (!Info.top().largeFrameInfo.empty()) {
+        if (!isLargeAudioFrameSupported(mComponent, supportedValues)) {
+            GTEST_SKIP() << "Component does not support large audio frames";
+        }
+        // time_sec * sample_rate * channel_count * 2 (bytes_per_channel)
+        mMaxOutputSize = 60 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        mOutputThresholdSize = 50 * bitStreamInfo[0] * bitStreamInfo[1] * 2;
+        ASSERT_EQ(configureLargeFrameParams(mComponent, mMaxOutputSize,
+                mOutputThresholdSize, supportedValues), C2_OK);
+    }
     ASSERT_EQ(mComponent->start(), C2_OK);
     std::ifstream eleStream;
     eleStream.open(mInputFile, std::ifstream::binary);
@@ -864,4 +1095,4 @@
     ::testing::InitGoogleTest(&argc, argv);
     ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
-}
+}
\ No newline at end of file
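
The large-frame test cases above size their output limits from the stream parameters: roughly
60 seconds of samples for mMaxOutputSize and 50 seconds for mOutputThresholdSize, using two bytes
per channel sample as in the tests' own size math, before configureLargeFrameParams() clamps both
values to whatever range the component reports. For the 8 kHz mono bbb_raw_1ch_8khz_s32le clip
that comes to 960000 and 800000 bytes. A small sketch of the same arithmetic, assuming (as the
comments above state) that bitStreamInfo carries the sample rate and channel count:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumption (per the test comments): bitStreamInfo[] holds sample rate and channel count.
        const uint32_t sampleRate = 8000;   // bbb_raw_1ch_8khz_s32le
        const uint32_t channelCount = 1;
        const uint32_t bytesPerSample = 2;  // as used throughout the tests' size math

        const uint32_t maxOutputSize = 60 * sampleRate * channelCount * bytesPerSample;        // 960000
        const uint32_t outputThresholdSize = 50 * sampleRate * channelCount * bytesPerSample;  // 800000
        std::printf("max=%u threshold=%u\n", maxOutputSize, outputThresholdSize);
        return 0;
    }
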
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
index 6c04683..9b9c62f 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
@@ -38,6 +38,8 @@
         <option name="push-file" key="bbb_opus_stereo_128kbps_48000hz.info" value="/data/local/tmp/media/bbb_opus_stereo_128kbps_48000hz.info" />
         <option name="push-file" key="bbb_opus_stereo_128kbps_48000hz.opus" value="/data/local/tmp/media/bbb_opus_stereo_128kbps_48000hz.opus" />
         <option name="push-file" key="bbb_raw_1ch_8khz_s32le.info" value="/data/local/tmp/media/bbb_raw_1ch_8khz_s32le.info" />
+        <option name="push-file" key="bbb_raw_1ch_8khz_s32le_largeframe.info"
+                value="/data/local/tmp/media/bbb_raw_1ch_8khz_s32le_largeframe.info" />
         <option name="push-file" key="bbb_raw_1ch_8khz_s32le.raw" value="/data/local/tmp/media/bbb_raw_1ch_8khz_s32le.raw" />
         <option name="push-file" key="bbb_vorbis_stereo_128kbps_48000hz.info" value="/data/local/tmp/media/bbb_vorbis_stereo_128kbps_48000hz.info" />
         <option name="push-file" key="bbb_vorbis_stereo_128kbps_48000hz.vorbis" value="/data/local/tmp/media/bbb_vorbis_stereo_128kbps_48000hz.vorbis" />
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
index 0f07077..564de47 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index a72f7bd..2da6501 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -17,7 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "media_c2_hidl_test_common"
 #include <stdio.h>
-
+#include <numeric>
 #include "media_c2_hidl_test_common.h"
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
@@ -221,6 +221,32 @@
     return parameters;
 }
 
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList{
+        {(1 << VTS_BIT_FLAG_SYNC_FRAME), 0},
+        {(1 << VTS_BIT_FLAG_CSD_FRAME), C2FrameData::FLAG_CODEC_CONFIG},
+};
+
+/*
+ * This is a conversion function that can be used to convert
+ * VTS flags to C2 flags and vice-versa based on the initializer list.
+ * @param flags can be a C2 flag or a VTS flag
+ * @param toC2 if true, converts flags to a C2 flag
+ *              if false, converts flags to a VTS flag
+ */
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+    return std::transform_reduce(
+            flagList.begin(), flagList.end(),
+            0u,
+            std::bit_or{},
+            [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+                if (toC2) {
+                    return (flags & entry.first) ? entry.second : 0;
+                } else {
+                    return (flags & entry.second) ? entry.first : 0;
+                }
+            });
+}
+
 // Populate Info vector and return number of CSDs
 int32_t populateInfoVector(std::string info, android::Vector<FrameInfo>* frameInfo,
                            bool timestampDevTest, std::list<uint64_t>* timestampUslist) {
@@ -235,12 +261,13 @@
     uint32_t flags = 0;
     uint32_t vtsFlags = 0;
     uint32_t timestamp = 0;
+    uint32_t nLargeFrames = 0;
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
         vtsFlags = mapInfoFlagstoVtsFlags(flags);
         if (vtsFlags == 0xFF) {
-            ALOGE("unrecognized flag entry in info file %s", info.c_str());
+            ALOGE("unrecognized flag(0x%x) entry in info file %s", flags, info.c_str());
             return -1;
         }
         eleInfo >> timestamp;
@@ -250,7 +277,18 @@
         if (timestampDevTest && !codecConfig && !nonDisplayFrame) {
             timestampUslist->push_back(timestamp);
         }
-        frameInfo->push_back({bytesCount, vtsFlags, timestamp});
+        frameInfo->push_back({bytesCount, vtsFlags, timestamp, {}});
+        if (vtsFlags & (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME)) {
+            eleInfo >> nLargeFrames;
+            while (nLargeFrames-- > 0) {
+                eleInfo >> bytesCount;
+                eleInfo >> flags;
+                eleInfo >> timestamp;
+                uint32_t c2Flags = convertFlags(flags, true);
+                frameInfo->editItemAt(frameInfo->size() - 1).largeFrameInfo.push_back(
+                        {c2Flags, static_cast<uint32_t>(bytesCount), timestamp});
+            }
+        }
     }
     ALOGV("numCsds : %d", numCsds);
     eleInfo.close();
@@ -285,5 +323,9 @@
     else if (infoFlags == 0x1) return (1 << VTS_BIT_FLAG_SYNC_FRAME);
     else if (infoFlags == 0x10) return (1 << VTS_BIT_FLAG_NO_SHOW_FRAME);
     else if (infoFlags == 0x20) return (1 << VTS_BIT_FLAG_CSD_FRAME);
+    else if (infoFlags == 0x40) return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME);
+    else if (infoFlags == 0x80) {
+        return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME) | (1 << VTS_BIT_FLAG_SYNC_FRAME);
+    }
     return 0xFF;
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index eda7b99..708fe15 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -54,12 +54,16 @@
     VTS_BIT_FLAG_SYNC_FRAME = 1,
     VTS_BIT_FLAG_NO_SHOW_FRAME = 2,
     VTS_BIT_FLAG_CSD_FRAME = 3,
+    VTS_BIT_FLAG_LARGE_AUDIO_FRAME = 4,
 };
 
 struct FrameInfo {
     int bytesCount;
     uint32_t vtsFlags;
     int64_t timestamp;
+    // This is used when access-units are marked with
+    // VTS_BIT_FLAG_LARGE_AUDIO_FRAME
+    std::vector<C2AccessUnitInfosStruct> largeFrameInfo;
 };
 
 template <typename... T>
@@ -86,7 +90,6 @@
     virtual void onWorkDone(const std::weak_ptr<android::Codec2Client::Component>& comp,
                             std::list<std::unique_ptr<C2Work>>& workItems) override {
         /* TODO */
-        ALOGD("onWorkDone called");
         (void)comp;
         if (callBack) callBack(workItems);
     }
@@ -103,7 +106,6 @@
                          uint32_t errorCode) override {
         /* TODO */
         (void)comp;
-        ALOGD("onError called");
         if (errorCode != 0) ALOGE("Error : %u", errorCode);
     }
 
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
index cc019da..0640f02 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 275a721..ab47b7c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_component_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 
@@ -382,5 +383,6 @@
     }
 
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
index 40f5201..5e52fde 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index 47ceed5..d1f0fb5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_master_test"
 
 #include <android-base/logging.h>
+#include <android-base/properties.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <hidl/ServiceManagement.h>
@@ -82,6 +83,17 @@
     }
 }
 
+TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
+    static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
+    if (sVendorApiLevel < 202404) {
+        GTEST_SKIP() << "vendor api level less than 202404: " << sVendorApiLevel;
+    }
+    ALOGV("MustUseAidlBeyond202404 Test");
+
+    EXPECT_NE(mClient->getAidlBase(), nullptr) << "android.hardware.media.c2 MUST use AIDL "
+                                               << "for chipsets launching with vendor API level 202404 or above";
+}
+
 }  // anonymous namespace
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2MasterHalTest,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
new file mode 100644
index 0000000..ee59a8e
--- /dev/null
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
@@ -0,0 +1,5 @@
+16384 128 0 		 1 16384 1 0
+49152 128 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000
+32768 128 4096000 2 16384 1 4096000 16384 1 5120000
+49152 128 6144000 3 16384 1 6144000 16384 1 7168000 16384 1 8192000
+10924 128 9216000 1 10924 1 9216000
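
Each line of this new info file follows the extended format read by populateInfoVector(): total
bytes, flags, timestamp, then, because flag 0x80 marks a large audio frame that is also a sync
frame, the number of contained access units followed by a (bytes, flags, timestamp) triple per
access unit. The second line, for example, describes a 49152-byte aggregated buffer at timestamp
1024000 holding three 16384-byte sync frames at 1024000, 2048000 and 3072000. A self-contained
sketch of reading one such line; the struct and variable names are illustrative, not the test's
own types:

    #include <cstdint>
    #include <iostream>
    #include <sstream>
    #include <vector>

    struct AccessUnit { uint32_t size; uint32_t flags; uint64_t timestamp; };  // illustrative

    int main() {
        std::istringstream line(
                "49152 128 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000");
        uint32_t totalBytes, flags, count;
        uint64_t timestamp;
        line >> totalBytes >> flags >> timestamp;
        std::vector<AccessUnit> units;
        if (flags == 0x40 || flags == 0x80) {  // large audio frame (0x80 is also a sync frame)
            line >> count;
            for (uint32_t i = 0; i < count; ++i) {
                AccessUnit au;
                line >> au.size >> au.flags >> au.timestamp;
                units.push_back(au);
            }
        }
        std::cout << totalBytes << " bytes split across " << units.size() << " access units\n";
        return 0;
    }
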
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
index ecc4f9d..d04c2f6 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index fdb28f4..f8fd425 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -29,6 +29,7 @@
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
+#include <codec2/common/HalSelection.h>
 #include <codec2/hidl/client.h>
 #include <gui/BufferQueue.h>
 #include <gui/IConsumerListener.h>
@@ -407,30 +408,45 @@
                       surfaceMode_t surfMode) {
     using namespace android;
     sp<IGraphicBufferProducer> producer = nullptr;
+    sp<IGraphicBufferConsumer> consumer = nullptr;
+    sp<GLConsumer> texture = nullptr;
+    sp<ANativeWindow> surface = nullptr;
     static std::atomic_uint32_t surfaceGeneration{0};
     uint32_t generation =
             (getpid() << 10) |
             ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) & ((1 << 10) - 1));
     int32_t maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+    C2BlockPool::local_id_t poolId = C2BlockPool::BASIC_GRAPHIC;
+    std::shared_ptr<Codec2Client::Configurable> configurable;
+    bool aidl = ::android::IsCodec2AidlHalSelected();
+    if (aidl) {
+        // AIDL does not support blockpool-less mode.
+        c2_status_t poolRet = component->createBlockPool(
+                C2PlatformAllocatorStore::IGBA, &poolId, &configurable);
+        ASSERT_EQ(poolRet, C2_OK) << "createBlockPool failed";
+    }
+
     if (surfMode == SURFACE) {
-        sp<IGraphicBufferConsumer> consumer = nullptr;
         BufferQueue::createBufferQueue(&producer, &consumer);
         ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
         ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
 
-        sp<GLConsumer> texture =
+        texture =
                 new GLConsumer(consumer, 0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
                                true /* useFenceSync */, false /* isControlledByApp */);
 
-        sp<ANativeWindow> gSurface = new Surface(producer);
-        ASSERT_NE(gSurface, nullptr) << "getSurface failed";
+        surface = new Surface(producer);
+        ASSERT_NE(surface, nullptr) << "failed to create Surface object";
 
         producer->setGenerationNumber(generation);
     }
 
-    c2_status_t err = component->setOutputSurface(C2BlockPool::BASIC_GRAPHIC, producer, generation,
+    c2_status_t err = component->setOutputSurface(poolId, producer, generation,
                                                   maxDequeueBuffers);
-    ASSERT_EQ(err, C2_OK) << "setOutputSurface failed";
+    std::string surfStr = surfMode == NO_SURFACE ? "NO_SURFACE" :
+            (surfMode == NULL_SURFACE ? "NULL_SURFACE" : "WITH_SURFACE");
+
+    ASSERT_EQ(err, C2_OK) << "setOutputSurface failed, surfMode: " << surfStr;
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -723,7 +739,7 @@
             ASSERT_NE(vtsFlags, 0xFF) << "unrecognized flag entry in info file: " << mInfoFile;
             eleInfo >> timestamp;
             timestamp += timestampOffset;
-            Info.push_back({bytesCount, vtsFlags, timestamp});
+            Info.push_back({bytesCount, vtsFlags, timestamp, {}});
             bool codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
             bool nonDisplayFrame = (vtsFlags & (1 << VTS_BIT_FLAG_NO_SHOW_FRAME)) != 0;
 
@@ -962,7 +978,7 @@
             eleInfo >> timestamp;
             codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
         }
-        Info.push_back({bytesCount, vtsFlags, timestamp});
+        Info.push_back({bytesCount, vtsFlags, timestamp, {}});
         frameId++;
     }
     eleInfo.close();
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index fbb4f18..8ecb9c0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -19,6 +19,7 @@
 
 #include <android-base/logging.h>
 #include <android/binder_process.h>
+#include <codec2/common/HalSelection.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <stdio.h>
@@ -71,7 +72,9 @@
         std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
         CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &mGraphicAllocator),
                  C2_OK);
-        mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
+        C2PooledBlockPool::BufferPoolVer ver = ::android::IsCodec2AidlHalSelected() ?
+                C2PooledBlockPool::VER_AIDL2 : C2PooledBlockPool::VER_HIDL;
+        mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++, ver);
         ASSERT_NE(mGraphicPool, nullptr);
 
         std::vector<std::unique_ptr<C2Param>> queried;
diff --git a/media/codec2/hal/hidl/1.1/utils/Android.bp b/media/codec2/hal/hidl/1.1/utils/Android.bp
index 4f86511..d8b5db5 100644
--- a/media/codec2/hal/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.1/utils/Android.bp
@@ -54,7 +54,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
@@ -66,7 +65,6 @@
         "com.android.media.swcodec",
     ],
 
-
     defaults: ["hidl_defaults"],
 
     srcs: [
@@ -97,6 +95,7 @@
         "android.hardware.media.omx@1.0",
         "libbase",
         "libcodec2",
+        "libcodec2_hal_common",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl_plugin_stub",
         "libcodec2_vndk",
@@ -173,4 +172,3 @@
         "libhidlbase",
     ],
 }
-
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index d0f4f19..09e5709 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -29,6 +29,8 @@
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2BqBufferPriv.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
@@ -44,6 +46,8 @@
 namespace utils {
 
 using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
@@ -135,6 +139,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+    protected:
+        std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +258,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -252,6 +303,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error queuing work to the component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
 
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
@@ -261,6 +325,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
 
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -469,6 +536,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +549,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -508,7 +581,12 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index d47abdd..46af809 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -194,6 +194,37 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -241,7 +272,10 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(
+                c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index f16de24..8f0478f 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -29,6 +29,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -57,6 +59,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -118,6 +122,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_1::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -140,6 +146,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index f6daee7..85862a9 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -79,6 +79,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.2/utils/Android.bp b/media/codec2/hal/hidl/1.2/utils/Android.bp
index b92dc07..a339946 100644
--- a/media/codec2/hal/hidl/1.2/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.2/utils/Android.bp
@@ -58,7 +58,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
@@ -102,6 +101,7 @@
         "android.hardware.media.omx@1.0",
         "libbase",
         "libcodec2",
+        "libcodec2_hal_common",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl_plugin_stub",
@@ -197,4 +197,3 @@
     name: "libcodec2-hidl-client-defaults",
     defaults: ["libcodec2-hidl-client-defaults@1.2"],
 }
-
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 036c900..0fe16e3 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -44,6 +44,8 @@
 namespace utils {
 
 using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
@@ -135,6 +137,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access units (large audio frames)
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
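+            // Drain any access units the helper is still holding and report
+            // them as completed work before propagating the error.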
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
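+        // Completed works are re-assembled ("gathered") by the helper before
+        // being reported to the framework listener.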
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+protected:
+    std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +256,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -252,7 +301,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
-
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error queueing work to component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
 
@@ -261,7 +322,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
-
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
         if (work) {
@@ -469,6 +532,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +545,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -539,7 +608,12 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index 9fac5d5..f89c835 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -241,7 +271,9 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 6a73392..f2b77bc 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -29,6 +29,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -57,6 +59,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -123,6 +127,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -145,6 +151,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index e95a651..c08fce4 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -79,6 +79,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index d867eb1..18c2468 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -45,6 +45,7 @@
 
     static_libs: [
         "libSurfaceFlingerProperties",
+        "android.media.codec-aconfig-cc",
     ],
 
     shared_libs: [
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6b45e0e..40656ff 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -90,6 +90,28 @@
     return v == "true";
 }
 
+// Flags can come with individual BufferInfos
+// when used with large frame audio
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+        {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+        {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+        {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+};
+
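+// Translates between MediaCodec BUFFER_FLAG_* values and C2FrameData::FLAG_*
+// values using flagList above; |toC2| picks the direction of the mapping.
+// For example, convertFlags(BUFFER_FLAG_END_OF_STREAM, true /* toC2 */)
+// yields C2FrameData::FLAG_END_OF_STREAM.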
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+    return std::transform_reduce(
+            flagList.begin(), flagList.end(),
+            0u,
+            std::bit_or{},
+            [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+                if (toC2) {
+                    return (flags & entry.first) ? entry.second : 0;
+                } else {
+                    return (flags & entry.second) ? entry.first : 0;
+                }
+            });
+}
+
 }  // namespace
 
 CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -245,7 +267,8 @@
     if (buffer->meta()->findInt32("decode-only", &tmp) && tmp) {
         flags |= C2FrameData::FLAG_DROP_FRAME;
     }
-    ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
+    ALOGV("[%s] queueInputBuffer: buffer->size() = %zu time: %lld",
+            mName, buffer->size(), (long long)timeUs);
     std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
     work->input.ordinal.timestamp = timeUs;
@@ -296,6 +319,34 @@
                 uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
                 output->rotation[frameIndex] = rotation;
             }
+            sp<RefBase> obj;
+            if (buffer->meta()->findObject("accessUnitInfo", &obj)) {
+                ALOGV("Filling C2Info from multiple access units");
+                sp<WrapperObject<std::vector<AccessUnitInfo>>> infos{
+                        (decltype(infos.get()))obj.get()};
+                std::vector<AccessUnitInfo> &accessUnitInfoVec = infos->value;
+                std::vector<C2AccessUnitInfosStruct> multipleAccessUnitInfos;
+                uint32_t outFlags = 0;
+                for (int i = 0; i < accessUnitInfoVec.size(); i++) {
+                    outFlags = 0;
+                    outFlags = convertFlags(accessUnitInfoVec[i].mFlags, true);
+                    if (eos && (outFlags & C2FrameData::FLAG_END_OF_STREAM)) {
+                        outFlags &= (~C2FrameData::FLAG_END_OF_STREAM);
+                    }
+                    multipleAccessUnitInfos.emplace_back(
+                            outFlags,
+                            accessUnitInfoVec[i].mSize,
+                            accessUnitInfoVec[i].mTimestamp);
+                    ALOGV("%d) flags: %d, size: %d, time: %llu",
+                            i, outFlags, accessUnitInfoVec[i].mSize,
+                            (long long)accessUnitInfoVec[i].mTimestamp);
+
+                }
+                const std::shared_ptr<C2AccessUnitInfos::input> c2AccessUnitInfos =
+                        C2AccessUnitInfos::input::AllocShared(
+                                multipleAccessUnitInfos.size(), 0u, multipleAccessUnitInfos);
+                c2buffer->setInfo(c2AccessUnitInfos);
+            }
             work->input.buffers.push_back(c2buffer);
             if (encryptedBlock) {
                 work->input.infoBuffers.emplace_back(C2InfoBuffer::CreateLinearBuffer(
@@ -432,6 +483,130 @@
     return heapSeqNum;
 }
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+status_t CCodecBufferChannel::attachEncryptedBuffers(
+        const sp<hardware::HidlMemory> &memory,
+        size_t offset,
+        const sp<MediaCodecBuffer> &buffer,
+        bool secure,
+        AString* errorDetailMsg) {
+    static const C2MemoryUsage kDefaultReadWriteUsage{
+        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    if (!hasCryptoOrDescrambler()) {
+        ALOGE("attachEncryptedBuffers requires Crypto/descrambler object");
+        return -ENOSYS;
+    }
+    size_t size = 0;
+    CHECK(buffer->meta()->findSize("ssize", &size));
+    if (size == 0) {
+        buffer->setRange(0, 0);
+        return OK;
+    }
+    sp<RefBase> obj;
+    CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+    sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+    CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+    sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+    if (secure || (mCrypto == nullptr)) {
+        if (cryptoInfos->value.size() != 1) {
+            ALOGE("Cannot decrypt multiple access units");
+            return -ENOSYS;
+        }
+        // we are dealing with just one cryptoInfo or descrambler.
+        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+        if (info == nullptr) {
+            ALOGE("Cannot decrypt, CryptoInfos are null.");
+            return -ENOSYS;
+        }
+        return attachEncryptedBuffer(
+                memory,
+                secure,
+                info->mKey,
+                info->mIv,
+                info->mMode,
+                info->mPattern,
+                offset,
+                info->mSubSamples,
+                info->mNumSubSamples,
+                buffer,
+                errorDetailMsg);
+    }
+    std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+    std::shared_ptr<C2LinearBlock> block;
+    c2_status_t err = pool->fetchLinearBlock(
+            size,
+            kDefaultReadWriteUsage,
+            &block);
+    if (err != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffers: fetchLinearBlock failed: size = %zu (%s) err = %d",
+              mName, size, secure ? "secure" : "non-secure", err);
+        return NO_MEMORY;
+    }
+    ensureDecryptDestination(size);
+    C2WriteView wView = block->map().get();
+    if (wView.error() != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffers: block map error: %d (non-secure)",
+              mName, wView.error());
+        return UNKNOWN_ERROR;
+    }
+
+    ssize_t result = -1;
+    ssize_t codecDataOffset = 0;
+    size_t inBufferOffset = 0;
+    size_t outBufferSize = 0;
+    uint32_t cryptoInfoIdx = 0;
+    int32_t heapSeqNum = getHeapSeqNum(memory);
+    hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
+    hardware::drm::V1_0::DestinationBuffer dst;
+    dst.type = DrmBufferType::SHARED_MEMORY;
+    IMemoryToSharedBuffer(
+            mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
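+    // Decrypt the access units one by one: the source offset advances by each
+    // entry's recorded size, and the write view's offset is advanced after
+    // each unit so successive units land one after another in the block.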
+    for (int i = 0; i < bufferInfos->value.size(); i++) {
+        if (bufferInfos->value[i].mSize > 0) {
+            std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+            result = mCrypto->decrypt(
+                    (uint8_t*)info->mKey,
+                    (uint8_t*)info->mIv,
+                    info->mMode,
+                    info->mPattern,
+                    src,
+                    inBufferOffset,
+                    info->mSubSamples,
+                    info->mNumSubSamples,
+                    dst,
+                    errorDetailMsg);
+            inBufferOffset += bufferInfos->value[i].mSize;
+            if (result < 0) {
+                ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
+                        mName, result);
+                return result;
+            }
+            if (wView.error() == C2_OK) {
+                if (wView.size() < result) {
+                    ALOGI("[%s] attachEncryptedBuffers: block size too small:"
+                            "size=%u result=%zd (non-secure)", mName, wView.size(), result);
+                    return UNKNOWN_ERROR;
+                }
+                memcpy(wView.data(), mDecryptDestination->unsecurePointer(), result);
+                bufferInfos->value[i].mSize = result;
+                wView.setOffset(wView.offset() + result);
+            }
+            outBufferSize += result;
+        }
+    }
+    if (wView.error() == C2_OK) {
+        wView.setOffset(0);
+    }
+    std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
+            block->share(codecDataOffset, outBufferSize - codecDataOffset, C2Fence{}))};
+    if (!buffer->copy(c2Buffer)) {
+        ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
+        return -ENOSYS;
+    }
+    return OK;
+}
+
 status_t CCodecBufferChannel::attachEncryptedBuffer(
         const sp<hardware::HidlMemory> &memory,
         bool secure,
@@ -726,6 +901,138 @@
     return queueInputBufferInternal(buffer, block, bufferSize);
 }
 
+status_t CCodecBufferChannel::queueSecureInputBuffers(
+        const sp<MediaCodecBuffer> &buffer,
+        bool secure,
+        AString *errorDetailMsg) {
+    QueueGuard guard(mSync);
+    if (!guard.isRunning()) {
+        ALOGD("[%s] No more buffers should be queued at current state.", mName);
+        return -ENOSYS;
+    }
+
+    if (!hasCryptoOrDescrambler()) {
+        ALOGE("queueSecureInputBuffers requires a Crypto/descrambler Object");
+        return -ENOSYS;
+    }
+    sp<RefBase> obj;
+    CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+    sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+    CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+    sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+    if (secure || mCrypto == nullptr) {
+        if (cryptoInfos->value.size() != 1) {
+            ALOGE("Cannot decrypt multiple access units on native handles");
+            return -ENOSYS;
+        }
+        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+        if (info == nullptr) {
+            ALOGE("Cannot decrypt, CryptoInfos are null");
+            return -ENOSYS;
+        }
+        return queueSecureInputBuffer(
+                buffer,
+                secure,
+                info->mKey,
+                info->mIv,
+                info->mMode,
+                info->mPattern,
+                info->mSubSamples,
+                info->mNumSubSamples,
+                errorDetailMsg);
+    }
+    sp<EncryptedLinearBlockBuffer> encryptedBuffer((EncryptedLinearBlockBuffer *)buffer.get());
+
+    std::shared_ptr<C2LinearBlock> block;
+    size_t allocSize = buffer->size();
+    size_t bufferSize = 0;
+    c2_status_t blockRes = C2_OK;
+    bool copied = false;
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
+    if (mSendEncryptedInfoBuffer) {
+        static const C2MemoryUsage kDefaultReadWriteUsage{
+            C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        constexpr int kAllocGranule0 = 1024 * 64;
+        constexpr int kAllocGranule1 = 1024 * 1024;
+        std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+        // round up encrypted sizes to limit fragmentation and encourage buffer reuse
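+        // e.g. a 100 KiB buffer is rounded up to 128 KiB (64 KiB granule),
+        // while anything above 1 MiB is rounded up to the next 1 MiB boundary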
+        if (allocSize <= kAllocGranule1) {
+            bufferSize = align(allocSize, kAllocGranule0);
+        } else {
+            bufferSize = align(allocSize, kAllocGranule1);
+        }
+        blockRes = pool->fetchLinearBlock(
+                bufferSize, kDefaultReadWriteUsage, &block);
+
+        if (blockRes == C2_OK) {
+            C2WriteView view = block->map().get();
+            if (view.error() == C2_OK && view.size() == bufferSize) {
+                copied = true;
+                // TODO: only copy clear sections
+                memcpy(view.data(), buffer->data(), allocSize);
+            }
+        }
+    }
+
+    if (!copied) {
+        block.reset();
+    }
+    // Note: cryptoInfos is expected to carry one entry per non-empty bufferInfos entry.
+    ssize_t result = -1;
+    ssize_t codecDataOffset = 0;
+    size_t inBufferOffset = 0;
+    size_t outBufferSize = 0;
+    uint32_t cryptoInfoIdx = 0;
+    {
+        // scoped this block to enable destruction of mappedBlock
+        std::unique_ptr<EncryptedLinearBlockBuffer::MappedBlock> mappedBlock = nullptr;
+        hardware::drm::V1_0::DestinationBuffer destination;
+        destination.type = DrmBufferType::SHARED_MEMORY;
+        IMemoryToSharedBuffer(
+                mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+        encryptedBuffer->getMappedBlock(&mappedBlock);
+        hardware::drm::V1_0::SharedBuffer source;
+        encryptedBuffer->fillSourceBuffer(&source);
+        for (int i = 0 ; i < bufferInfos->value.size(); i++) {
+            if (bufferInfos->value[i].mSize > 0) {
+                std::unique_ptr<CodecCryptoInfo> info =
+                        std::move(cryptoInfos->value[cryptoInfoIdx++]);
+                if (info->mNumSubSamples == 1
+                        && info->mSubSamples[0].mNumBytesOfClearData == 0
+                        && info->mSubSamples[0].mNumBytesOfEncryptedData == 0) {
+                    // no data so we only populate the bufferInfo
+                    result = 0;
+                } else {
+                    result = mCrypto->decrypt(
+                            (uint8_t*)info->mKey,
+                            (uint8_t*)info->mIv,
+                            info->mMode,
+                            info->mPattern,
+                            source,
+                            inBufferOffset,
+                            info->mSubSamples,
+                            info->mNumSubSamples,
+                            destination,
+                            errorDetailMsg);
+                    inBufferOffset += bufferInfos->value[i].mSize;
+                    if (result < 0) {
+                        ALOGI("[%s] decrypt failed: result=%zd", mName, result);
+                        return result;
+                    }
+                    if (destination.type == DrmBufferType::SHARED_MEMORY && mappedBlock) {
+                        mappedBlock->copyDecryptedContent(mDecryptDestination, result);
+                    }
+                    bufferInfos->value[i].mSize = result;
+                    outBufferSize += result;
+                }
+            }
+        }
+        buffer->setRange(codecDataOffset, outBufferSize - codecDataOffset);
+    }
+    return queueInputBufferInternal(buffer, block, bufferSize);
+}
+
 void CCodecBufferChannel::feedInputBufferIfAvailable() {
     QueueGuard guard(mSync);
     if (!guard.isRunning()) {
@@ -1604,8 +1911,14 @@
                     padding = 0;
                 }
                 if (delay || padding) {
-                    // We need write access to the buffers, and we're already in
-                    // array mode.
+                    // We need write access to the buffers, so turn them into array mode.
+                    // TODO: b/321930152 - define SkipCutOutputBuffers that takes output from
+                    // component, runs it through SkipCutBuffer and allocate local buffer to be
+                    // used by fwk. Make initSkipCutBuffer() return OutputBuffers similar to
+                    // toArrayMode().
+                    if (!output->buffers->isArrayMode()) {
+                        output->buffers = output->buffers->toArrayMode(numOutputSlots);
+                    }
                     output->buffers->initSkipCutBuffer(delay, padding, sampleRate, channelCount);
                 }
             }
@@ -2259,12 +2572,34 @@
         case OutputBuffers::DISCARD:
             break;
         case OutputBuffers::NOTIFY_CLIENT:
+        {
             // TRICKY: we want popped buffers reported in order, so sending
             // the callback while holding the lock here. This assumes that
             // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
             // callbacks are always sent with the Output lock held.
+            if (c2Buffer) {
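+                // If the component attached per-access-unit infos to the output,
+                // translate them back to MediaCodec flags and expose them to the
+                // client via the "accessUnitInfo" meta object.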
+                std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+                        std::static_pointer_cast<const C2AccessUnitInfos::output>(
+                        c2Buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+                if (bufferMetadata && bufferMetadata->flexCount() > 0) {
+                    uint32_t flag = 0;
+                    std::vector<AccessUnitInfo> accessUnitInfos;
+                    for (int nMeta = 0; nMeta < bufferMetadata->flexCount(); nMeta++) {
+                        const C2AccessUnitInfosStruct &bufferMetadataStruct =
+                                bufferMetadata->m.values[nMeta];
+                        flag = convertFlags(bufferMetadataStruct.flags, false);
+                        accessUnitInfos.emplace_back(flag,
+                                static_cast<size_t>(bufferMetadataStruct.size),
+                                static_cast<size_t>(bufferMetadataStruct.timestamp));
+                    }
+                    sp<WrapperObject<std::vector<AccessUnitInfo>>> obj{
+                        new WrapperObject<std::vector<AccessUnitInfo>>{accessUnitInfos}};
+                    outBuffer->meta()->setObject("accessUnitInfo", obj);
+                }
+            }
             mCallback->onOutputBufferAvailable(index, outBuffer);
             break;
+        }
         case OutputBuffers::REALLOCATE:
             if (++reallocTryNum > kMaxReallocTry) {
                 output.unlock();
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 8dc9fb6..b470655 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -73,6 +73,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
+    status_t queueSecureInputBuffers(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString *errorDetailMsg) override;
     status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
@@ -88,6 +92,12 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
+    status_t attachEncryptedBuffers(
+            const sp<hardware::HidlMemory> &memory,
+            size_t offset,
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString* errorDetailMsg) override;
     status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     void pollForRenderedBuffers() override;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 670923b..d313f33 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -18,11 +18,14 @@
 #define LOG_TAG "CCodecBuffers"
 #include <utils/Log.h>
 
+#include <numeric>
+
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <mediadrm/ICrypto.h>
@@ -147,6 +150,171 @@
     return copy;
 }
 
+// MultiAccessUnitSkipCutBuffer: a SkipCutBuffer that keeps the per-access-unit
+// bufferInfos consistent with the bytes it skips and cuts.
+
+class MultiAccessUnitSkipCutBuffer : public SkipCutBuffer {
+
+public:
+    explicit MultiAccessUnitSkipCutBuffer(
+            int32_t skip, int32_t cut, size_t num16BitChannels):
+        SkipCutBuffer(skip, cut, num16BitChannels),
+        mFrontPaddingDelay(0), mSize(0) {
+    }
+    void clearAll() {
+        mInfos.clear();
+        mFrontPaddingDelay = 0;
+        mSize = 0;
+        SkipCutBuffer::clear();
+    }
+
+    virtual ~MultiAccessUnitSkipCutBuffer() {}
+
+    void submitMultiAccessUnits(
+            const sp<MediaCodecBuffer>& buffer,
+            int32_t sampleRate, size_t num16BitChannels,
+            std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
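+        // Runs the regular skip/cut on |buffer| and rewrites the access-unit
+        // infos so their sizes and timestamps stay consistent with the bytes
+        // that were skipped or cut; the adjusted infos are re-attached to the
+        // buffer meta as "accessUnitInfo".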
+        if (infos == nullptr) {
+            // there is nothing to do more.
+            SkipCutBuffer::submit(buffer);
+            return;
+        }
+        typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+        CHECK_EQ(mSize, SkipCutBuffer::size());
+        sp<BufferInfosWrapper> bufferInfos{new BufferInfosWrapper(decltype(bufferInfos->value)())};
+        uint32_t availableSize = buffer->size() + SkipCutBuffer::size();
+        uint32_t frontPadding = mFrontPadding;
+        int32_t lastEmptyAccessUnitIndex = -1;
+        int64_t byteInUs = 0;
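+        // Microseconds of audio carried by one byte of 16-bit PCM
+        // (2 bytes per sample per channel); used to adjust timestamps when
+        // access units are trimmed.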
+        if (sampleRate > 0 && num16BitChannels > 0) {
+            byteInUs = (1000000u / (sampleRate * num16BitChannels * 2));
+        }
+        if (frontPadding > 0) {
+            mInfos.clear();
+            mSize = 0;
+        }
+        for (int i = 0 ; i < infos->flexCount() && frontPadding > 0; i++) {
+            uint32_t flagsInPadding = 0;
+            int64_t timeInPadding = 0;
+            if (infos->m.values[i].size <= frontPadding) {
+                // we have more front padding so this buffer is not going to be used.
+                int32_t consumed = infos->m.values[i].size;
+                frontPadding -= consumed;
+                mFrontPaddingDelay += byteInUs * (consumed);
+                availableSize -= consumed;
+                flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+                timeInPadding = infos->m.values[i].timestamp;
+            } else {
+                C2AccessUnitInfosStruct info = infos->m.values[i];
+                mFrontPaddingDelay +=  byteInUs * (frontPadding);
+                info.size -= frontPadding;
+                info.timestamp -= mFrontPaddingDelay;
+                availableSize -= frontPadding;
+                flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+                timeInPadding = infos->m.values[i].timestamp;
+                frontPadding = 0;
+                mInfos.push_back(info);
+                mSize += info.size;
+            }
+            if (flagsInPadding != 0) {
+                bufferInfos->value.emplace_back(
+                        flagsInPadding, 0, timeInPadding);
+            }
+            lastEmptyAccessUnitIndex = i;
+        }
+        if (frontPadding <= 0) {
+            // process what's already in the buffer first
+            auto it = mInfos.begin();
+            while (it != mInfos.end() && availableSize > mBackPadding) {
+                // we have samples to send out.
+                if ((availableSize - it->size) >= mBackPadding) {
+                    // this is totally used here.
+                    int32_t consumed = it->size;
+                    bufferInfos->value.emplace_back(
+                            toMediaCodecFlags(it->flags), consumed, it->timestamp);
+                    availableSize -= consumed;
+                    mSize -= consumed;
+                    it = mInfos.erase(it);
+                } else {
+                    int32_t consumed = availableSize - mBackPadding;
+                    bufferInfos->value.emplace_back(
+                            toMediaCodecFlags(it->flags),
+                            consumed,
+                            it->timestamp);
+                    it->size -= consumed;
+                    it->timestamp += consumed * byteInUs;
+                    availableSize -= consumed;
+                    mSize -= consumed;
+                    it++;
+                }
+            }
+            // if the new buffer has more data, process all of it and keep the remaining infos.
+            for (int i = (lastEmptyAccessUnitIndex + 1) ; i < infos->flexCount() ; i++) {
+                // update bufferInfos and mInfos
+                if (availableSize > mBackPadding) {
+                    // we have to take data from the new buffer.
+                    if (availableSize - infos->m.values[i].size >= mBackPadding) {
+                        // we are using this info
+                        int32_t consumed = infos->m.values[i].size;
+                        bufferInfos->value.emplace_back(
+                                toMediaCodecFlags(infos->m.values[i].flags),
+                                consumed,
+                                infos->m.values[i].timestamp - mFrontPaddingDelay);
+                        availableSize -= consumed;
+                    } else {
+                        // only part of this access unit fits; emit that part and keep the rest in mInfos
+                        C2AccessUnitInfosStruct info = infos->m.values[i];
+                        int32_t consumed = availableSize - mBackPadding;
+                        bufferInfos->value.emplace_back(
+                                toMediaCodecFlags(infos->m.values[i].flags),
+                                consumed,
+                                infos->m.values[i].timestamp - mFrontPaddingDelay);
+                        info.size -= consumed;
+                        info.timestamp = info.timestamp - mFrontPaddingDelay +
+                                consumed * byteInUs;
+                        mInfos.push_back(info);
+                        availableSize -= consumed;
+                        mSize += info.size;
+                    }
+                } else {
+                    // nothing can be emitted yet; keep this info for a later submit
+                    C2AccessUnitInfosStruct info = infos->m.values[i];
+                    info.timestamp -= mFrontPaddingDelay;
+                    mInfos.push_back(info);
+                    mSize += info.size;
+                }
+            }
+        }
+        SkipCutBuffer::submit(buffer);
+        infos = nullptr;
+        if (!bufferInfos->value.empty()) {
+            buffer->meta()->setObject("accessUnitInfo", bufferInfos);
+        }
+    }
+protected:
+    // Flags can come with individual BufferInfos
+    // when used with large frame audio
+    constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+            {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+            {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+            {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+    };
+
+    static uint32_t toMediaCodecFlags(uint32_t flags) {
+        return std::transform_reduce(
+                flagList.begin(), flagList.end(),
+                0u,
+                std::bit_or{},
+                [flags](const std::pair<uint32_t, uint32_t> &entry) {
+                    return (flags & entry.second) ? entry.first : 0;
+                });
+    }
+    std::list<C2AccessUnitInfosStruct> mInfos;
+    int64_t mFrontPaddingDelay;
+    size_t mSize;
+};
+
 // OutputBuffers
 
 OutputBuffers::OutputBuffers(const char *componentName, const char *name)
@@ -201,6 +369,15 @@
     }
 }
 
+bool OutputBuffers::submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+            int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
+    if (mSkipCutBuffer == nullptr) {
+        return false;
+    }
+    mSkipCutBuffer->submitMultiAccessUnits(buffer, sampleRate, channelCount, infos);
+    return true;
+}
+
 void OutputBuffers::setSkipCutBuffer(int32_t skip, int32_t cut) {
     if (mSkipCutBuffer != nullptr) {
         size_t prevSize = mSkipCutBuffer->size();
@@ -208,7 +385,7 @@
             ALOGD("[%s] Replacing SkipCutBuffer holding %zu bytes", mName, prevSize);
         }
     }
-    mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
+    mSkipCutBuffer = new MultiAccessUnitSkipCutBuffer(skip, cut, mChannelCount);
 }
 
 bool OutputBuffers::convert(
@@ -1160,7 +1337,16 @@
         ALOGD("[%s] copy buffer failed", mName);
         return WOULD_BLOCK;
     }
-    submit(c2Buffer);
+    if (buffer && buffer->hasInfo(C2AccessUnitInfos::output::PARAM_TYPE)) {
+        std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+                        std::static_pointer_cast<const C2AccessUnitInfos::output>(
+                        buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+        if (submit(c2Buffer, mSampleRate, mChannelCount, bufferMetadata)) {
+            buffer->removeInfo(C2AccessUnitInfos::output::PARAM_TYPE);
+        }
+    } else {
+        submit(c2Buffer);
+    }
     handleImageData(c2Buffer);
     *clientBuffer = c2Buffer;
     ALOGV("[%s] grabbed buffer %zu", mName, *index);
@@ -1198,7 +1384,7 @@
     (void)flushedWork;
     mImpl.flush();
     if (mSkipCutBuffer != nullptr) {
-        mSkipCutBuffer->clear();
+        mSkipCutBuffer->clearAll();
     }
 }
 
@@ -1356,7 +1542,7 @@
 void LinearOutputBuffers::flush(
         const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     if (mSkipCutBuffer != nullptr) {
-        mSkipCutBuffer->clear();
+        mSkipCutBuffer->clearAll();
     }
     FlexOutputBuffers::flush(flushedWork);
 }
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index cbef644..f0936bc 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -20,6 +20,7 @@
 
 #include <optional>
 #include <string>
+#include <vector>
 
 #include <C2Config.h>
 #include <DataConverter.h>
@@ -33,6 +34,8 @@
 struct ICrypto;
 class MemoryDealer;
 class SkipCutBuffer;
+class MultiAccessUnitSkipCutBuffer;
+struct AccessUnitInfo;
 
 constexpr size_t kLinearBufferSize = 1048576;
 // This can fit an 8K frame.
@@ -382,13 +385,17 @@
             sp<MediaCodecBuffer>* outBuffer);
 
 protected:
-    sp<SkipCutBuffer> mSkipCutBuffer;
+
+    sp<MultiAccessUnitSkipCutBuffer> mSkipCutBuffer;
 
     /**
      * Update the SkipCutBuffer object. No-op if it's never initialized.
      */
     void updateSkipCutBuffer(int32_t sampleRate, int32_t channelCount);
 
+    bool submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+            int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos);
+
     /**
      * Submit buffer to SkipCutBuffer object, if initialized.
      */
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 6d49fa8..c22deca 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -402,10 +402,19 @@
 
     add(ConfigMapper(KEY_MAX_INPUT_SIZE, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE, "value")
         .limitTo(D::INPUT));
+
     // remove when codecs switch to PARAMKEY
     deprecated(ConfigMapper(KEY_MAX_INPUT_SIZE, "coded.max-frame-size", "value")
                .limitTo(D::INPUT));
 
+    // large frame params
+    add(ConfigMapper(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE,
+            C2_PARAMKEY_OUTPUT_LARGE_FRAME, "max-size")
+        .limitTo(D::AUDIO & D::OUTPUT));
+    add(ConfigMapper(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+            C2_PARAMKEY_OUTPUT_LARGE_FRAME, "threshold-size")
+        .limitTo(D::AUDIO & D::OUTPUT));
+
     // Rotation
     // Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 3bb6593..9c514f2 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -1036,6 +1036,37 @@
     return const_cast<native_handle_t *>(mBlock->handle());
 }
 
+void EncryptedLinearBlockBuffer::getMappedBlock(
+        std::unique_ptr<MappedBlock> * const mappedBlock) const {
+    if (mappedBlock) {
+        mappedBlock->reset(new EncryptedLinearBlockBuffer::MappedBlock(mBlock));
+    }
+    return;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::MappedBlock(
+        const std::shared_ptr<C2LinearBlock> &block) : mView(block->map().get()) {
+}
+
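+// Copies decrypted bytes into the mapped block and advances the write offset
+// so successive access units are laid out back to back; the destructor
+// rewinds the offset to zero.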
+bool EncryptedLinearBlockBuffer::MappedBlock::copyDecryptedContent(
+        const sp<IMemory> &decrypted, size_t length) {
+    if (mView.error() != C2_OK) {
+        return false;
+    }
+    if (mView.size() < length) {
+        ALOGE("View size(%d) less than decrypted length(%zu)",
+                mView.size(), length);
+        return false;
+    }
+    memcpy(mView.data(), decrypted->unsecurePointer(), length);
+    mView.setOffset(mView.offset() + length);
+    return true;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::~MappedBlock() {
+    mView.setOffset(0);
+}
+
 using ::aidl::android::hardware::graphics::common::Cta861_3;
 using ::aidl::android::hardware::graphics::common::Smpte2086;
 
@@ -1049,6 +1080,7 @@
         // Unwrap raw buffer handle from the C2Handle
         native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
         if (!nh) {
+            ALOGE("handle is not compatible to any gralloc C2Handle types");
             return;
         }
         // Import the raw handle so IMapper can use the buffer. The imported
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index b73acab..5e96921 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -384,6 +384,17 @@
      */
     native_handle_t *handle() const;
 
+    class MappedBlock {
+    public:
+        explicit MappedBlock(const std::shared_ptr<C2LinearBlock> &block);
+        virtual ~MappedBlock();
+        bool copyDecryptedContent(const sp<IMemory> &decrypted, size_t length);
+    private:
+        C2WriteView mView;
+    };
+
+    void getMappedBlock(std::unique_ptr<MappedBlock> * const mappedBlock) const;
+
 private:
 
     std::shared_ptr<C2LinearBlock> mBlock;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..8dce789 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,8 @@
 
 #include <strings.h>
 
+#include <android_media_codec.h>
+
 #include <C2Component.h>
 #include <C2Config.h>
 #include <C2Debug.h>
@@ -752,6 +754,24 @@
                 }
                 addSupportedColorFormats(
                         intf, caps.get(), trait, mediaType, it->second);
+
+                if (android::media::codec::provider_->large_audio_frame_finish()) {
+                    // Adding feature-multiple-frames when C2LargeFrame param is present
+                    if (trait.domain == C2Component::DOMAIN_AUDIO) {
+                        std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+                        c2_status_t err = intf->querySupportedParams(&params);
+                        if (err == C2_OK) {
+                            for (const auto &paramDesc : params) {
+                                if (C2LargeFrame::output::PARAM_TYPE == paramDesc->index()) {
+                                    std::string featureMultipleFrames =
+                                            std::string(KEY_FEATURE_) + FEATURE_MultipleFrames;
+                                    caps->addDetail(featureMultipleFrames.c_str(), 0);
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
             }
         }
     }
diff --git a/media/codec2/tests/C2ComponentInterface_test.cpp b/media/codec2/tests/C2ComponentInterface_test.cpp
index 67f733d..d1844b0 100644
--- a/media/codec2/tests/C2ComponentInterface_test.cpp
+++ b/media/codec2/tests/C2ComponentInterface_test.cpp
@@ -235,7 +235,7 @@
     // |*heapParams[0]| is a parameter value. The size of |heapParams| has to be one.
     ASSERT_EQ(1u, heapParams.size());
     EXPECT_TRUE(heapParams[0]);
-    EXPECT_EQ(*heapParams[0], expected);
+    EXPECT_EQ(*heapParams[0], (C2Param &)(expected));
 }
 
 template <typename T> void C2CompIntfTest::querySupportedParam() {
diff --git a/media/codec2/tests/aidl/Android.bp b/media/codec2/tests/aidl/Android.bp
new file mode 100644
index 0000000..2ad245c
--- /dev/null
+++ b/media/codec2/tests/aidl/Android.bp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "c2aidl_gtracker_test",
+    test_suites: ["device-tests"],
+    defaults: [
+        "libcodec2-aidl-client-defaults",
+    ],
+
+    header_libs: [
+        "libcodec2_client_headers",
+        "libcodec2_internal",
+        "libcodec2_vndk_headers",
+    ],
+
+    srcs: [
+        "GraphicsTracker_test.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcodec2_client",
+        "libgui",
+        "libnativewindow",
+        "libui",
+    ],
+}
diff --git a/media/codec2/tests/aidl/GraphicsTracker_test.cpp b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
new file mode 100644
index 0000000..9008086
--- /dev/null
+++ b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
@@ -0,0 +1,820 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GraphicsTracker_test"
+#include <unistd.h>
+
+#include <android/hardware_buffer.h>
+#include <codec2/aidl/GraphicsTracker.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <gui/BufferQueue.h>
+#include <gui/IProducerListener.h>
+#include <gui/IConsumerListener.h>
+#include <gui/Surface.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <C2BlockInternal.h>
+#include <C2FenceFactory.h>
+
+#include <atomic>
+#include <memory>
+#include <iostream>
+#include <thread>
+
+using ::aidl::android::hardware::media::c2::implementation::GraphicsTracker;
+using ::android::BufferItem;
+using ::android::BufferQueue;
+using ::android::Fence;
+using ::android::GraphicBuffer;
+using ::android::IGraphicBufferProducer;
+using ::android::IGraphicBufferConsumer;
+using ::android::IProducerListener;
+using ::android::IConsumerListener;
+using ::android::OK;
+using ::android::sp;
+using ::android::wp;
+
+namespace {
+struct BqStatistics {
+    std::atomic<int> mDequeued;
+    std::atomic<int> mQueued;
+    std::atomic<int> mBlocked;
+    std::atomic<int> mDropped;
+    std::atomic<int> mDiscarded;
+    std::atomic<int> mReleased;
+
+    void log() {
+        ALOGD("Dequeued: %d, Queued: %d, Blocked: %d, "
+              "Dropped: %d, Discarded %d, Released %d",
+              (int)mDequeued, (int)mQueued, (int)mBlocked,
+              (int)mDropped, (int)mDiscarded, (int)mReleased);
+    }
+
+    void clear() {
+        mDequeued = 0;
+        mQueued = 0;
+        mBlocked = 0;
+        mDropped = 0;
+        mDiscarded = 0;
+        mReleased = 0;
+    }
+};
+
+struct DummyConsumerListener : public android::BnConsumerListener {
+    void onFrameAvailable(const BufferItem& /* item */) override {}
+    void onBuffersReleased() override {}
+    void onSidebandStreamChanged() override {}
+};
+
+struct TestConsumerListener : public android::BnConsumerListener {
+    TestConsumerListener(const sp<IGraphicBufferConsumer> &consumer)
+            : BnConsumerListener(), mConsumer(consumer) {}
+    void onFrameAvailable(const BufferItem&) override {
+        constexpr static int kRenderDelayUs = 1000000/30; // 30fps
+        BufferItem buffer;
+        // consume buffer
+        sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
+        if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == android::NO_ERROR) {
+            ::usleep(kRenderDelayUs);
+            consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
+                                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+        }
+    }
+    void onBuffersReleased() override {}
+    void onSidebandStreamChanged() override {}
+
+    wp<IGraphicBufferConsumer> mConsumer;
+};
+
+struct TestProducerListener : public android::BnProducerListener {
+    TestProducerListener(std::shared_ptr<GraphicsTracker> tracker,
+                         std::shared_ptr<BqStatistics> &stat,
+                         uint32_t generation) : BnProducerListener(),
+        mTracker(tracker), mStat(stat), mGeneration(generation) {}
+    virtual void onBufferReleased() override {
+        auto tracker = mTracker.lock();
+        if (tracker) {
+            mStat->mReleased++;
+            tracker->onReleased(mGeneration);
+        }
+    }
+    virtual bool needsReleaseNotify() override { return true; }
+    virtual void onBuffersDiscarded(const std::vector<int32_t>&) override {}
+
+    std::weak_ptr<GraphicsTracker> mTracker;
+    std::shared_ptr<BqStatistics> mStat;
+    uint32_t mGeneration;
+};
+
+struct Frame {
+    AHardwareBuffer *buffer_;
+    sp<Fence> fence_;
+
+    Frame() : buffer_{nullptr}, fence_{nullptr} {}
+    Frame(AHardwareBuffer *buffer, sp<Fence> fence)
+            : buffer_(buffer), fence_(fence) {}
+    ~Frame() {
+        if (buffer_) {
+            AHardwareBuffer_release(buffer_);
+        }
+    }
+};
+
+struct FrameQueue {
+    bool mStopped;
+    bool mDrain;
+    std::queue<std::shared_ptr<Frame>> mQueue;
+    std::mutex mMutex;
+    std::condition_variable mCond;
+
+    FrameQueue() : mStopped{false}, mDrain{false} {}
+
+    bool queueItem(AHardwareBuffer *buffer, sp<Fence> fence) {
+        std::shared_ptr<Frame> frame = std::make_shared<Frame>(buffer, fence);
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            // Already stopped; the Frame destructor releases the buffer.
+            return false;
+        }
+        mQueue.emplace(frame);
+        l.unlock();
+        mCond.notify_all();
+        return true;
+    }
+
+    void stop(bool drain = false) {
+        bool stopped = false;
+        {
+            std::unique_lock<std::mutex> l(mMutex);
+            if (!mStopped) {
+                mStopped = true;
+                mDrain = drain;
+                stopped = true;
+            }
+            l.unlock();
+            if (stopped) {
+                mCond.notify_all();
+            }
+        }
+    }
+
+    bool waitItem(std::shared_ptr<Frame> *frame) {
+        while(true) {
+            std::unique_lock<std::mutex> l(mMutex);
+            if (!mDrain && mStopped) {
+                // stop without consuming the queue.
+                return false;
+            }
+            if (!mQueue.empty()) {
+                *frame = mQueue.front();
+                mQueue.pop();
+                return true;
+            } else if (mStopped) {
+                // stop after consuming the queue.
+                return false;
+            }
+            mCond.wait(l);
+        }
+    }
+};
+
+} // namespace anonymous
+
+class GraphicsTrackerTest : public ::testing::Test {
+public:
+    const uint64_t kTestUsageFlag = GRALLOC_USAGE_SW_WRITE_OFTEN;
+
+    void queueBuffer(FrameQueue *queue) {
+        while (true) {
+            std::shared_ptr<Frame> frame;
+            if (!queue->waitItem(&frame)) {
+                break;
+            }
+            uint64_t bid;
+            if (__builtin_available(android __ANDROID_API_T__, *)) {
+                if (AHardwareBuffer_getId(frame->buffer_, &bid) !=
+                        android::NO_ERROR) {
+                    break;
+                }
+            } else {
+                break;
+            }
+            android::status_t ret = frame->fence_->wait(-1);
+            if (ret != android::NO_ERROR) {
+                mTracker->deallocate(bid, frame->fence_);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+
+            std::shared_ptr<C2GraphicBlock> blk =
+                    _C2BlockFactory::CreateGraphicBlock(frame->buffer_);
+            if (!blk) {
+                mTracker->deallocate(bid, Fence::NO_FENCE);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+            IGraphicBufferProducer::QueueBufferInput input(
+                    0, false,
+                    HAL_DATASPACE_UNKNOWN, android::Rect(0, 0, 1, 1),
+                    NATIVE_WINDOW_SCALING_MODE_FREEZE, 0, Fence::NO_FENCE);
+            IGraphicBufferProducer::QueueBufferOutput output{};
+            c2_status_t res = mTracker->render(
+                    blk->share(C2Rect(1, 1), C2Fence()),
+                    input, &output);
+            if (res != C2_OK) {
+                mTracker->deallocate(bid, Fence::NO_FENCE);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+            if (output.bufferReplaced) {
+                mBqStat->mDropped++;
+            }
+            mBqStat->mQueued++;
+        }
+    }
+
+    void stopTrackerAfterUs(int us) {
+        ::usleep(us);
+        mTracker->stop();
+    }
+
+protected:
+    bool init(int maxDequeueCount) {
+        mTracker = GraphicsTracker::CreateGraphicsTracker(maxDequeueCount);
+        if (!mTracker) {
+            return false;
+        }
+        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+        if (!mProducer || !mConsumer) {
+            return false;
+        }
+        return true;
+    }
+    bool configure(sp<IProducerListener> producerListener,
+                   sp<IConsumerListener> consumerListener,
+                   int maxAcquiredCount = 1, bool controlledByApp = true) {
+        if (mConsumer->consumerConnect(
+                consumerListener, controlledByApp) != ::android::NO_ERROR) {
+            return false;
+        }
+        if (mConsumer->setMaxAcquiredBufferCount(maxAcquiredCount) != ::android::NO_ERROR) {
+            return false;
+        }
+        IGraphicBufferProducer::QueueBufferOutput qbo{};
+        if (mProducer->connect(producerListener,
+                          NATIVE_WINDOW_API_MEDIA, true, &qbo) != ::android::NO_ERROR) {
+            return false;
+        }
+        if (mProducer->setDequeueTimeout(0) != ::android::NO_ERROR) {
+            return false;
+        }
+        return true;
+    }
+
+    virtual void TearDown() override {
+        mBqStat->log();
+        mBqStat->clear();
+
+        if (mTracker) {
+            mTracker->stop();
+            mTracker.reset();
+        }
+        if (mProducer) {
+            mProducer->disconnect(NATIVE_WINDOW_API_MEDIA);
+        }
+        mProducer.clear();
+        mConsumer.clear();
+    }
+
+protected:
+    std::shared_ptr<BqStatistics> mBqStat = std::make_shared<BqStatistics>();
+    sp<IGraphicBufferProducer> mProducer;
+    sp<IGraphicBufferConsumer> mConsumer;
+    std::shared_ptr<GraphicsTracker> mTracker;
+};
+
+TEST_F(GraphicsTrackerTest, AllocateAndBlockedTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bid;
+
+    // Allocate and check dequeueable
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+            ASSERT_EQ(C2_OK, ret);
+            mBqStat->mDequeued++;
+            ASSERT_EQ(maxDequeueCount - (i + 1), mTracker->getCurDequeueable());
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
+            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
+            AHardwareBuffer_release(buf);
+        }
+    } else {
+        GTEST_SKIP();
+    }
+
+    // Allocate should be blocked
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+    ALOGD("alloc : err(%d, %d)", ret, C2_BLOCKING);
+    ASSERT_EQ(C2_BLOCKING, ret);
+    mBqStat->mBlocked++;
+    ASSERT_EQ(0, mTracker->getCurDequeueable());
+}
+
+TEST_F(GraphicsTrackerTest, AllocateAndDeallocateTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bid;
+    std::vector<uint64_t> bids;
+
+    // Allocate and store buffer id
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+            ASSERT_EQ(C2_OK, ret);
+            mBqStat->mDequeued++;
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
+            bids.push_back(bid);
+            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
+            AHardwareBuffer_release(buf);
+        }
+    } else {
+        GTEST_SKIP();
+    }
+
+    // Deallocate and check dequeueable
+    for (int i = 0; i < maxDequeueCount; ++i) {
+        ALOGD("dealloc : bufferId: %llu", (unsigned long long)bids[i]);
+        ret = mTracker->deallocate(bids[i], Fence::NO_FENCE);
+        ASSERT_EQ(C2_OK, ret);
+        ASSERT_EQ(i + 1, mTracker->getCurDequeueable());
+        mBqStat->mDiscarded++;
+    }
+}
+
+TEST_F(GraphicsTrackerTest, DropAndReleaseTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+    AHardwareBuffer *buf1, *buf2;
+    sp<Fence> fence1, fence2;
+
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence1);
+    ASSERT_EQ(C2_OK, ret);
+    mBqStat->mDequeued++;
+    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());
+
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence2);
+    ASSERT_EQ(C2_OK, ret);
+    mBqStat->mDequeued++;
+    ASSERT_EQ(maxDequeueCount - 2, mTracker->getCurDequeueable());
+
+    // Queue two buffers without consuming, one should be dropped
+    ASSERT_TRUE(frameQueue.queueItem(buf1, fence1));
+    ASSERT_TRUE(frameQueue.queueItem(buf2, fence2));
+
+    frameQueue.stop(true);
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+
+    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());
+
+    // Consume one buffer and release
+    BufferItem item;
+    ASSERT_EQ(OK, mConsumer->acquireBuffer(&item, 0));
+    ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
+            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence));
+    // Nothing to consume
+    ASSERT_NE(OK, mConsumer->acquireBuffer(&item, 0));
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+    ASSERT_EQ(1, mBqStat->mReleased);
+    ASSERT_EQ(1, mBqStat->mDropped);
+}
+
+TEST_F(GraphicsTrackerTest, RenderTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+    const int maxNumAlloc = 20;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+
+    int numAlloc = 0;
+
+    while (numAlloc < maxNumAlloc) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        if (!frameQueue.queueItem(buf, fence)) {
+            ALOGE("queue to render failed");
+            break;
+        }
+        ++numAlloc;
+    }
+
+    frameQueue.stop(true);
+    // Wait more than enough time (1 sec) to be sure all queued frames are rendered.
+    ::usleep(1000000);
+
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+    ASSERT_EQ(numAlloc, maxNumAlloc);
+    ASSERT_EQ(numAlloc, mBqStat->mDequeued);
+    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mQueued);
+    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mReleased + mBqStat->mDropped);
+}
+
+TEST_F(GraphicsTrackerTest, StopAndWaitTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 2;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf1, *buf2;
+    sp<Fence> fence;
+
+    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence));
+    mBqStat->mDequeued++;
+    AHardwareBuffer_release(buf1);
+
+    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence));
+    mBqStat->mDequeued++;
+    AHardwareBuffer_release(buf2);
+
+    ASSERT_EQ(0, mTracker->getCurDequeueable());
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(3000000000));
+
+    std::thread stopThread(&GraphicsTrackerTest::stopTrackerAfterUs, this, 500000);
+    ASSERT_EQ(C2_BAD_STATE, waitFence.wait(3000000000));
+
+    if (stopThread.joinable()) {
+        stopThread.join();
+    }
+}
+
+TEST_F(GraphicsTrackerTest, SurfaceChangeTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    const int maxNumAlloc = 20;
+
+    const int firstPassAlloc = 12;
+    const int firstPassRender = 8;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *bufs[maxNumAlloc];
+    sp<Fence> fences[maxNumAlloc];
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+    int numAlloc = 0;
+
+    for (int i = 0; i < firstPassRender; ++i) {
+        ASSERT_EQ(C2_OK, mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &bufs[i], &fences[i]));
+        mBqStat->mDequeued++;
+        numAlloc++;
+        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
+    }
+
+    while (numAlloc < firstPassAlloc) {
+        c2_status_t ret = mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &bufs[numAlloc], &fences[numAlloc]);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(numAlloc, firstPassAlloc);
+
+    // switching surface
+    sp<IGraphicBufferProducer> oldProducer = mProducer;
+    sp<IGraphicBufferConsumer> oldConsumer = mConsumer;
+    mProducer.clear();
+    mConsumer.clear();
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+    ASSERT_TRUE((bool)mProducer && (bool)mConsumer);
+
+    generation += 1;
+
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    ASSERT_EQ(OK, oldProducer->disconnect(NATIVE_WINDOW_API_MEDIA));
+    oldProducer.clear();
+    oldConsumer.clear();
+
+    for (int i = firstPassRender ; i < firstPassAlloc; ++i) {
+        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
+    }
+
+    while (numAlloc < maxNumAlloc) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        if (!frameQueue.queueItem(buf, fence)) {
+            ALOGE("queue to render failed");
+            break;
+        }
+        ++numAlloc;
+    }
+
+    ASSERT_EQ(numAlloc, maxNumAlloc);
+
+    frameQueue.stop(true);
+    // Wait more than enough time (1 sec) to be sure all queued frames are rendered.
+    ::usleep(1000000);
+
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+    // mReleased should not be checked here. IProducerListener::onBufferReleased()
+    // callbacks from the previous Surface may not be delivered after a new
+    // Surface is configured. Instead, check the number of dequeueable buffers
+    // and queueBuffer() calls.
+    ASSERT_EQ(numAlloc, mBqStat->mQueued);
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    for (int i = 0; i < maxDequeueCount; ++i) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+
+        ASSERT_EQ(C2_OK, mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate(
+            0, 0, 0, kTestUsageFlag, &bufs[0], &fences[0]));
+}
+
+TEST_F(GraphicsTrackerTest, maxDequeueIncreaseTest) {
+    uint32_t generation = 1;
+    int maxDequeueCount = 10;
+    int dequeueIncrease = 4;
+
+    int numAlloc = 0;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bids[maxDequeueCount];
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
+            AHardwareBuffer_release(buf);
+            mBqStat->mDequeued++;
+            numAlloc++;
+        }
+    } else {
+        GTEST_SKIP();
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[0], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+
+    maxDequeueCount += dequeueIncrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueIncrease + 1; ++i) {
+        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[1], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+
+    maxDequeueCount += dequeueIncrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueIncrease + 1; ++i) {
+        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+}
+
+TEST_F(GraphicsTrackerTest, maxDequeueDecreaseTest) {
+    uint32_t generation = 1;
+    int maxDequeueCount = 12;
+    int dequeueDecrease = 4;
+
+    int numAlloc = 0;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bids[maxDequeueCount];
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
+            AHardwareBuffer_release(buf);
+            mBqStat->mDequeued++;
+            numAlloc++;
+        }
+    } else {
+        GTEST_SKIP();
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    int discardIdx = 0;
+    maxDequeueCount -= dequeueDecrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueDecrease + 1; ++i) {
+        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+        mBqStat->mDiscarded++;
+    }
+    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+    mBqStat->mDequeued++;
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+    maxDequeueCount -= dequeueDecrease;
+
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueDecrease - 1; ++i) {
+        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+        mBqStat->mDiscarded++;
+    }
+    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+    mBqStat->mDequeued++;
+}
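
The cases above repeat one producer-side protocol: call GraphicsTracker::allocate()
until it returns C2_BLOCKING, then wait on the pipe-backed C2Fence obtained from
getWaitableFd() before retrying. Condensed into a single illustrative helper
(hypothetical name, error handling trimmed, not part of this patch), the pattern
looks roughly like this:

    void produceFrames(const std::shared_ptr<GraphicsTracker> &tracker, int frames) {
        int waitFd = -1;
        if (tracker->getWaitableFd(&waitFd) != C2_OK) {
            return;
        }
        C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
        for (int produced = 0; produced < frames; ) {
            AHardwareBuffer *buf = nullptr;
            sp<Fence> fence;
            c2_status_t err = tracker->allocate(
                    0 /* width */, 0 /* height */, 0 /* format */,
                    GRALLOC_USAGE_SW_WRITE_OFTEN, &buf, &fence);
            if (err == C2_BLOCKING) {
                // A buffer must be released back to the tracker first; wait and retry.
                (void)waitFence.wait(3000000000);
                continue;
            }
            if (err != C2_OK) {
                break;
            }
            // ... render or queue the buffer here ...
            AHardwareBuffer_release(buf);
            ++produced;
        }
    }
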
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index af2683b..9f57bfd 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -84,7 +84,7 @@
         "libbase",
         "libdmabufheap",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
     ],
 
     local_include_dirs: [
@@ -102,7 +102,7 @@
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder_ndk",
@@ -162,11 +162,12 @@
         "android.hardware.graphics.bufferqueue@2.0",
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
     ],
 
     shared_libs: [
+        "libbinder",
         "libbinder_ndk",
         "libui",
         "libdl",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 107ce89..971b5a5 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -318,6 +318,14 @@
         return reinterpret_cast<C2HandleAhwb *>(res);
     }
 
+    static uint32_t getPixelFormat(const C2Handle *const handle) {
+        if (handle == nullptr) {
+            return 0;
+        }
+        const ExtraData *xd = GetExtraData(handle);
+        return xd->format;
+    }
+
     static C2HandleAhwb* WrapNativeHandle(
             const native_handle_t *const handle,
             uint32_t width, uint32_t height, uint32_t format, uint64_t usage,
@@ -383,7 +391,7 @@
     }
 
     uint8_t *pointer = nullptr;
-    err = mapper.lock(handle, usage, bounds, (void **)&pointer, nullptr, nullptr);
+    err = mapper.lock(handle, usage, bounds, (void **)&pointer);
     if (err != NO_ERROR || pointer == nullptr) {
         return C2_CORRUPTED;
     }
@@ -899,7 +907,17 @@
 
 
 native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
-    return C2HandleGralloc::UnwrapNativeHandle(handle);
+    if (handle == nullptr) {
+        return nullptr;
+    }
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        return C2HandleGralloc::UnwrapNativeHandle(handle);
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        return C2HandleAhwb::UnwrapNativeHandle(handle);
+    }
+    ALOGE("tried to unwrap non c2 compatible handle");
+    return nullptr;
 }
 
 C2Handle *WrapNativeCodec2GrallocHandle(
@@ -911,7 +929,38 @@
 }
 
 uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle) {
-    return C2HandleGralloc::getPixelFormat(handle);
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        return C2HandleGralloc::getPixelFormat(handle);
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        return C2HandleAhwb::getPixelFormat(handle);
+    }
+    ALOGE("tried to extract pixelformat from non c2 compatible handle");
+    return 0;
+}
+
+bool ExtractMetadataFromCodec2GrallocHandle(
+        const C2Handle *const handle,
+        uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride) {
+    if (handle == nullptr) {
+        ALOGE("ExtractMetadata from nullptr");
+        return false;
+    }
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        uint32_t generation;
+        uint64_t igbp_id;
+        uint32_t igbp_slot;
+        (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+                                      &generation, &igbp_id, &igbp_slot);
+        return true;
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        uint64_t origId;
+        (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+        return true;
+    }
+    ALOGE("ExtractMetadata from non compatible handle");
+    return false;
 }
 
 bool MigrateNativeCodec2GrallocHandle(
@@ -1137,8 +1186,17 @@
         const C2Handle *const handle,
         uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
         uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
-    (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
-                                  generation, igbp_id, igbp_slot);
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+                                      generation, igbp_id, igbp_slot);
+        return;
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        uint64_t origId;
+        (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+        return;
+    }
+    ALOGE("Tried to extract metadata from non c2 compatible handle");
 }
 
 C2AllocatorGralloc::Impl::Impl(id_t id, bool bufferQueue)
@@ -1250,10 +1308,6 @@
 }
 
 
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle) {
-    return C2HandleAhwb::UnwrapNativeHandle(handle);
-}
-
 C2Handle *WrapNativeCodec2AhwbHandle(
         const native_handle_t *const handle,
         uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
@@ -1477,13 +1531,6 @@
     c2_status_t mInit;
 };
 
-void _UnwrapNativeCodec2AhwbMetadata(
-        const C2Handle *const handle,
-        uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
-        uint64_t *origId) {
-    (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, origId);
-}
-
 C2AllocatorAhwb::Impl::Impl(id_t id)
     : mInit(C2_OK) {
     // TODO: get this from allocator
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 52ebe25..5d50fc3 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -335,7 +335,8 @@
             p.reset();
         }
     } else {
-        ALOGE("Create sync fence from invalid fd");
+        ALOGV("Create sync fence from invalid fd");
+        return C2Fence();
     }
     return C2Fence(p);
 }
@@ -531,7 +532,9 @@
             p = SyncFenceImpl::CreateFromNativeHandle(handle);
             break;
         default:
-            ALOGD("Unsupported fence type %d", type);
+            ALOGV("Unsupported fence type %d", type);
+            // If the handle is malformed, close it here.
+            (void) native_handle_close(handle);
             // return a null-fence in this case
             break;
     }
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index 1a34c30..53b6262 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -22,8 +22,25 @@
 #include <C2Buffer.h>
 
 namespace android {
+// VNDK
+/**
+ * Extract pixel format from the extra data of gralloc handle.
+ *
+ * @return 0 when no valid pixel format exists.
+ */
+uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
 
 /**
+ * Extract metadata from the extra data of gralloc handle.
+ *
+ * @return {@code false} if extraction failed, {@code true} otherwise.
+ */
+bool ExtractMetadataFromCodec2GrallocHandle(
+    const C2Handle *const handle,
+    uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t  *stride);
+
+// Not for VNDK (system partition and inside vndk only)
+/**
  * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorGralloc.
  *
  * @param handle a handle allocated by C2AllocatorGralloc. This includes handles returned for a
@@ -46,13 +63,6 @@
         uint32_t generation = 0, uint64_t igbp_id = 0, uint32_t igbp_slot = 0);
 
 /**
- * Extract pixel format from the extra data of gralloc handle.
- *
- * @return 0 when no valid pixel format exists.
- */
-uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
-
-/**
  * When the gralloc handle is migrated to another bufferqueue, update
  * bufferqueue information.
  *
@@ -71,16 +81,6 @@
         uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot);
 
 /**
- * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorAhwb.
- *
- * @param handle a handle allocated by C2AllocatorAhwb. This includes handles returned for a
- * graphic block allocation handle based on an AHardwareBuffer.
- *
- * @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
- */
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle);
-
-/**
  * Wrap the gralloc handle and metadata based on AHardwareBuffer into Codec2 handle
  * recognized by C2AllocatorAhwb.
  *
@@ -92,14 +92,6 @@
         uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
         uint64_t origId);
 
-/**
- * \todo Get this from the buffer
- */
-void _UnwrapNativeCodec2AhwbMetadata(
-        const C2Handle *const handle,
-        uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride,
-        uint64_t *origId);
-
 class C2AllocatorGralloc : public C2Allocator {
 public:
     virtual id_t getId() const override;
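
A minimal caller sketch for the two functions moved into the VNDK section above,
assuming a valid handle and the usual logging/format-macro headers (the snippet is
illustrative and not part of this patch):

    uint32_t width = 0, height = 0, format = 0, stride = 0;
    uint64_t usage = 0;
    if (android::ExtractMetadataFromCodec2GrallocHandle(
            handle, &width, &height, &format, &usage, &stride)) {
        // Works for handles from C2AllocatorGralloc as well as C2AllocatorAhwb.
        ALOGV("buffer %ux%u format:%u stride:%u usage:%" PRIu64,
              width, height, format, stride, usage);
    }
    // Or, when only the pixel format is needed (0 means "no valid format"):
    uint32_t pixelFormat = android::ExtractFormatFromCodec2GrallocHandle(handle);
    (void)pixelFormat;
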
diff --git a/media/codec2/vndk/platform/C2IgbaBuffer.cpp b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
index 2051e8f..3622d5e 100644
--- a/media/codec2/vndk/platform/C2IgbaBuffer.cpp
+++ b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
@@ -192,28 +192,25 @@
     c2_status_t res = _fetchGraphicBlock(
             width, height, format, usage, kBlockingFetchTimeoutNs, &origId, block, &fence);
 
-    if (res == C2_BLOCKING) {
-        return C2_TIMED_OUT;
+    if (res == C2_TIMED_OUT) {
+        // The sync fence wait timed out. HALs usually treat C2_TIMED_OUT as an
+        // unrecoverable error, so return C2_BLOCKING to make the HAL retry.
+        return C2_BLOCKING;
     }
-    if (res != C2_OK) {
-        return res;
-    }
-    // TODO: bundle the fence to the block. Are API changes required?
-    res = fence.wait(kSyncFenceWaitNs);
-    if (res != C2_OK) {
-        bool aidlRet = true;
-        ::ndk::ScopedAStatus status = mIgba->deallocate(origId, &aidlRet);
-        ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
-              res, status.isOk(), aidlRet);
-    }
-    return C2_OK;
+    return res;
 }
 
 c2_status_t C2IgbaBlockPool::fetchGraphicBlock(
         uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block, C2Fence *fence) {
     uint64_t origId;
-    return _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+    c2_status_t res = _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+    if (res == C2_TIMED_OUT) {
+        *fence = C2Fence();
+        return C2_BLOCKING;
+    }
+    return res;
 }
 
 c2_status_t C2IgbaBlockPool::_fetchGraphicBlock(
@@ -263,10 +260,27 @@
             }
         }
 
-        *fence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
+        C2Fence syncFence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
         AHardwareBuffer *ahwb = allocation.buffer.release(); // This is acquired.
         CHECK(AHardwareBuffer_getId(ahwb, origId) == ::android::OK);
-        c2_status_t res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
+
+        // We wait for the sync fence here for backward compatibility.
+        // A H/W-based sync fence could be returned instead to improve pipeline latency.
+        //
+        // TODO: Add a component configuration for returning the sync fence
+        // from fetchGraphicBlock() as the C2Fence output param (b/322283520).
+        // In that case, C2_OK must be returned together with the GraphicBlock.
+        c2_status_t res = syncFence.wait(kSyncFenceWaitNs);
+        if (res != C2_OK) {
+            AHardwareBuffer_release(ahwb);
+            bool aidlRet = true;
+            ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
+            ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
+                  res, status.isOk(), aidlRet);
+            return C2_TIMED_OUT;
+        }
+
+        res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
         AHardwareBuffer_release(ahwb);
         if (res != C2_OK) {
             bool aidlRet = true;
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index e2c1878..aa3ae5e 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..52a5914 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,7 +12,10 @@
     name: "input_monitor",
     gtest: false,
     srcs: ["src/input_monitor.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -20,7 +24,10 @@
     name: "input_monitor_callback",
     gtest: false,
     srcs: ["src/input_monitor_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..6552113 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,13 +12,16 @@
     name: "aaudio_loopback",
     gtest: false,
     srcs: ["src/loopback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     static_libs: ["libsndfile"],
     include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
     shared_libs: [
         "libaaudio",
         "libaudioutils",
-        "liblog"
-        ],
+        "liblog",
+    ],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..fe78112 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -10,7 +11,10 @@
 cc_test {
     name: "write_sine",
     srcs: ["src/write_sine.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -18,7 +22,10 @@
 cc_test {
     name: "write_sine_callback",
     srcs: ["src/write_sine_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 46c4148..6d94f38 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index e1ff3eb..1b06ea7 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -19,6 +19,8 @@
 #include "aaudio/AAudioTesting.h"
 #include <fuzzer/FuzzedDataProvider.h>
 
+#include <functional>
+
 constexpr int32_t kRandomStringLength = 256;
 constexpr int32_t kMaxRuns = 100;
 constexpr int64_t kNanosPerMillisecond = 1000 * 1000;
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index c3b32e6..9d9b574 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -2011,10 +2011,20 @@
         __INTRODUCED_IN(28);
 
 /**
- * Passes back the time at which a particular frame was presented.
+ * Returns the time at which a particular frame was played on a speaker or headset,
+ * or was recorded on a microphone.
+ *
  * This can be used to synchronize audio with video or MIDI.
  * It can also be used to align a recorded stream with a playback stream.
  *
+ * The framePosition is an index into the stream of audio data.
+ * The first frame played or recorded is at framePosition 0.
+ *
+ * These framePositions are in the same units as the values returned by
+ * AAudioStream_getFramesRead() or AAudioStream_getFramesWritten().
+ * A "frame" is a set of audio sample values that are played simultaneously.
+ * For example, a stereo stream has two samples in a frame, left and right.
+ *
  * Timestamps are only valid when the stream is in {@link #AAUDIO_STREAM_STATE_STARTED}.
  * {@link #AAUDIO_ERROR_INVALID_STATE} will be returned if the stream is not started.
  * Note that because requestStart() is asynchronous, timestamps will not be valid until
@@ -2030,8 +2040,8 @@
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param clockid CLOCK_MONOTONIC or CLOCK_BOOTTIME
- * @param framePosition pointer to a variable to receive the position
- * @param timeNanoseconds pointer to a variable to receive the time
+ * @param[out] framePosition pointer to a variable to receive the position
+ * @param[out] timeNanoseconds pointer to a variable to receive the time
  * @return {@link #AAUDIO_OK} or a negative error
  */
 AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* _Nonnull stream,
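
As a rough illustration of the documentation above (not part of this header), an
output latency estimate can be derived by comparing the presented frame position
with the number of frames written so far, given an opened and started
AAudioStream* named stream:

    int64_t framePosition = 0;
    int64_t timeNanos = 0;
    if (AAudioStream_getTimestamp(stream, CLOCK_MONOTONIC,
                                  &framePosition, &timeNanos) == AAUDIO_OK) {
        int64_t framesWritten = AAudioStream_getFramesWritten(stream);
        int32_t sampleRate = AAudioStream_getSampleRate(stream);
        // Frames written but not yet presented, expressed in milliseconds.
        double latencyMillis =
                (framesWritten - framePosition) * 1000.0 / sampleRate;
        (void)latencyMillis;
    }
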
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 01d97b6..d67ec70 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -49,12 +49,6 @@
 };
 typedef int32_t aaudio_policy_t;
 
-// Internal error codes. Only used by the framework.
-enum {
-    AAUDIO_INTERNAL_ERROR_BASE = -1000,
-    AAUDIO_ERROR_STANDBY,
-};
-
 /**
  * Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
  * or the older "Legacy" data path.
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index fcb376c..d2cb265 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -128,7 +129,7 @@
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
         "-format-style=file",
-    ]
+    ],
 }
 
 cc_library {
@@ -250,7 +251,7 @@
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
         "-format-style=file",
-    ]
+    ],
 }
 
 aidl_interface {
@@ -274,8 +275,7 @@
         "shared-file-region-aidl",
         "framework-permission-aidl",
     ],
-    backend:
-    {
+    backend: {
         java: {
             sdk_version: "module_current",
         },
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index e1d517e..0c55fca 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -72,6 +72,12 @@
      */
     int32_t pull(void *destination, int32_t targetFramesToRead);
 
+    // Reset the entire graph so that volume ramps start at their
+    // target value and sample rate converters start with no phase offset.
+    void reset() {
+        mSink->pullReset();
+    }
+
     /**
      * Set numFramesToWrite frames from the source into the flowgraph.
      * Then, attempt to read targetFramesToRead from the flowgraph.
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 52925d9..7648e25 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -199,6 +199,7 @@
         if (getSampleRate() != getDeviceSampleRate()) {
             ALOGD("%s - skipping sample rate converter. SR = %d, Device SR = %d", __func__,
                     getSampleRate(), getDeviceSampleRate());
+            result = AAUDIO_ERROR_INVALID_RATE;
             goto error;
         }
     }
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5bac2ca..5d4c3d4 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -100,6 +100,10 @@
 }
 
 void AudioStreamInternalPlay::prepareBuffersForStart() {
+    // Reset volume ramps to avoid a burst of noise at start.
+    // This is called here instead of in AudioStreamInternal so that
+    // it is easier to backport.
+    mFlowGraph.reset();
     // Prevent stale data from being played.
     mAudioEndpoint->eraseDataMemory();
 }
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index a39e90e..430ba83 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -18,7 +18,6 @@
 //#define LOG_NDEBUG 0
 #include <log/log.h>
 
-#define __STDC_FORMAT_MACROS
 #include <inttypes.h>
 #include <stdint.h>
 #include <algorithm>
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 6c22744..8af49b4 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -22,6 +22,14 @@
 
 namespace aaudio {
 
+// Internal error codes. Only used by the framework.
+enum {
+    AAUDIO_INTERNAL_ERROR_BASE = -1000,
+    AAUDIO_ERROR_STANDBY,
+    AAUDIO_ERROR_ALREADY_CLOSED,
+};
+
 aaudio_policy_t AudioGlobal_getMMapPolicy();
 aaudio_result_t AudioGlobal_setMMapPolicy(aaudio_policy_t policy);
 
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 59fdabc..d729047 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -69,16 +69,24 @@
     audio_channel_mask_t channelMask =
             AAudio_getChannelMaskForOpen(getChannelMask(), getSamplesPerFrame(), false /*isInput*/);
 
+    // Set flags based on selected parameters.
     audio_output_flags_t flags;
     aaudio_performance_mode_t perfMode = getPerformanceMode();
     switch(perfMode) {
-        case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+        case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY: {
             // Bypass the normal mixer and go straight to the FAST mixer.
-            // If the app asks for a sessionId then it means they want to use effects.
-            // So don't use RAW flag.
-            flags = (audio_output_flags_t) ((requestedSessionId == AAUDIO_SESSION_ID_NONE)
-                    ? (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_RAW)
-                    : (AUDIO_OUTPUT_FLAG_FAST));
+            // Some Usages need RAW mode so they can get the lowest possible latency.
+            // Other Usages should avoid RAW because it can interfere with
+            // dual sink routing or other features.
+            bool usageBenefitsFromRaw = getUsage() == AAUDIO_USAGE_GAME ||
+                    getUsage() == AAUDIO_USAGE_MEDIA;
+            // If an app does not ask for a sessionId then there will be no effects.
+            // So we can use the RAW flag.
+            flags = (audio_output_flags_t) (((requestedSessionId == AAUDIO_SESSION_ID_NONE)
+                                             && usageBenefitsFromRaw)
+                                            ? (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_RAW)
+                                            : (AUDIO_OUTPUT_FLAG_FAST));
+        }
             break;
 
         case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
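
From the application side, the conditions in the comment above translate into a
builder configuration roughly like the sketch below (error handling omitted):
LOW_LATENCY plus a GAME or MEDIA usage and no explicit session id lets the
framework choose FAST|RAW.

    AAudioStreamBuilder *builder = nullptr;
    AAudio_createStreamBuilder(&builder);
    AAudioStreamBuilder_setPerformanceMode(builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
    AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_GAME);
    // No AAudioStreamBuilder_setSessionId() call: requesting a session id
    // implies effects, which rules out the RAW path.
    AAudioStream *stream = nullptr;
    AAudioStreamBuilder_openStream(builder, &stream);
    AAudioStreamBuilder_delete(builder);
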
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index d59afef..5ec8276 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -183,9 +184,9 @@
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_full_queue.cpp"],
     shared_libs: [
-		"libaaudio",
-		"liblog"
-	],
+        "libaaudio",
+        "liblog",
+    ],
 }
 
 cc_test {
@@ -248,3 +249,30 @@
     srcs: ["test_idle_disconnected_shared_stream.cpp"],
     shared_libs: ["libaaudio"],
 }
+
+cc_test {
+    name: "test_multiple_close_simultaneously",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaaudio_tests_defaults",
+    ],
+    srcs: ["test_multiple_close_simultaneously.cpp"],
+    shared_libs: [
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaaudio",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    // This test runs for 1 minute to ensure no crash happens.
+    // Set the timeout to 2 minutes to allow the test to complete.
+    test_options: {
+        test_runner_options: [
+            {
+                name: "native-test-timeout",
+                value: "2m",
+            },
+        ],
+    },
+}
diff --git a/media/libaaudio/tests/test_multiple_close_simultaneously.cpp b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
new file mode 100644
index 0000000..f6351b6
--- /dev/null
+++ b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_multiple_close_simultaneously"
+
+#include <chrono>
+#include <condition_variable>
+#include <shared_mutex>
+#include <string>
+#include <thread>
+
+#include <gtest/gtest.h>
+
+#include <binder/IBinder.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/IAAudioService.h>
+#include <aaudio/StreamRequest.h>
+#include <aaudio/StreamParameters.h>
+
+using namespace android;
+using namespace aaudio;
+
+#define AAUDIO_SERVICE_NAME "media.aaudio"
+
+static constexpr int THREAD_NUM = 2;
+static constexpr auto TEST_DURATION = std::chrono::minutes(1);
+
+static std::string sError;
+static bool sTestPassed = true;
+
+struct Signal {
+    std::atomic_int value{0};
+    std::shared_mutex lock;
+    std::condition_variable_any cv;
+};
+
+class AAudioServiceDeathRecipient : public IBinder::DeathRecipient {
+public:
+    void binderDied(const wp<IBinder>& who __unused) override {
+        sError = "AAudioService is dead";
+        ALOGE("%s", sError.c_str());
+        sTestPassed = false;
+    }
+};
+
+sp<IAAudioService> getAAudioService(const sp<IBinder::DeathRecipient>& recipient) {
+    auto sm = defaultServiceManager();
+    if (sm == nullptr) {
+        sError = "Cannot get service manager";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
+    if (binder == nullptr) {
+        sError = "Cannot get aaudio service";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    if (binder->linkToDeath(recipient) != NO_ERROR) {
+        sError = "Cannot link to binder death";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    return interface_cast<IAAudioService>(binder);
+}
+
+void openAndMultipleClose(const sp<IAAudioService>& aaudioService) {
+    auto start = std::chrono::system_clock::now();
+    bool hasFailedOpening = false;
+    while (sTestPassed && std::chrono::system_clock::now() - start < TEST_DURATION) {
+        StreamRequest inRequest;
+        StreamParameters outParams;
+        int32_t handle = 0;
+        inRequest.attributionSource.uid = getuid();
+        inRequest.attributionSource.pid = getpid();
+        inRequest.attributionSource.token = sp<BBinder>::make();
+        auto status = aaudioService->openStream(inRequest, &outParams, &handle);
+        if (!status.isOk()) {
+            sError = "Cannot open stream, it can be caused by service death";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        if (handle <= 0) {
+            sError = "Cannot get stream handle after open, returned handle"
+                    + std::to_string(handle);
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        hasFailedOpening = false;
+
+        Signal isReady;
+        Signal startWork;
+        Signal isCompleted;
+        std::unique_lock readyLock(isReady.lock);
+        std::unique_lock completedLock(isCompleted.lock);
+        for (int i = 0; i < THREAD_NUM; ++i) {
+            std::thread closeStream([aaudioService, handle, &isReady, &startWork, &isCompleted] {
+                isReady.value++;
+                isReady.cv.notify_one();
+                {
+                    std::shared_lock<std::shared_mutex> _l(startWork.lock);
+                    startWork.cv.wait(_l, [&startWork] { return startWork.value.load() == 1; });
+                }
+                int32_t result;
+                aaudioService->closeStream(handle, &result);
+                isCompleted.value++;
+                isCompleted.cv.notify_one();
+            });
+            closeStream.detach();
+        }
+        isReady.cv.wait(readyLock, [&isReady] { return isReady.value == THREAD_NUM; });
+        {
+            std::unique_lock startWorkLock(startWork.lock);
+            startWork.value.store(1);
+        }
+        startWork.cv.notify_all();
+        isCompleted.cv.wait_for(completedLock,
+                                std::chrono::milliseconds(1000),
+                                [&isCompleted] { return isCompleted.value == THREAD_NUM; });
+        if (isCompleted.value != THREAD_NUM) {
+            sError = "Close is not completed within 1 second";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+    }
+}
+
+TEST(test_multiple_close_simultaneously, open_multiple_close) {
+    const auto recipient = sp<AAudioServiceDeathRecipient>::make();
+    auto aaudioService = getAAudioService(recipient);
+    ASSERT_NE(nullptr, aaudioService) << sError;
+    openAndMultipleClose(aaudioService);
+    ASSERT_TRUE(sTestPassed) << sError;
+}
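
For reference, the open/close stress above coordinates its worker threads entirely through the Signal struct: a readiness counter, a start gate, and a completion counter, each built from an atomic int, a shared_mutex, and a condition_variable_any. The standalone sketch below mirrors that pattern; kThreads is a stand-in for THREAD_NUM, the closeStream() call is replaced by a comment, and the workers are joined here instead of detached as in the test.

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <shared_mutex>
#include <thread>
#include <vector>

struct Signal {
    std::atomic_int value{0};
    std::shared_mutex lock;
    std::condition_variable_any cv;
};

int main() {
    constexpr int kThreads = 100;  // stand-in for THREAD_NUM
    Signal isReady, startWork, isCompleted;

    std::unique_lock readyLock(isReady.lock);
    std::unique_lock completedLock(isCompleted.lock);
    std::vector<std::thread> workers;  // the test detaches instead and relies on the 1 s timeout
    for (int i = 0; i < kThreads; ++i) {
        workers.emplace_back([&] {
            isReady.value++;
            isReady.cv.notify_one();
            {
                // Block until the main thread opens the start gate for everyone at once.
                std::shared_lock<std::shared_mutex> _l(startWork.lock);
                startWork.cv.wait(_l, [&] { return startWork.value.load() == 1; });
            }
            // Here the test calls aaudioService->closeStream(handle, &result).
            isCompleted.value++;
            isCompleted.cv.notify_one();
        });
    }
    // Wait until every worker has checked in, then release them simultaneously.
    isReady.cv.wait(readyLock, [&] { return isReady.value == kThreads; });
    {
        std::unique_lock startWorkLock(startWork.lock);
        startWork.value.store(1);
    }
    startWork.cv.notify_all();
    // Give the workers up to one second to finish, mirroring the test's check.
    isCompleted.cv.wait_for(completedLock, std::chrono::milliseconds(1000),
                            [&] { return isCompleted.value == kThreads; });
    std::printf("completed: %d of %d\n", isCompleted.value.load(), kThreads);
    for (auto& t : workers) t.join();
    return 0;
}
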
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 658bf63..90910a1 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -52,7 +53,7 @@
         "AudioPolicy.cpp",
         "AudioProductStrategy.cpp",
         "AudioVolumeGroup.cpp",
-        "PolicyAidlConversion.cpp"
+        "PolicyAidlConversion.cpp",
     ],
     defaults: [
         "latest_android_media_audio_common_types_cpp_export_shared",
@@ -121,6 +122,7 @@
         "latest_android_media_audio_common_types_cpp_shared",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -185,6 +187,7 @@
         "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
     ],
     sanitize: {
         misc_undefined: [
@@ -217,6 +220,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "latest_android_media_audio_common_types_cpp_export_shared",
     ],
 }
@@ -330,6 +334,7 @@
         },
     },
 }
+
 aidl_interface {
     name: "audiopolicy-types-aidl",
     unstable: true,
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index d9fd58c..1417182 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -60,9 +60,13 @@
 }
 
 // Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
-int AudioProductStrategy::attributesMatchesScore(const audio_attributes_t refAttributes,
-                                                 const audio_attributes_t clientAttritubes)
+int AudioProductStrategy::attributesMatchesScore(audio_attributes_t refAttributes,
+                                                 audio_attributes_t clientAttritubes)
 {
+    refAttributes.flags = static_cast<audio_flags_mask_t>(
+            refAttributes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
+    clientAttritubes.flags = static_cast<audio_flags_mask_t>(
+            clientAttritubes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
     if (refAttributes == clientAttritubes) {
         return MATCH_EQUALS;
     }
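
The change above masks both attribute sets with AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION before comparing them, so flags that play no role in strategy selection can no longer prevent an exact match. A minimal sketch of that masking idea, using hypothetical bit values rather than the real audio_flags_mask_t constants:

#include <cstdint>
#include <cstdio>

int main() {
    constexpr uint32_t kAffectsStrategySelection = 0x3;  // hypothetical mask
    uint32_t refFlags    = 0x1;         // relevant bit only
    uint32_t clientFlags = 0x1 | 0x10;  // same relevant bit plus an unrelated bit

    // Raw comparison: the unrelated bit breaks the match.
    std::printf("raw equal: %d\n", refFlags == clientFlags);

    // Masked comparison: only the bits that affect strategy selection take part.
    std::printf("masked equal: %d\n",
                (refFlags & kAffectsStrategySelection) ==
                (clientFlags & kAffectsStrategySelection));
    return 0;
}
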
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 5bfdd5f..d1b1849 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -22,6 +22,7 @@
 #include <android/media/IAudioPolicyService.h>
 #include <android/media/AudioMixUpdate.h>
 #include <android/media/BnCaptureStateListener.h>
+#include <android_media_audiopolicy.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <binder/IPCThreadState.h>
@@ -44,6 +45,8 @@
 
 // ----------------------------------------------------------------------------
 
+namespace audio_flags = android::media::audiopolicy;
+
 namespace android {
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
@@ -62,115 +65,184 @@
 using media::audio::common::AudioUsage;
 using media::audio::common::Int;
 
-// client singleton for AudioFlinger binder interface
-Mutex AudioSystem::gLock;
-Mutex AudioSystem::gLockErrorCallbacks;
-Mutex AudioSystem::gLockAPS;
-sp<IAudioFlinger> AudioSystem::gAudioFlinger;
-sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
+std::mutex AudioSystem::gMutex;
 dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
 routing_callback AudioSystem::gRoutingCallback = NULL;
 vol_range_init_req_callback AudioSystem::gVolRangeInitReqCallback = NULL;
 
-// Required to be held while calling into gSoundTriggerCaptureStateListener.
-class CaptureStateListenerImpl;
+std::mutex AudioSystem::gApsCallbackMutex;
+std::mutex AudioSystem::gErrorCallbacksMutex;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
 
-Mutex gSoundTriggerCaptureStateListenerLock;
-sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
+std::mutex AudioSystem::gSoundTriggerMutex;
+sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
 
-// Binder for the AudioFlinger service that's passed to this client process from the system server.
+// Sets the Binder for the AudioFlinger service, passed to this client process
+// from the system server.
 // This allows specific isolated processes to access the audio system. Currently used only for the
 // HotwordDetectionService.
-static sp<IBinder> gAudioFlingerBinder = nullptr;
+template <typename ServiceInterface, typename Client, typename AidlInterface,
+        typename ServiceTraits>
+class ServiceHandler {
+public:
+    sp<ServiceInterface> getService(bool canStartThreadPool = true)
+            EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS {  // std::unique_lock
+        sp<ServiceInterface> service;
+        sp<Client> client;
 
-void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
-    if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
-        ALOGE("setAudioFlingerBinder: received a binder of type %s",
-              String8(audioFlinger->getInterfaceDescriptor()).c_str());
-        return;
-    }
-    Mutex::Autolock _l(gLock);
-    if (gAudioFlinger != nullptr) {
-        ALOGW("setAudioFlingerBinder: ignoring; AudioFlinger connection already established.");
-        return;
-    }
-    gAudioFlingerBinder = audioFlinger;
-}
-
-static sp<IAudioFlinger> gLocalAudioFlinger; // set if we are local.
-
-status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
-    Mutex::Autolock _l(gLock);
-    if (gAudioFlinger != nullptr) return INVALID_OPERATION;
-    gLocalAudioFlinger = af;
-    return OK;
-}
-
-// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
-    sp<IAudioFlinger> af;
-    sp<AudioFlingerClient> afc;
-    bool reportNoError = false;
-    {
-        Mutex::Autolock _l(gLock);
-        if (gAudioFlinger != nullptr) {
-            return gAudioFlinger;
+        bool reportNoError = false;
+        {
+            std::lock_guard _l(mMutex);
+            if (mService != nullptr) {
+                return mService;
+            }
         }
 
-        if (gAudioFlingerClient == nullptr) {
-            gAudioFlingerClient = sp<AudioFlingerClient>::make();
+        std::unique_lock ul_only1thread(mSingleGetter);
+        std::unique_lock ul(mMutex);
+        if (mService != nullptr) {
+            return mService;
+        }
+        if (mClient == nullptr) {
+            mClient = sp<Client>::make();
         } else {
             reportNoError = true;
         }
+        while (true) {
+            mService = mLocalService;
+            if (mService != nullptr) break;
 
-        if (gLocalAudioFlinger != nullptr) {
-            gAudioFlinger = gLocalAudioFlinger;
-        } else {
-            sp<IBinder> binder;
-            if (gAudioFlingerBinder != nullptr) {
-                binder = gAudioFlingerBinder;
-            } else {
-                sp<IServiceManager> sm = defaultServiceManager();
-                binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+            sp<IBinder> binder = mBinder;
+            if (binder == nullptr) {
+                sp<IServiceManager> sm = defaultServiceManager();
+                binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
                 if (binder == nullptr) {
-                    return nullptr;
+                    ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+
+                    // If the condition variable is present, setLocalService() and
+                    // setBinder() are allowed to use it to notify us.
+                    if (mCvGetter == nullptr) {
+                        mCvGetter = std::make_shared<std::condition_variable>();
+                    }
+                    mCvGetter->wait_for(ul, std::chrono::seconds(1));
+                    continue;
                 }
             }
-            binder->linkToDeath(gAudioFlingerClient);
-            const auto afs = interface_cast<media::IAudioFlingerService>(binder);
-            LOG_ALWAYS_FATAL_IF(afs == nullptr);
-            gAudioFlinger = sp<AudioFlingerClientAdapter>::make(afs);
+            binder->linkToDeath(mClient);
+            auto aidlInterface = interface_cast<AidlInterface>(binder);
+            LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
+            if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
+                mService = std::move(aidlInterface);
+            } else /* constexpr */ {
+                mService = ServiceTraits::createServiceAdapter(aidlInterface);
+            }
+            break;
         }
-        afc = gAudioFlingerClient;
-        af = gAudioFlinger;
-        // Make sure callbacks can be received by gAudioFlingerClient
-        if(canStartThreadPool) {
+        if (mCvGetter) mCvGetter.reset();  // remove condition variable.
+        client = mClient;
+        service = mService;
+        // Make sure callbacks can be received by the client
+        if (canStartThreadPool) {
             ProcessState::self()->startThreadPool();
         }
+        ul.unlock();
+        ul_only1thread.unlock();
+        ServiceTraits::onServiceCreate(service, client);
+        if (reportNoError) AudioSystem::reportError(NO_ERROR);
+        return service;
     }
-    const int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    af->registerClient(afc);
-    IPCThreadState::self()->restoreCallingIdentity(token);
-    if (reportNoError) reportError(NO_ERROR);
-    return af;
+
+    status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        // we allow clearing once set, but not a double non-null set.
+        if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
+        mLocalService = service;
+        if (mCvGetter) mCvGetter->notify_one();
+        return OK;
+    }
+
+    sp<Client> getClient() EXCLUDES(mMutex) {
+        const auto service = getService();
+        if (service == nullptr) return nullptr;
+        std::lock_guard _l(mMutex);
+        return mClient;
+    }
+
+    void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        if (mService != nullptr) {
+            ALOGW("%s: ignoring; %s connection already established.",
+                    __func__, ServiceTraits::SERVICE_NAME);
+            return;
+        }
+        mBinder = binder;
+        if (mCvGetter) mCvGetter->notify_one();
+    }
+
+    void clearService() EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        mService.clear();
+        if (mClient) ServiceTraits::onClearService(mClient);
+    }
+
+private:
+    std::mutex mSingleGetter;
+    std::mutex mMutex;
+    std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
+    sp<IBinder> mBinder GUARDED_BY(mMutex);
+    sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
+    sp<ServiceInterface> mService GUARDED_BY(mMutex);
+    sp<Client> mClient GUARDED_BY(mMutex);
+};
+
+struct AudioFlingerTraits {
+    static void onServiceCreate(
+            const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+        const int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        af->registerClient(afc);
+        IPCThreadState::self()->restoreCallingIdentity(token);
+    }
+
+    static sp<IAudioFlinger> createServiceAdapter(
+            const sp<media::IAudioFlingerService>& aidlInterface) {
+        return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+    }
+
+    static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
+        afc->clearIoCache();
+    }
+
+    static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
+        AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
+        AudioFlingerTraits> gAudioFlingerServiceHandler;
+
+sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+    return gAudioFlingerServiceHandler.getService();
 }
 
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger() {
-    return getAudioFlingerImpl();
+sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
+    return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
 }
 
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger_for_fuzzer() {
-    return getAudioFlingerImpl(false);
+sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
+    return gAudioFlingerServiceHandler.getClient();
 }
 
-const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
-    // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    if (af == 0) return 0;
-    Mutex::Autolock _l(gLock);
-    return gAudioFlingerClient;
+void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
+    if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
+        ALOGE("%s: received a binder of type %s",
+                __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
+        return;
+    }
+    gAudioFlingerServiceHandler.setBinder(audioFlinger);
+}
+
+status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
+    return gAudioFlingerServiceHandler.setLocalService(af);
 }
 
 sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
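
The ServiceHandler template introduced above centralizes the previously duplicated "fetch service, link to death, adapt the AIDL interface, register the client" logic behind per-service trait structs (AudioFlingerTraits, AudioPolicyTraits). The sketch below shows the overall shape with the Binder plumbing replaced by plain std::shared_ptr factories; it omits the condition-variable wait the real handler uses so setBinder()/setLocalService() can wake a pending getService(). All names in the sketch (FakeService, FakeServiceTraits, gFakeServiceHandler) are hypothetical.

#include <cstdio>
#include <memory>
#include <mutex>

struct FakeService {
    void ping() { std::puts("ping"); }
};

template <typename Service, typename Traits>
class ServiceHandler {
public:
    std::shared_ptr<Service> getService() {
        std::lock_guard _l(mMutex);
        if (mService == nullptr) {
            mService = Traits::connect();     // real code: waitForService() + AIDL adapter
            Traits::onServiceCreate(mService);
        }
        return mService;
    }
    void clearService() {
        std::lock_guard _l(mMutex);
        mService.reset();                     // next getService() reconnects lazily
    }
private:
    std::mutex mMutex;
    std::shared_ptr<Service> mService;
};

struct FakeServiceTraits {
    static std::shared_ptr<FakeService> connect() {
        return std::make_shared<FakeService>();
    }
    static void onServiceCreate(const std::shared_ptr<FakeService>&) {
        std::puts("registered client");       // real code: registerClient() with cleared identity
    }
};

ServiceHandler<FakeService, FakeServiceTraits> gFakeServiceHandler;

int main() {
    gFakeServiceHandler.getService()->ping();  // connects on first use
    gFakeServiceHandler.clearService();        // e.g. on binder death
    gFakeServiceHandler.getService()->ping();  // reconnects
    return 0;
}
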
@@ -192,41 +264,41 @@
 // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
 
 status_t AudioSystem::muteMicrophone(bool state) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMicMute(state);
 }
 
 status_t AudioSystem::isMicrophoneMuted(bool* state) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *state = af->getMicMute();
     return NO_ERROR;
 }
 
 status_t AudioSystem::setMasterVolume(float value) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterVolume(value);
     return NO_ERROR;
 }
 
 status_t AudioSystem::setMasterMute(bool mute) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterMute(mute);
     return NO_ERROR;
 }
 
 status_t AudioSystem::getMasterVolume(float* volume) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *volume = af->masterVolume();
     return NO_ERROR;
 }
 
 status_t AudioSystem::getMasterMute(bool* mute) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *mute = af->masterMute();
     return NO_ERROR;
@@ -235,7 +307,7 @@
 status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
                                       audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setStreamVolume(stream, value, output);
     return NO_ERROR;
@@ -243,7 +315,7 @@
 
 status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setStreamMute(stream, mute);
     return NO_ERROR;
@@ -252,7 +324,7 @@
 status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
                                       audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *volume = af->streamVolume(stream, output);
     return NO_ERROR;
@@ -260,7 +332,7 @@
 
 status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *mute = af->streamMute(stream);
     return NO_ERROR;
@@ -268,25 +340,25 @@
 
 status_t AudioSystem::setMode(audio_mode_t mode) {
     if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMode(mode);
 }
 
 status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setSimulateDeviceConnections(enabled);
 }
 
 status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setParameters(ioHandle, keyValuePairs);
 }
 
 String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     String8 result = String8("");
     if (af == 0) return result;
 
@@ -305,23 +377,23 @@
 // convert volume steps to natural log scale
 
 // change this value to change volume scaling
-static const float dBPerStep = 0.5f;
+constexpr float kdBPerStep = 0.5f;
 // shouldn't need to touch these
-static const float dBConvert = -dBPerStep * 2.302585093f / 20.0f;
-static const float dBConvertInverse = 1.0f / dBConvert;
+constexpr float kdBConvert = -kdBPerStep * 2.302585093f / 20.0f;
+constexpr float kdBConvertInverse = 1.0f / kdBConvert;
 
 float AudioSystem::linearToLog(int volume) {
-    // float v = volume ? exp(float(100 - volume) * dBConvert) : 0;
+    // float v = volume ? exp(float(100 - volume) * kdBConvert) : 0;
     // ALOGD("linearToLog(%d)=%f", volume, v);
     // return v;
-    return volume ? exp(float(100 - volume) * dBConvert) : 0;
+    return volume ? exp(float(100 - volume) * kdBConvert) : 0;
 }
 
 int AudioSystem::logToLinear(float volume) {
-    // int v = volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+    // int v = volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
     // ALOGD("logTolinear(%d)=%f", v, volume);
     // return v;
-    return volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+    return volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
 }
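
As a rough check on the renamed constants: each volume step corresponds to about 0.5 dB, so linearToLog(100) is unity gain and linearToLog(50) sits roughly 25 dB lower, with logToLinear() inverting the mapping. A standalone recomputation, assuming only the constants and formulas shown above:

#include <cmath>
#include <cstdio>

constexpr float kdBPerStep = 0.5f;
constexpr float kdBConvert = -kdBPerStep * 2.302585093f / 20.0f;  // 2.302585093 = ln(10)
constexpr float kdBConvertInverse = 1.0f / kdBConvert;

float linearToLog(int volume) {
    return volume ? std::exp(float(100 - volume) * kdBConvert) : 0;
}

int logToLinear(float volume) {
    return volume ? 100 - int(kdBConvertInverse * std::log(volume) + 0.5) : 0;
}

int main() {
    std::printf("linearToLog(100) = %f\n", linearToLog(100));  // 1.0 (unity gain)
    std::printf("linearToLog(50)  = %f\n", linearToLog(50));   // ~0.056, i.e. about -25 dB
    std::printf("round trip of 50 = %d\n", logToLinear(linearToLog(50)));  // 50
    return 0;
}
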
 
 /* static */ size_t AudioSystem::calculateMinFrameCount(
@@ -366,7 +438,7 @@
 
 status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
                                       uint32_t* samplingRate) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -401,7 +473,7 @@
 
 status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
                                     size_t* frameCount) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -436,7 +508,7 @@
 
 status_t AudioSystem::getLatency(audio_io_handle_t output,
                                  uint32_t* latency) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
     if (outputDesc == 0) {
@@ -460,21 +532,21 @@
 }
 
 status_t AudioSystem::setVoiceVolume(float value) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setVoiceVolume(value);
 }
 
 status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
                                         uint32_t* dspFrames) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
 
     return af->getRenderPosition(halFrames, dspFrames, output);
 }
 
 uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     uint32_t result = 0;
     if (af == 0) return result;
     if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
@@ -485,46 +557,46 @@
 
 audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
     // Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
     return af->newAudioUniqueId(use);
 }
 
 void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af != 0) {
         af->acquireAudioSessionId(audioSession, pid, uid);
     }
 }
 
 void AudioSystem::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af != 0) {
         af->releaseAudioSessionId(audioSession, pid);
     }
 }
 
 audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return AUDIO_HW_SYNC_INVALID;
     return af->getAudioHwSyncForSession(sessionId);
 }
 
 status_t AudioSystem::systemReady() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return NO_INIT;
     return af->systemReady();
 }
 
 status_t AudioSystem::audioPolicyReady() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return NO_INIT;
     return af->audioPolicyReady();
 }
 
 status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
                                        size_t* frameCount) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -546,7 +618,7 @@
 
 
 void AudioSystem::AudioFlingerClient::clearIoCache() {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     mIoDescriptors.clear();
     mInBuffSize = 0;
     mInSamplingRate = 0;
@@ -555,14 +627,7 @@
 }
 
 void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
-    {
-        Mutex::Autolock _l(AudioSystem::gLock);
-        AudioSystem::gAudioFlinger.clear();
-    }
-
-    // clear output handles and stream to output map caches
-    clearIoCache();
-
+    gAudioFlingerServiceHandler.clearService();
     reportError(DEAD_OBJECT);
 
     ALOGW("AudioFlinger server died!");
@@ -584,7 +649,7 @@
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
     {
-        Mutex::Autolock _l(mLock);
+        std::lock_guard _l(mMutex);
         auto callbacks = std::map<audio_port_handle_t, wp<AudioDeviceCallback>>();
 
         switch (event) {
@@ -592,13 +657,10 @@
             case AUDIO_OUTPUT_REGISTERED:
             case AUDIO_INPUT_OPENED:
             case AUDIO_INPUT_REGISTERED: {
-                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
-                if (oldDesc == 0) {
-                    mIoDescriptors.add(ioDesc->getIoHandle(), ioDesc);
-                } else {
+                if (sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle())) {
                     deviceId = oldDesc->getDeviceId();
-                    mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
                 }
+                mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
                 if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
                     deviceId = ioDesc->getDeviceId();
@@ -627,7 +689,7 @@
                 ALOGV("ioConfigChanged() %s %d closed",
                       event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->getIoHandle());
 
-                mIoDescriptors.removeItem(ioDesc->getIoHandle());
+                mIoDescriptors.erase(ioDesc->getIoHandle());
                 mAudioDeviceCallbacks.erase(ioDesc->getIoHandle());
             }
                 break;
@@ -643,7 +705,7 @@
                 }
 
                 deviceId = oldDesc->getDeviceId();
-                mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
+                mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
                 if (deviceId != ioDesc->getDeviceId()) {
                     deviceId = ioDesc->getDeviceId();
@@ -689,8 +751,8 @@
         }
     }
 
-    // Callbacks must be called without mLock held. May lead to dead lock if calling for
-    // example getRoutedDevice that updates the device and tries to acquire mLock.
+    // Callbacks must be called without mMutex held. Holding it may lead to deadlock, for
+    // example when getRoutedDevice updates the device and tries to acquire mMutex.
     for (auto cb  : callbacksToCall) {
         // If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
         cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
@@ -709,7 +771,7 @@
 
     std::vector<sp<SupportedLatencyModesCallback>> callbacks;
     {
-        Mutex::Autolock _l(mLock);
+        std::lock_guard _l(mMutex);
         for (auto callback : mSupportedLatencyModesCallbacks) {
             if (auto ref = callback.promote(); ref != nullptr) {
                 callbacks.push_back(ref);
@@ -726,11 +788,11 @@
 status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
         uint32_t sampleRate, audio_format_t format,
         audio_channel_mask_t channelMask, size_t* buffSize) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) {
         return PERMISSION_DENIED;
     }
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     // Do we have a stale mInBuffSize or are we requesting the input buffer size for new values
     if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat)
         || (channelMask != mInChannelMask)) {
@@ -756,16 +818,15 @@
 
 sp<AudioIoDescriptor>
 AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle) {
-    sp<AudioIoDescriptor> desc;
-    ssize_t index = mIoDescriptors.indexOfKey(ioHandle);
-    if (index >= 0) {
-        desc = mIoDescriptors.valueAt(index);
+    if (const auto it = mIoDescriptors.find(ioHandle);
+        it != mIoDescriptors.end()) {
+        return it->second;
     }
-    return desc;
+    return {};
 }
 
 sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     return getIoDescriptor_l(ioHandle);
 }
 
@@ -773,7 +834,7 @@
         const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
         audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto& callbacks = mAudioDeviceCallbacks.emplace(
             audioIo,
             std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
@@ -788,7 +849,7 @@
         const wp<AudioDeviceCallback>& callback __unused, audio_io_handle_t audioIo,
         audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto it = mAudioDeviceCallbacks.find(audioIo);
     if (it == mAudioDeviceCallbacks.end()) {
         return INVALID_OPERATION;
@@ -804,7 +865,7 @@
 
 status_t AudioSystem::AudioFlingerClient::addSupportedLatencyModesCallback(
         const sp<SupportedLatencyModesCallback>& callback) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     if (std::find(mSupportedLatencyModesCallbacks.begin(),
                   mSupportedLatencyModesCallbacks.end(),
                   callback) != mSupportedLatencyModesCallbacks.end()) {
@@ -816,7 +877,7 @@
 
 status_t AudioSystem::AudioFlingerClient::removeSupportedLatencyModesCallback(
         const sp<SupportedLatencyModesCallback>& callback) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto it = std::find(mSupportedLatencyModesCallbacks.begin(),
                                  mSupportedLatencyModesCallbacks.end(),
                                  callback);
@@ -828,93 +889,78 @@
 }
 
 /* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     gAudioErrorCallbacks.insert(cb);
     return reinterpret_cast<uintptr_t>(cb);
 }
 
 /* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
 }
 
 /* static */ void AudioSystem::reportError(status_t err) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     for (auto callback : gAudioErrorCallbacks) {
         callback(err);
     }
 }
 
 /*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gDynPolicyCallback = cb;
 }
 
 /*static*/ void AudioSystem::setRecordConfigCallback(record_config_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gRecordConfigCallback = cb;
 }
 
 /*static*/ void AudioSystem::setRoutingCallback(routing_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gRoutingCallback = cb;
 }
 
 /*static*/ void AudioSystem::setVolInitReqCallback(vol_range_init_req_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gVolRangeInitReqCallback = cb;
 }
 
-// client singleton for AudioPolicyService binder interface
-// protected by gLockAPS
-sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
-sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
-
-
-// establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
-    sp<IAudioPolicyService> ap;
-    sp<AudioPolicyServiceClient> apc;
-    {
-        Mutex::Autolock _l(gLockAPS);
-        if (gAudioPolicyService == 0) {
-            sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
-            if (binder == nullptr) {
-                return nullptr;
-            }
-            if (gAudioPolicyServiceClient == NULL) {
-                gAudioPolicyServiceClient = new AudioPolicyServiceClient();
-            }
-            binder->linkToDeath(gAudioPolicyServiceClient);
-            gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
-            LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
-            apc = gAudioPolicyServiceClient;
-            // Make sure callbacks can be received by gAudioPolicyServiceClient
-            ProcessState::self()->startThreadPool();
-        }
-        ap = gAudioPolicyService;
-    }
-    if (apc != 0) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+struct AudioPolicyTraits {
+    static void onServiceCreate(const sp<IAudioPolicyService>& ap,
+            const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+        const int64_t token = IPCThreadState::self()->clearCallingIdentity();
         ap->registerClient(apc);
         ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
         ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
         IPCThreadState::self()->restoreCallingIdentity(token);
     }
 
-    return ap;
+    static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+
+    static constexpr const char *SERVICE_NAME = "media.audio_policy";
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
+        AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
+        AudioPolicyTraits> gAudioPolicyServiceHandler;
+
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+    return gAudioPolicyServiceHandler.setLocalService(aps);
+}
+
+sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
+    return gAudioPolicyServiceHandler.getService();
 }
 
 void AudioSystem::clearAudioPolicyService() {
-    Mutex::Autolock _l(gLockAPS);
-    gAudioPolicyService.clear();
+    gAudioPolicyServiceHandler.clearService();
 }
 
 // ---------------------------------------------------------------------------
 
 void AudioSystem::onNewAudioModulesAvailable() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
     aps->onNewAudioModulesAvailable();
 }
@@ -922,7 +968,7 @@
 status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
                                                const android::media::audio::common::AudioPort& port,
                                                audio_format_t encodedFormat) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
 
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -937,7 +983,7 @@
 
 audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
                                                                const char* device_address) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
 
     auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
@@ -957,7 +1003,7 @@
                                                const char* device_address,
                                                const char* device_name,
                                                audio_format_t encodedFormat) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     const char* address = "";
     const char* name = "";
 
@@ -980,7 +1026,7 @@
 
 status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
     if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     return statusTFromBinderStatus(aps->setPhoneState(
@@ -990,7 +1036,7 @@
 
 status_t
 AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     return statusTFromBinderStatus(
@@ -1003,7 +1049,7 @@
 }
 
 audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
@@ -1020,7 +1066,7 @@
 
 
 audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_io_handle_t> {
@@ -1068,7 +1114,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1117,7 +1163,7 @@
 }
 
 status_t AudioSystem::startOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1125,7 +1171,7 @@
 }
 
 status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1133,7 +1179,7 @@
 }
 
 void AudioSystem::releaseOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
 
     auto status = [&]() -> status_t {
@@ -1173,7 +1219,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1207,7 +1253,7 @@
 }
 
 status_t AudioSystem::startInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1215,7 +1261,7 @@
 }
 
 status_t AudioSystem::stopInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1223,7 +1269,7 @@
 }
 
 void AudioSystem::releaseInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
 
     auto status = [&]() -> status_t {
@@ -1240,7 +1286,7 @@
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
                                        int indexMin,
                                        int indexMax) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1261,7 +1307,7 @@
 status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
                                            int index,
                                            audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1276,7 +1322,7 @@
 status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
                                            int* index,
                                            audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1295,7 +1341,7 @@
 status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int index,
                                                   audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1310,7 +1356,7 @@
 status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int& index,
                                                   audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1325,7 +1371,7 @@
 }
 
 status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1338,7 +1384,7 @@
 }
 
 status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1351,7 +1397,7 @@
 }
 
 product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PRODUCT_STRATEGY_NONE;
 
     auto result = [&]() -> ConversionResult<product_strategy_t> {
@@ -1371,7 +1417,7 @@
     if (devices == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -1387,7 +1433,7 @@
 }
 
 audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     // FIXME change return type to status_t, and return PERMISSION_DENIED here
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
 
@@ -1408,7 +1454,7 @@
                                      product_strategy_t strategy,
                                      audio_session_t session,
                                      int id) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
@@ -1422,7 +1468,7 @@
 }
 
 status_t AudioSystem::unregisterEffect(int id) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1431,7 +1477,7 @@
 }
 
 status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1440,7 +1486,7 @@
 }
 
 status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
@@ -1450,7 +1496,7 @@
 }
 
 status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1464,7 +1510,7 @@
 
 status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
                                              uint32_t inPastMs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1477,7 +1523,7 @@
 }
 
 status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1489,19 +1535,19 @@
 }
 
 uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputSamplingRate();
 }
 
 size_t AudioSystem::getPrimaryOutputFrameCount() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputFrameCount();
 }
 
 status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setLowRamDevice(isLowRamDevice, totalMemory);
 }
@@ -1509,18 +1555,12 @@
 void AudioSystem::clearAudioConfigCache() {
     // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
     ALOGV("clearAudioConfigCache()");
-    {
-        Mutex::Autolock _l(gLock);
-        if (gAudioFlingerClient != 0) {
-            gAudioFlingerClient->clearIoCache();
-        }
-        gAudioFlinger.clear();
-    }
+    gAudioFlingerServiceHandler.clearService();
     clearAudioPolicyService();
 }
 
 status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
 
     std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
@@ -1530,7 +1570,7 @@
 }
 
 status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1541,7 +1581,7 @@
 
 audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
     ALOGV("%s", __func__);
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
 
     auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
@@ -1566,7 +1606,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
@@ -1590,7 +1630,7 @@
 status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
                                               std::vector<media::AudioPortFw>* result) {
     if (result == nullptr) return BAD_VALUE;
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
     return OK;
@@ -1600,7 +1640,7 @@
     if (port == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortFw portAidl;
@@ -1616,7 +1656,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
@@ -1629,7 +1669,7 @@
 }
 
 status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
@@ -1644,7 +1684,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
 
@@ -1667,7 +1707,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
@@ -1676,14 +1716,13 @@
 }
 
 status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->addAudioPortCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->addAudioPortCallback(callback);
     if (ret == 1) {
         aps->setAudioPortCallbacksEnabled(true);
     }
@@ -1692,14 +1731,13 @@
 
 /*static*/
 status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->removeAudioPortCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->removeAudioPortCallback(callback);
     if (ret == 0) {
         aps->setAudioPortCallbacksEnabled(false);
     }
@@ -1707,14 +1745,13 @@
 }
 
 status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->addAudioVolumeGroupCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->addAudioVolumeGroupCallback(callback);
     if (ret == 1) {
         aps->setAudioVolumeGroupCallbacksEnabled(true);
     }
@@ -1722,14 +1759,13 @@
 }
 
 status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->removeAudioVolumeGroupCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->removeAudioVolumeGroupCallback(callback);
     if (ret == 0) {
         aps->setAudioVolumeGroupCallbacksEnabled(false);
     }
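
The four registration helpers above share one pattern: the policy-service client is obtained through gAudioPolicyServiceHandler.getClient(), and the add/remove call plus the enable/disable of server-side callbacks are serialized under gApsCallbackMutex. The client's return value acts as a reference count, so callbacks are switched on at the server when the first listener is added (count reaches 1) and switched off when the last one is removed (count drops back to 0). The standalone sketch below illustrates that counting pattern with invented names; it is not the actual AudioSystem code.

    // Illustrative sketch: a listener registry that toggles server-side
    // notifications when the first listener arrives and the last one leaves.
    #include <functional>
    #include <memory>
    #include <mutex>
    #include <set>

    struct Listener { virtual ~Listener() = default; };

    class ListenerRegistry {
    public:
        explicit ListenerRegistry(std::function<void(bool)> setServerCallbacksEnabled)
            : mSetEnabled(std::move(setServerCallbacksEnabled)) {}

        // Returns the new listener count, or -1 if the listener was already present.
        int add(const std::shared_ptr<Listener>& listener) {
            std::lock_guard _l(mMutex);
            if (!mListeners.insert(listener).second) return -1;
            if (mListeners.size() == 1) mSetEnabled(true);   // first listener: enable
            return static_cast<int>(mListeners.size());
        }

        // Returns the remaining count, or -1 if the listener was never registered.
        int remove(const std::shared_ptr<Listener>& listener) {
            std::lock_guard _l(mMutex);
            if (mListeners.erase(listener) == 0) return -1;
            if (mListeners.empty()) mSetEnabled(false);      // last listener: disable
            return static_cast<int>(mListeners.size());
        }

    private:
        std::mutex mMutex;
        std::set<std::shared_ptr<Listener>> mListeners;
        std::function<void(bool)> mSetEnabled;
    };
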
@@ -1745,7 +1781,7 @@
     }
     status_t status = afc->addAudioDeviceCallback(callback, audioIo, portId);
     if (status == NO_ERROR) {
-        const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+        const sp<IAudioFlinger> af = get_audio_flinger();
         if (af != 0) {
             af->registerClient(afc);
         }
@@ -1782,7 +1818,7 @@
 }
 
 audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
     if (desc == 0) {
@@ -1797,7 +1833,7 @@
     if (session == nullptr || ioHandle == nullptr || device == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::SoundTriggerSession retAidl;
@@ -1811,7 +1847,7 @@
 }
 
 status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
@@ -1819,7 +1855,7 @@
 }
 
 audio_mode_t AudioSystem::getPhoneState() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_MODE_INVALID;
 
     auto result = [&]() -> ConversionResult<audio_mode_t> {
@@ -1832,7 +1868,7 @@
 }
 
 status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
@@ -1843,10 +1879,29 @@
     return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
 }
 
+status_t AudioSystem::getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) {
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    std::vector<::android::media::AudioMix> aidlMixes;
+    Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
+
+    for (const auto& aidlMix : aidlMixes) {
+        AudioMix mix = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioMix(aidlMix));
+        mixes.push_back(mix);
+    }
+
+    return statusTFromBinderStatus(status);
+}
+
 status_t AudioSystem::updatePolicyMixes(
         const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
                 mixesWithUpdates) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioMixUpdate> updatesAidl;
@@ -1865,7 +1920,7 @@
 }
 
 status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1876,7 +1931,7 @@
 }
 
 status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1885,7 +1940,7 @@
 
 status_t AudioSystem::setUserIdDeviceAffinities(int userId,
                                                 const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
@@ -1897,7 +1952,7 @@
 }
 
 status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
     return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
@@ -1909,7 +1964,7 @@
     if (source == nullptr || attributes == nullptr || portId == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
@@ -1924,7 +1979,7 @@
 }
 
 status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1932,7 +1987,7 @@
 }
 
 status_t AudioSystem::setMasterMono(bool mono) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->setMasterMono(mono));
 }
@@ -1941,26 +1996,26 @@
     if (mono == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->getMasterMono(mono));
 }
 
 status_t AudioSystem::setMasterBalance(float balance) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMasterBalance(balance);
 }
 
 status_t AudioSystem::getMasterBalance(float* balance) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMasterBalance(balance);
 }
 
 float
 AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NAN;
 
     auto result = [&]() -> ConversionResult<float> {
@@ -1978,13 +2033,13 @@
 }
 
 status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMicrophones(microphones);
 }
 
 status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) return PERMISSION_DENIED;
     return af->setAudioHalPids(pids);
 }
@@ -1998,7 +2053,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
@@ -2025,7 +2080,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
@@ -2043,7 +2098,7 @@
 }
 
 status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
@@ -2053,7 +2108,7 @@
 }
 
 status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2062,7 +2117,7 @@
 }
 
 status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2071,7 +2126,7 @@
 }
 
 status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2080,7 +2135,7 @@
 }
 
 status_t AudioSystem::setCurrentImeUid(uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -2088,7 +2143,7 @@
 }
 
 bool AudioSystem::isHapticPlaybackSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2101,7 +2156,7 @@
 }
 
 bool AudioSystem::isUltrasoundSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2119,7 +2174,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<AudioFormatDescription> formatsAidl;
@@ -2135,7 +2190,7 @@
 }
 
 status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioProductStrategy> strategiesAidl;
@@ -2197,7 +2252,7 @@
 status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
                                                             product_strategy_t& productStrategy,
                                                             bool fallbackOnDefault) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2213,7 +2268,7 @@
 }
 
 status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioVolumeGroup> groupsAidl;
@@ -2227,7 +2282,7 @@
 status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
                                                         volume_group_t& volumeGroup,
                                                         bool fallbackOnDefault) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2240,13 +2295,13 @@
 }
 
 status_t AudioSystem::setRttEnabled(bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->setRttEnabled(enabled));
 }
 
 bool AudioSystem::isCallScreenModeSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2261,7 +2316,7 @@
 status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
                                                 device_role_t role,
                                                 const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2278,7 +2333,7 @@
 status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy,
                                                    device_role_t role,
                                                    const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2294,7 +2349,7 @@
 
 status_t
 AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2307,7 +2362,7 @@
 status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
                                                    device_role_t role,
                                                    AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2325,7 +2380,7 @@
 status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                      device_role_t role,
                                                      const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2343,7 +2398,7 @@
 status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                      device_role_t role,
                                                      const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2359,7 +2414,7 @@
 
 status_t AudioSystem::removeDevicesRoleForCapturePreset(
         audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2375,7 +2430,7 @@
 
 status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                        device_role_t role) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2389,7 +2444,7 @@
 status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
                                                         device_role_t role,
                                                         AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2407,7 +2462,7 @@
 
 status_t AudioSystem::getSpatializer(const sp<media::INativeSpatializerCallback>& callback,
                                           sp<media::ISpatializer>* spatializer) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (spatializer == nullptr) {
         return BAD_VALUE;
     }
@@ -2426,7 +2481,7 @@
                                     const audio_config_t *config,
                                     const AudioDeviceTypeAddrVector &devices,
                                     bool *canBeSpatialized) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (canBeSpatialized == nullptr) {
         return BAD_VALUE;
     }
@@ -2450,7 +2505,7 @@
 
 status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
                                             sp<media::ISoundDose>* soundDose) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2469,7 +2524,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2493,7 +2548,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2512,7 +2567,7 @@
 
 status_t AudioSystem::setRequestedLatencyMode(
             audio_io_handle_t output, audio_latency_mode_t mode) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2521,7 +2576,7 @@
 
 status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
         std::vector<audio_latency_mode_t>* modes) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2529,7 +2584,7 @@
 }
 
 status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2538,7 +2593,7 @@
 
 status_t AudioSystem::isBluetoothVariableLatencyEnabled(
         bool *enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2547,7 +2602,7 @@
 
 status_t AudioSystem::supportsBluetoothVariableLatency(
         bool *support) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2555,7 +2610,7 @@
 }
 
 status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2583,15 +2638,15 @@
     }
 
     binder::Status setCaptureState(bool active) override {
-        Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+        std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
         mListener->onStateChanged(active);
         return binder::Status::ok();
     }
 
     void binderDied(const wp<IBinder>&) override {
-        Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+        std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
         mListener->onServiceDied();
-        gSoundTriggerCaptureStateListener = nullptr;
+        AudioSystem::gSoundTriggerCaptureStateListener = nullptr;
     }
 
 private:
@@ -2604,13 +2659,12 @@
         const sp<CaptureStateListener>& listener) {
     LOG_ALWAYS_FATAL_IF(listener == nullptr);
 
-    const sp<IAudioPolicyService>& aps =
-            AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
 
-    Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+    std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
     gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
     gSoundTriggerCaptureStateListener->init();
 
@@ -2619,7 +2673,7 @@
 
 status_t AudioSystem::setVibratorInfos(
         const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2628,7 +2682,7 @@
 
 status_t AudioSystem::getMmapPolicyInfo(
         AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2636,7 +2690,7 @@
 }
 
 int32_t AudioSystem::getAAudioMixerBurstCount() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2644,7 +2698,7 @@
 }
 
 int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2653,7 +2707,7 @@
 
 status_t AudioSystem::getSupportedMixerAttributes(
         audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2673,7 +2727,7 @@
                                                   audio_port_handle_t portId,
                                                   uid_t uid,
                                                   const audio_mixer_attributes_t *mixerAttr) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2693,7 +2747,7 @@
         const audio_attributes_t *attr,
         audio_port_handle_t portId,
         std::optional<audio_mixer_attributes_t> *mixerAttr) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2716,7 +2770,7 @@
 status_t AudioSystem::clearPreferredMixerAttributes(const audio_attributes_t *attr,
                                                     audio_port_handle_t portId,
                                                     uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2733,45 +2787,28 @@
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
         const sp<AudioPortCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        if (mAudioPortCallbacks[i] == callback) {
-            return -1;
-        }
-    }
-    mAudioPortCallbacks.add(callback);
-    return mAudioPortCallbacks.size();
+    std::lock_guard _l(mMutex);
+    return mAudioPortCallbacks.insert(callback).second ? mAudioPortCallbacks.size() : -1;
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback(
         const sp<AudioPortCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    size_t i;
-    for (i = 0; i < mAudioPortCallbacks.size(); i++) {
-        if (mAudioPortCallbacks[i] == callback) {
-            break;
-        }
-    }
-    if (i == mAudioPortCallbacks.size()) {
-        return -1;
-    }
-    mAudioPortCallbacks.removeAt(i);
-    return mAudioPortCallbacks.size();
+    std::lock_guard _l(mMutex);
+    return mAudioPortCallbacks.erase(callback) > 0 ? mAudioPortCallbacks.size() : -1;
 }
 
-
 Status AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate() {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        mAudioPortCallbacks[i]->onAudioPortListUpdate();
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioPortCallbacks) {
+        callback->onAudioPortListUpdate();
     }
     return Status::ok();
 }
 
 Status AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate() {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        mAudioPortCallbacks[i]->onAudioPatchListUpdate();
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioPortCallbacks) {
+        callback->onAudioPatchListUpdate();
     }
     return Status::ok();
 }
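
The rewritten AudioPolicyServiceClient helpers above lean on two std::set guarantees: insert() returns a pair whose .second is false when the element is already present, and erase() returns how many elements were removed. That is what makes the one-line "new size or -1" return expressions correct. A minimal, standard-library-only demonstration:

    // The std::set return values behind the "size or -1" expressions above.
    #include <cassert>
    #include <set>

    int main() {
        std::set<int> s;
        assert(s.insert(42).second);    // newly inserted -> true
        assert(!s.insert(42).second);   // duplicate -> false, set unchanged
        assert(s.erase(42) == 1);       // one element removed
        assert(s.erase(42) == 0);       // nothing left to remove
        return 0;
    }
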
@@ -2779,30 +2816,16 @@
 // ----------------------------------------------------------------------------
 int AudioSystem::AudioPolicyServiceClient::addAudioVolumeGroupCallback(
         const sp<AudioVolumeGroupCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        if (mAudioVolumeGroupCallback[i] == callback) {
-            return -1;
-        }
-    }
-    mAudioVolumeGroupCallback.add(callback);
-    return mAudioVolumeGroupCallback.size();
+    std::lock_guard _l(mMutex);
+    return mAudioVolumeGroupCallbacks.insert(callback).second
+            ? mAudioVolumeGroupCallbacks.size() : -1;
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioVolumeGroupCallback(
         const sp<AudioVolumeGroupCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    size_t i;
-    for (i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        if (mAudioVolumeGroupCallback[i] == callback) {
-            break;
-        }
-    }
-    if (i == mAudioVolumeGroupCallback.size()) {
-        return -1;
-    }
-    mAudioVolumeGroupCallback.removeAt(i);
-    return mAudioVolumeGroupCallback.size();
+    std::lock_guard _l(mMutex);
+    return mAudioVolumeGroupCallbacks.erase(callback) > 0
+            ? mAudioVolumeGroupCallbacks.size() : -1;
 }
 
 Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
@@ -2811,9 +2834,9 @@
             aidl2legacy_int32_t_volume_group_t(group));
     int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
 
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioVolumeGroupCallbacks) {
+        callback->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
     }
     return Status::ok();
 }
@@ -2827,7 +2850,7 @@
     int stateLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(state));
     dynamic_policy_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gDynPolicyCallback;
     }
 
@@ -2848,7 +2871,7 @@
         AudioSource source) {
     record_config_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gRecordConfigCallback;
     }
 
@@ -2881,7 +2904,7 @@
 Status AudioSystem::AudioPolicyServiceClient::onRoutingUpdated() {
     routing_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gRoutingCallback;
     }
 
@@ -2894,7 +2917,7 @@
 Status AudioSystem::AudioPolicyServiceClient::onVolumeRangeInitRequest() {
     vol_range_init_req_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gVolRangeInitReqCallback;
     }
 
@@ -2906,12 +2929,12 @@
 
 void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
     {
-        Mutex::Autolock _l(mLock);
-        for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-            mAudioPortCallbacks[i]->onServiceDied();
+        std::lock_guard _l(mMutex);
+        for (const auto& callback : mAudioPortCallbacks) {
+            callback->onServiceDied();
         }
-        for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-            mAudioVolumeGroupCallback[i]->onServiceDied();
+        for (const auto& callback : mAudioVolumeGroupCallbacks) {
+            callback->onServiceDied();
         }
     }
     AudioSystem::clearAudioPolicyService();
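
Several of the AudioPolicyServiceClient callbacks above (dynamic policy state, recording configuration, routing, volume-range init) follow the same locking discipline: AudioSystem::gMutex is held only long enough to copy the registered function pointer, and the callback itself is invoked after the lock is released, so a slow callback cannot stall other AudioSystem entry points. A standalone sketch of the idiom, with invented names rather than the real globals:

    // "Copy the callback under the lock, call it outside" idiom.
    #include <iostream>
    #include <mutex>

    using routing_cb = void (*)();

    namespace {
    std::mutex gCallbackMutex;
    routing_cb gRoutingCallback = nullptr;   // written by the setter, read by the notifier
    }

    void setRoutingCallback(routing_cb cb) {
        std::lock_guard _l(gCallbackMutex);
        gRoutingCallback = cb;
    }

    void onRoutingUpdated() {
        routing_cb cb = nullptr;
        {
            std::lock_guard _l(gCallbackMutex);  // lock held only for the copy
            cb = gRoutingCallback;
        }
        if (cb != nullptr) {
            cb();                                // invoked without the lock held
        }
    }

    int main() {
        setRoutingCallback([] { std::cout << "routing updated\n"; });
        onRoutingUpdated();
        return 0;
    }
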
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 58e0486..98a1fde 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1699,25 +1699,42 @@
 }
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
+    status_t result = NO_ERROR;
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
-            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
-        if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
-            if (isPlaying_l()) {
-                android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-                mProxy->interrupt();
+        if (mStatus == NO_ERROR) {
+            if (isOffloadedOrDirect_l()) {
+                if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
+                } else {
+                    ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
+                          "State: %s.",
+                            __func__, mPortId, stateToString(mState));
+                    result = INVALID_OPERATION;
+                }
             } else {
-                // if the track is idle, try to restore now and
-                // defer to next start if not possible
-                if (restoreTrack_l("setOutputDevice") != OK) {
-                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                // allow track invalidation when track is not playing to propagate
+                // the updated mSelectedDeviceId
+                if (isPlaying_l()) {
+                    if (mSelectedDeviceId != mRoutedDeviceId) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                        mProxy->interrupt();
+                    }
+                } else {
+                    // if the track is idle, try to restore now and
+                    // defer to next start if not possible
+                    if (restoreTrack_l("setOutputDevice") != OK) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    }
                 }
             }
         }
     }
-    return NO_ERROR;
+    return result;
 }
 
 audio_port_handle_t AudioTrack::getOutputDevice() {
@@ -2832,7 +2849,7 @@
     return 0;
 }
 
-status_t AudioTrack::restoreTrack_l(const char *from)
+status_t AudioTrack::restoreTrack_l(const char *from, bool forceRestore)
 {
     status_t result = NO_ERROR;  // logged: make sure to set this before returning.
     const int64_t beginNs = systemTime();
@@ -2853,9 +2870,12 @@
     // output parameters and new IAudioFlinger in createTrack_l()
     AudioSystem::clearAudioConfigCache();
 
-    if (isOffloadedOrDirect_l() || mDoNotReconnect) {
+    if (!forceRestore &&
+        (isOffloadedOrDirect_l() || mDoNotReconnect)) {
         // FIXME re-creation of offloaded and direct tracks is not yet implemented;
-        // reconsider enabling for linear PCM encodings when position can be preserved.
+        // Disabled since (1) timestamp correction is not implemented for non-PCM and
+        // (2) We pre-empt existing direct tracks on resource constraint, so these tracks
+        // shouldn't reconnect.
         result = DEAD_OBJECT;
         return result;
     }
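
With the two changes above, selecting an output device on an offloaded or direct AudioTrack no longer invalidates a playing track: the track is recreated immediately (restoreTrack_l with forceRestore) only when it is stopped or flushed, and setOutputDevice() now returns INVALID_OPERATION otherwise, while linear-PCM tracks keep the invalidate-and-restore path. The sketch below shows one way a caller might sequence this; it is a hedged usage example, not code from the patch, and it only builds inside the Android tree.

    // Sketch: reroute an offloaded/direct track by stopping and flushing it
    // first, then selecting the device and handling the new error path.
    #include <media/AudioTrack.h>

    using namespace android;

    status_t rerouteOffloadedTrack(const sp<AudioTrack>& track,
                                   audio_port_handle_t deviceId) {
        track->stop();    // offloaded/direct tracks must be stopped or flushed
        track->flush();   // before the device change is applied immediately
        const status_t status = track->setOutputDevice(deviceId);
        // INVALID_OPERATION means the track was still active; retry the call
        // once the track has actually stopped or been flushed.
        return status;
    }
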
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 60b08fa..a71bb18 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -242,6 +242,7 @@
     legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
     legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
     legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+    legacy.mToken = aidl.mToken;
     return legacy;
 }
 
@@ -265,6 +266,7 @@
     aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
     aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
     aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+    aidl.mToken = legacy.mToken;
     return aidl;
 }
 
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 60bb4f0..68dba34 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -44,7 +44,15 @@
   ],
   "postsubmit": [
     {
-       "name": "audioeffect_analysis"
+      "name": "audioeffect_analysis"
+    },
+    {
+      "name": "CtsVirtualDevicesTestCases",
+      "options" : [
+        {
+          "include-filter": "android.virtualdevice.cts.VirtualAudioTest"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 9c4ccb8..e213f08 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -872,6 +872,18 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = 3,
           .repeatSegment = 0 },                              // TONE_NZ_CALL_WAITING
+        { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                             // TONE_MY_CONGESTION
+        { .segments = { { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+                        { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 }                              // TONE_MY_RINGTONE
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -976,6 +988,16 @@
             TONE_SUP_ERROR,               // TONE_SUP_ERROR
             TONE_NZ_CALL_WAITING,         // TONE_SUP_CALL_WAITING
             TONE_GB_RINGTONE              // TONE_SUP_RINGTONE
+        },
+        {   // MALAYSIA
+            TONE_SUP_DIAL,                // TONE_SUP_DIAL
+            TONE_SUP_BUSY,                // TONE_SUP_BUSY
+            TONE_MY_CONGESTION,           // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,           // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,      // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,               // TONE_SUP_ERROR
+            TONE_SUP_CALL_WAITING,        // TONE_SUP_CALL_WAITING
+            TONE_MY_RINGTONE              // TONE_SUP_RINGTONE
         }
 };
 
@@ -1055,6 +1077,8 @@
         mRegion = TAIWAN;
     } else if (strstr(value, "nz") != NULL) {
         mRegion = NZ;
+    } else if (strstr(value, "my") != NULL) {
+        mRegion = MY;
     } else {
         mRegion = CEPT;
     }
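
The two new descriptors encode the Malaysian supervisory tones as segment tables: congestion is 425 Hz at 500 ms on / 250 ms off repeated indefinitely, and the ringtone is 425 Hz with a 400 ms on / 200 ms off / 400 ms on / 2 s off cadence. A matching MALAYSIA row is added to the region lookup table, selected when the operator country string contains "my". The standalone sketch below mirrors the table-driven encoding with simplified, invented types:

    // Simplified stand-in for the segment-table encoding used above.
    #include <cstdint>
    #include <vector>

    struct ToneSegment {
        uint32_t durationMs;   // 0 terminates the sequence
        uint16_t freqHz;       // 0 means silence
    };

    struct ToneDescriptor {
        std::vector<ToneSegment> segments;
        int repeatCount;       // -1 == repeat indefinitely
    };

    // MY ringtone cadence: 425 Hz, 400 ms on, 200 ms off, 400 ms on, 2 s off.
    const ToneDescriptor kMyRingtone = {
        {{400, 425}, {200, 0}, {400, 425}, {2000, 0}, {0, 0}},
        /*repeatCount=*/-1,
    };
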
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 88b0450..f0c561c 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -39,4 +39,6 @@
     boolean allowPrivilegedMediaPlaybackCapture;
     /** Indicates if the caller can capture voice communication output */
     boolean voiceCommunicationCaptureAllowed;
+    /** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
+    IBinder mToken;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 52c8da0..633493c 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -263,6 +263,8 @@
 
     void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
 
+    List<AudioMix> getRegisteredPolicyMixes();
+
     void updatePolicyMixes(in AudioMixUpdate[] updates);
 
     void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
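
getRegisteredPolicyMixes() is the binder-level counterpart of the AudioSystem::getRegisteredPolicyMixes() wrapper added earlier in this patch: the wrapper converts the returned AIDL mixes back to legacy AudioMix objects and is gated on the audio_mix_test_api aconfig flag, returning INVALID_OPERATION when the flag is disabled. A hedged caller-side sketch (test-style code, only meaningful inside the Android tree):

    // Sketch of a test-side caller of the new query.
    #include <vector>

    #include <media/AudioPolicy.h>
    #include <media/AudioSystem.h>

    using namespace android;

    bool hasRegisteredPolicyMixes() {
        std::vector<AudioMix> mixes;
        const status_t status = AudioSystem::getRegisteredPolicyMixes(mixes);
        if (status == INVALID_OPERATION) {
            // The audio_mix_test_api flag is not enabled in this build.
            return false;
        }
        return status == NO_ERROR && !mixes.empty();
    }
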
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 6093933..02c5a3f 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -1,4 +1,8 @@
 /*
+package {
+    default_team: "trendy_team_media_framework_audio",
+}
+
  * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -71,7 +75,7 @@
         "libbinder_headers",
         "libmedia_headers",
     ],
-     fuzz_config: {
+    fuzz_config: {
         cc: [
             "android-media-fuzzing-reports@google.com",
         ],
@@ -90,6 +94,6 @@
     srcs: ["audioflinger_aidl_fuzzer.cpp"],
     defaults: [
         "libaudioclient_aidl_fuzzer_defaults",
-        "service_fuzzer_defaults"
+        "service_fuzzer_defaults",
     ],
 }
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index fd3b0a8..f2ad91c 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index ec35e93..9e4ae54 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_AUDIO_POLICY_H
 #define ANDROID_AUDIO_POLICY_H
 
+#include <binder/IBinder.h>
 #include <binder/Parcel.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <system/audio.h>
@@ -127,6 +128,7 @@
     audio_devices_t mDeviceType;
     String8         mDeviceAddress;
     uint32_t        mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
+    sp<IBinder>     mToken;
     /** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
     bool            mAllowPrivilegedMediaPlaybackCapture = false;
     /** Indicates if the caller can capture voice communication output */
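
The new mToken member ties each AudioMix to the IBinder of the AudioPolicy client that registered it, and the PolicyAidlConversion changes earlier in the patch carry it through both conversion directions, so the service can attribute a mix to its owner (for example, to clean up when that client goes away). The standalone analogue below uses std::shared_ptr<void> as an opaque owner token in place of sp<IBinder>; it illustrates the ownership-key idea only and is not the real registry.

    // Keying registered objects by an opaque owner token (illustrative only).
    #include <algorithm>
    #include <memory>
    #include <string>
    #include <vector>

    using OwnerToken = std::shared_ptr<void>;   // stand-in for sp<IBinder>

    struct Mix { std::string name; OwnerToken owner; };

    class MixRegistry {
    public:
        void registerMix(Mix mix) { mMixes.push_back(std::move(mix)); }

        // Drop every mix that belongs to the given owner (e.g. a client that died).
        void unregisterOwner(const OwnerToken& owner) {
            mMixes.erase(std::remove_if(mMixes.begin(), mMixes.end(),
                                        [&](const Mix& m) { return m.owner == owner; }),
                         mMixes.end());
        }

    private:
        std::vector<Mix> mMixes;
    };
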
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index fcbb019..2505b11 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -58,11 +58,11 @@
      * @return {@code INVALID_SCORE} if not matching, {@code MATCH_ON_DEFAULT_SCORE} if matching
      * to default strategy, non zero positive score if matching a strategy.
      */
-    static int attributesMatchesScore(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes);
+    static int attributesMatchesScore(audio_attributes_t refAttributes,
+                                      audio_attributes_t clientAttritubes);
 
-    static bool attributesMatches(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes) {
+    static bool attributesMatches(audio_attributes_t refAttributes,
+                                  audio_attributes_t clientAttritubes) {
         return attributesMatchesScore(refAttributes, clientAttritubes) > 0;
     }
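
Dropping const from the by-value audio_attributes_t parameters above is purely cosmetic for callers: top-level const on a by-value parameter is not part of a function's type, so the declaration and the definition may even disagree about it. A short illustration:

    // Top-level const on by-value parameters does not change the signature.
    int score(int ref, int client);               // declaration
    int score(const int ref, const int client) {  // same function; const only
        return ref == client ? 1 : 0;             // affects the local copies
    }
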
 
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index a1f7941..338534d 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,6 +19,7 @@
 
 #include <sys/types.h>
 
+#include <mutex>
 #include <set>
 #include <vector>
 
@@ -86,6 +87,7 @@
 typedef void (*routing_callback)();
 typedef void (*vol_range_init_req_callback)();
 
+class CaptureStateListenerImpl;
 class IAudioFlinger;
 class String8;
 
@@ -95,6 +97,13 @@
 
 class AudioSystem
 {
+    friend class AudioFlingerClient;
+    friend class AudioPolicyServiceClient;
+    friend class CaptureStateListenerImpl;
+    template <typename ServiceInterface, typename Client, typename AidlInterface,
+            typename ServiceTraits>
+    friend class ServiceHandler;
+
 public:
 
     // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
@@ -177,8 +186,8 @@
     static status_t setLocalAudioFlinger(const sp<IAudioFlinger>& af);
 
     // helper function to obtain AudioFlinger service handle
-    static const sp<IAudioFlinger> get_audio_flinger();
-    static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+    static sp<IAudioFlinger> get_audio_flinger();
+    static sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
@@ -402,7 +411,12 @@
     // and output configuration cache (gOutputs)
     static void clearAudioConfigCache();
 
-    static const sp<media::IAudioPolicyService> get_audio_policy_service();
+    // Sets a local AudioPolicyService interface to be used by AudioSystem.
+    // This is used by audioserver main() to allow client object initialization
+    // before exposing any interfaces to ServiceManager.
+    static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
+
+    static sp<media::IAudioPolicyService> get_audio_policy_service();
     static void clearAudioPolicyService();
 
     // helpers for android.media.AudioManager.getProperty(), see description there for meaning
@@ -462,6 +476,8 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
+    static status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes);
+
     static status_t updatePolicyMixes(
         const std::vector<
                 std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
@@ -774,23 +790,18 @@
 
     static int32_t getAAudioHardwareBurstMinUsec();
 
-private:
-
     class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
     {
     public:
-        AudioFlingerClient() :
-            mInBuffSize(0), mInSamplingRate(0),
-            mInFormat(AUDIO_FORMAT_DEFAULT), mInChannelMask(AUDIO_CHANNEL_NONE) {
-        }
+        AudioFlingerClient() = default;
 
-        void clearIoCache();
+        void clearIoCache() EXCLUDES(mMutex);
         status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
-                                    audio_channel_mask_t channelMask, size_t* buffSize);
-        sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+                audio_channel_mask_t channelMask, size_t* buffSize) EXCLUDES(mMutex);
+        sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle) EXCLUDES(mMutex);
 
         // DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
         // IAudioFlingerClient
 
@@ -798,61 +809,71 @@
         // values for output/input parameters up-to-date in client process
         binder::Status ioConfigChanged(
                 media::AudioIoConfigEvent event,
-                const media::AudioIoDescriptor& ioDesc) override;
+                const media::AudioIoDescriptor& ioDesc) final EXCLUDES(mMutex);
 
         binder::Status onSupportedLatencyModesChanged(
                 int output,
-                const std::vector<media::audio::common::AudioLatencyMode>& latencyModes) override;
+                const std::vector<media::audio::common::AudioLatencyMode>& latencyModes)
+                final EXCLUDES(mMutex);
 
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                               audio_io_handle_t audioIo,
-                                               audio_port_handle_t portId);
+                audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
         status_t removeAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                           audio_io_handle_t audioIo,
-                                           audio_port_handle_t portId);
+                audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
 
         status_t addSupportedLatencyModesCallback(
-                        const sp<SupportedLatencyModesCallback>& callback);
+                const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
         status_t removeSupportedLatencyModesCallback(
-                        const sp<SupportedLatencyModesCallback>& callback);
+                const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
 
-        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo) EXCLUDES(mMutex);
 
     private:
-        Mutex                               mLock;
-        DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> >   mIoDescriptors;
+        mutable std::mutex mMutex;
+        std::map<audio_io_handle_t, sp<AudioIoDescriptor>> mIoDescriptors GUARDED_BY(mMutex);
 
         std::map<audio_io_handle_t, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>>
-                mAudioDeviceCallbacks;
+                mAudioDeviceCallbacks GUARDED_BY(mMutex);
 
         std::vector<wp<SupportedLatencyModesCallback>>
-                mSupportedLatencyModesCallbacks GUARDED_BY(mLock);
+                mSupportedLatencyModesCallbacks GUARDED_BY(mMutex);
 
         // cached values for recording getInputBufferSize() queries
-        size_t                              mInBuffSize;    // zero indicates cache is invalid
-        uint32_t                            mInSamplingRate;
-        audio_format_t                      mInFormat;
-        audio_channel_mask_t                mInChannelMask;
-        sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle);
+        size_t mInBuffSize GUARDED_BY(mMutex) = 0; // zero indicates cache is invalid
+        uint32_t mInSamplingRate GUARDED_BY(mMutex) = 0;
+        audio_format_t mInFormat GUARDED_BY(mMutex) = AUDIO_FORMAT_DEFAULT;
+        audio_channel_mask_t mInChannelMask GUARDED_BY(mMutex) = AUDIO_CHANNEL_NONE;
+
+        sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle) REQUIRES(mMutex);
     };
 
     class AudioPolicyServiceClient: public IBinder::DeathRecipient,
-                                    public media::BnAudioPolicyServiceClient
-    {
+                                    public media::BnAudioPolicyServiceClient {
     public:
-        AudioPolicyServiceClient() {
+        AudioPolicyServiceClient() = default;
+
+        int addAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+        int removeAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+        bool isAudioPortCbEnabled() const EXCLUDES(mMutex) {
+            std::lock_guard _l(mMutex);
+            return !mAudioPortCallbacks.empty();
         }
 
-        int addAudioPortCallback(const sp<AudioPortCallback>& callback);
-        int removeAudioPortCallback(const sp<AudioPortCallback>& callback);
-        bool isAudioPortCbEnabled() const { return (mAudioPortCallbacks.size() != 0); }
+        int addAudioVolumeGroupCallback(
+                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
 
-        int addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
-        int removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
-        bool isAudioVolumeGroupCbEnabled() const { return (mAudioVolumeGroupCallback.size() != 0); }
+        int removeAudioVolumeGroupCallback(
+                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
+
+        bool isAudioVolumeGroupCbEnabled() const EXCLUDES(mMutex) {
+            std::lock_guard _l(mMutex);
+            return !mAudioVolumeGroupCallbacks.empty();
+        }
 
         // DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
         // IAudioPolicyServiceClient
         binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
@@ -873,43 +894,36 @@
         binder::Status onVolumeRangeInitRequest();
 
     private:
-        Mutex                               mLock;
-        Vector <sp <AudioPortCallback> >    mAudioPortCallbacks;
-        Vector <sp <AudioVolumeGroupCallback> > mAudioVolumeGroupCallback;
+        mutable std::mutex mMutex;
+        std::set<sp<AudioPortCallback>> mAudioPortCallbacks GUARDED_BY(mMutex);
+        std::set<sp<AudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks GUARDED_BY(mMutex);
     };
 
+    private:
+
     static audio_io_handle_t getOutput(audio_stream_type_t stream);
-    static const sp<AudioFlingerClient> getAudioFlingerClient();
+    static sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
-    static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
 
     // Invokes all registered error callbacks with the given error code.
     static void reportError(status_t err);
 
-    static sp<AudioFlingerClient> gAudioFlingerClient;
-    static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
-    friend class AudioFlingerClient;
-    friend class AudioPolicyServiceClient;
+    [[clang::no_destroy]] static std::mutex gMutex;
+    static dynamic_policy_callback gDynPolicyCallback GUARDED_BY(gMutex);
+    static record_config_callback gRecordConfigCallback GUARDED_BY(gMutex);
+    static routing_callback gRoutingCallback GUARDED_BY(gMutex);
+    static vol_range_init_req_callback gVolRangeInitReqCallback GUARDED_BY(gMutex);
 
-    static Mutex gLock;      // protects gAudioFlinger
-    static Mutex gLockErrorCallbacks;      // protects gAudioErrorCallbacks
-    static Mutex gLockAPS;   // protects gAudioPolicyService and gAudioPolicyServiceClient
-    static sp<IAudioFlinger> gAudioFlinger;
-    static std::set<audio_error_callback> gAudioErrorCallbacks;
-    static dynamic_policy_callback gDynPolicyCallback;
-    static record_config_callback gRecordConfigCallback;
-    static routing_callback gRoutingCallback;
-    static vol_range_init_req_callback gVolRangeInitReqCallback;
+    [[clang::no_destroy]] static std::mutex gApsCallbackMutex;
+    [[clang::no_destroy]] static std::mutex gErrorCallbacksMutex;
+    [[clang::no_destroy]] static std::set<audio_error_callback> gAudioErrorCallbacks
+            GUARDED_BY(gErrorCallbacksMutex);
 
-    static size_t gInBuffSize;
-    // previous parameters for recording buffer size queries
-    static uint32_t gPrevInSamplingRate;
-    static audio_format_t gPrevInFormat;
-    static audio_channel_mask_t gPrevInChannelMask;
-
-    static sp<media::IAudioPolicyService> gAudioPolicyService;
+    [[clang::no_destroy]] static std::mutex gSoundTriggerMutex;
+    [[clang::no_destroy]] static sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener
+            GUARDED_BY(gSoundTriggerMutex);
 };
 
-};  // namespace android
+}  // namespace android
 
 #endif  /*ANDROID_AUDIOSYSTEM_H_*/
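
The reworked header spells out its locking contract with clang thread-safety annotations: data members carry GUARDED_BY(mutex), public entry points are marked EXCLUDES(mutex), and the _l-suffixed helpers are REQUIRES(mutex), so building with -Wthread-safety can flag mismatches at compile time; [[clang::no_destroy]] on the global mutexes and the capture-state listener keeps their destructors from running at process exit. The self-contained sketch below shows the annotation vocabulary; the macro definitions are local stand-ins, whereas the real header picks them up from the tree's thread-annotation support.

    // Minimal sketch of the clang thread-safety annotations used above.
    #include <cstddef>
    #include <mutex>

    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #define REQUIRES(x)   __attribute__((requires_capability(x)))
    #define EXCLUDES(x)   __attribute__((locks_excluded(x)))

    class InputSizeCache {
    public:
        size_t get() EXCLUDES(mMutex) {          // public API: lock not yet held
            std::lock_guard _l(mMutex);
            return getLocked();
        }
        void invalidate() EXCLUDES(mMutex) {
            std::lock_guard _l(mMutex);
            mInBuffSize = 0;                     // zero marks the cache invalid
        }
    private:
        size_t getLocked() REQUIRES(mMutex) {    // _l-style helper: caller holds the lock
            return mInBuffSize;
        }
        mutable std::mutex mMutex;
        size_t mInBuffSize GUARDED_BY(mMutex) = 0;
    };
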
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 523383f..19780ae 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1220,7 +1220,7 @@
             void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
-            status_t restoreTrack_l(const char *from);
+            status_t restoreTrack_l(const char *from, bool forceRestore = false);
 
             uint32_t    getUnderrunCount_l() const;
 
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 46e9501..3e515fc 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,11 +225,14 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
-         // TAIWAN supervisory tones
+        // TAIWAN supervisory tones
         TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
-         // NEW ZEALAND supervisory tones
+        // NEW ZEALAND supervisory tones
         TONE_NZ_CALL_WAITING,       // Call waiting tone: 400 Hz,  0.2s ON, 3s OFF,
                                     //        0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
+        // MALAYSIA supervisory tones
+        TONE_MY_CONGESTION,         // Congestion tone: 425 Hz, 500ms ON, 250ms OFF...
+        TONE_MY_RINGTONE,           // Ring tone: 425 Hz, 400ms ON, 200ms OFF, 400ms ON, 2s OFF...
         NUM_ALTERNATE_TONES
     };
 
@@ -244,6 +247,7 @@
         INDIA,
         TAIWAN,
         NZ,
+        MY,
         CEPT,
         NUM_REGIONS
     };
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index f72ac89..b667c8d 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -133,6 +134,10 @@
         "libaudiopolicy",
     ],
     data: ["bbb*.raw"],
+    srcs: [
+        "audio_test_utils.cpp",
+        "test_execution_tracer.cpp",
+    ],
     test_config_template: "audio_test_template.xml",
 }
 
@@ -141,7 +146,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiorecord_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -150,7 +154,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiotrack_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -159,7 +162,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audioeffect_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -172,7 +174,6 @@
     ],
     srcs: [
         "audioeffect_analyser.cpp",
-        "audio_test_utils.cpp",
     ],
     static_libs: [
         "libpffft",
@@ -184,7 +185,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiorouting_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -193,14 +193,15 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audioclient_serialization_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
 cc_test {
     name: "trackplayerbase_tests",
     defaults: ["libaudioclient_gtests_defaults"],
-    srcs: ["trackplayerbase_tests.cpp"],
+    srcs: [
+        "trackplayerbase_tests.cpp",
+    ],
 }
 
 cc_test {
@@ -208,6 +209,5 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiosystem_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index ee5489b..9a202cc3 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -565,12 +565,14 @@
     int attempts = 5;
     status_t status;
     unsigned int generation1, generation;
-    unsigned int numPorts = 0;
+    unsigned int numPorts;
     do {
         if (attempts-- < 0) {
             status = TIMED_OUT;
             break;
         }
+        // query for number of ports.
+        numPorts = 0;
         status = AudioSystem::listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE, &numPorts,
                                              nullptr, &generation1);
         if (status != NO_ERROR) {
@@ -622,12 +624,14 @@
     int attempts = 5;
     status_t status;
     unsigned int generation1, generation;
-    unsigned int numPatches = 0;
+    unsigned int numPatches;
     do {
         if (attempts-- < 0) {
             status = TIMED_OUT;
             break;
         }
+        // query for number of patches.
+        numPatches = 0;
         status = AudioSystem::listAudioPatches(&numPatches, nullptr, &generation1);
         if (status != NO_ERROR) {
             ALOGE("AudioSystem::listAudioPatches returned error %d", status);
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index 707b9b3..5debabc 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -15,18 +15,23 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "AudioClientSerializationUnitTests"
+#define LOG_TAG "AudioClientSerializationTests"
 
 #include <cstdint>
 #include <cstdlib>
 #include <ctime>
-
-#include <gtest/gtest.h>
+#include <vector>
 
 #include <android_audio_policy_configuration_V7_0-enums.h>
+#include <gtest/gtest.h>
+#include <media/AudioPolicy.h>
+#include <media/AudioProductStrategy.h>
+#include <media/AudioVolumeGroup.h>
+#include <media/VolumeGroupAttributes.h>
+#include <system/audio.h>
 #include <xsdc/XsdcSupport.h>
 
-#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 namespace xsd {
@@ -310,3 +315,9 @@
 // audioStream
 INSTANTIATE_TEST_SUITE_P(SerializationParameterizedTests, AudioAttributesParameterizedTest,
                          ::testing::Combine(testing::ValuesIn(kStreamtypes)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 94accae..f4d37bc 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -14,23 +14,26 @@
  * limitations under the License.
  */
 
-// #define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectAnalyser"
-
-#include <android-base/file.h>
-#include <android-base/stringprintf.h>
-#include <gtest/gtest.h>
-#include <media/AudioEffect.h>
-#include <system/audio_effects/effect_bassboost.h>
-#include <system/audio_effects/effect_equalizer.h>
 #include <fstream>
 #include <iostream>
 #include <string>
 #include <tuple>
 #include <vector>
 
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioEffectAnalyser"
+
+#include <android-base/file.h>
+#include <android-base/stringprintf.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <media/AudioEffect.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_equalizer.h>
+
 #include "audio_test_utils.h"
 #include "pffft.hpp"
+#include "test_execution_tracer.h"
 
 #define CHECK_OK(expr, msg) \
     mStatus = (expr);       \
@@ -417,3 +420,10 @@
         prevGain = diffB;
     }
 }
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index 26121cd..59d0c6a 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -15,8 +15,9 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectUnitTests"
+#define LOG_TAG "AudioEffectTests"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 #include <media/AudioEffect.h>
 #include <system/audio_effects/effect_hapticgenerator.h>
@@ -24,6 +25,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
@@ -80,7 +82,7 @@
     uint32_t numEffects = AudioEffect::kMaxPreProcessing;
     status_t ret = AudioEffect::queryDefaultPreProcessing(audioRecord->getSessionId(), descriptors,
                                                           &numEffects);
-    if (ret != OK) {
+    if (ret != OK || numEffects > AudioEffect::kMaxPreProcessing) {
         return false;
     }
     for (int i = 0; i < numEffects; i++) {
@@ -247,6 +249,7 @@
             ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
             EXPECT_EQ(NO_ERROR, capture->create());
             EXPECT_EQ(NO_ERROR, capture->start());
+            ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
             if (!isEffectDefaultOnRecord(&descriptors[i].type, &descriptors[i].uuid,
                                          capture->getAudioRecordHandle())) {
                 selectedEffect = i;
@@ -265,6 +268,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
@@ -287,6 +291,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_TRUE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                         capture->getAudioRecordHandle()))
             << "Effect should have been default on record. " << type;
@@ -304,6 +309,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
@@ -559,3 +565,10 @@
     EXPECT_TRUE(cb->receivedFramesProcessed)
             << "AudioEffect frames processed callback not received";
 }
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 8c63a6d..0bf2e82 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -14,43 +14,56 @@
  * limitations under the License.
  */
 
+#include <sstream>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AudioRecordTest"
 
+#include <android-base/logging.h>
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
 class AudioRecordTest : public ::testing::Test {
   public:
-    virtual void SetUp() override {
+    void SetUp() override {
         mAC = new AudioCapture(AUDIO_SOURCE_DEFAULT, 44100, AUDIO_FORMAT_PCM_16_BIT,
                                AUDIO_CHANNEL_IN_FRONT);
         ASSERT_NE(nullptr, mAC);
         ASSERT_EQ(OK, mAC->create()) << "record creation failed";
     }
 
-    virtual void TearDown() override {
+    void TearDown() override {
         if (mAC) ASSERT_EQ(OK, mAC->stop());
     }
 
     sp<AudioCapture> mAC;
 };
 
-class AudioRecordCreateTest
-    : public ::testing::TestWithParam<
-              std::tuple<uint32_t, audio_format_t, audio_channel_mask_t, audio_input_flags_t,
-                         audio_session_t, audio_source_t>> {
+using RecordCreateTestParam = std::tuple<uint32_t, audio_format_t, audio_channel_mask_t,
+                                         audio_input_flags_t, audio_session_t, audio_source_t>;
+enum {
+    RECORD_PARAM_SAMPLE_RATE,
+    RECORD_PARAM_FORMAT,
+    RECORD_PARAM_CHANNEL_MASK,
+    RECORD_PARAM_FLAGS,
+    RECORD_PARAM_SESSION_ID,
+    RECORD_PARAM_INPUT_SOURCE
+};
+
+class AudioRecordCreateTest : public ::testing::TestWithParam<RecordCreateTestParam> {
   public:
     AudioRecordCreateTest()
-        : mSampleRate(std::get<0>(GetParam())),
-          mFormat(std::get<1>(GetParam())),
-          mChannelMask(std::get<2>(GetParam())),
-          mFlags(std::get<3>(GetParam())),
-          mSessionId(std::get<4>(GetParam())),
-          mInputSource(std::get<5>(GetParam())){};
+        : mSampleRate(std::get<RECORD_PARAM_SAMPLE_RATE>(GetParam())),
+          mFormat(std::get<RECORD_PARAM_FORMAT>(GetParam())),
+          mChannelMask(std::get<RECORD_PARAM_CHANNEL_MASK>(GetParam())),
+          mFlags(std::get<RECORD_PARAM_FLAGS>(GetParam())),
+          mSessionId(std::get<RECORD_PARAM_SESSION_ID>(GetParam())),
+          mInputSource(std::get<RECORD_PARAM_INPUT_SOURCE>(GetParam())){};
 
     const uint32_t mSampleRate;
     const audio_format_t mFormat;
@@ -62,14 +75,14 @@
 
     sp<AudioCapture> mAC;
 
-    virtual void SetUp() override {
+    void SetUp() override {
         mAC = new AudioCapture(mInputSource, mSampleRate, mFormat, mChannelMask, mFlags, mSessionId,
                                mTransferType);
         ASSERT_NE(nullptr, mAC);
         ASSERT_EQ(OK, mAC->create()) << "record creation failed";
     }
 
-    virtual void TearDown() override {
+    void TearDown() override {
         if (mAC) ASSERT_EQ(OK, mAC->stop());
     }
 };
@@ -197,6 +210,18 @@
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
 }
 
+static std::string GetRecordTestName(const testing::TestParamInfo<RecordCreateTestParam>& info) {
+    const auto& p = info.param;
+    std::ostringstream s;
+    s << std::get<RECORD_PARAM_SAMPLE_RATE>(p) << "_"
+      << audio_format_to_string(std::get<RECORD_PARAM_FORMAT>(p)) << "__"
+      << audio_channel_mask_to_string(std::get<RECORD_PARAM_CHANNEL_MASK>(p)) << "__"
+      << "Flags_0x" << std::hex << std::get<RECORD_PARAM_FLAGS>(p) << std::dec << "__"
+      << "Session_" << std::get<RECORD_PARAM_SESSION_ID>(p) << "__"
+      << audio_source_to_string(std::get<RECORD_PARAM_INPUT_SOURCE>(p));
+    return s.str();
+}
+
 // for port primary input
 INSTANTIATE_TEST_SUITE_P(AudioRecordPrimaryInput, AudioRecordCreateTest,
                          ::testing::Combine(::testing::Values(8000, 11025, 12000, 16000, 22050,
@@ -207,7 +232,8 @@
                                                               AUDIO_CHANNEL_IN_FRONT_BACK),
                                             ::testing::Values(AUDIO_INPUT_FLAG_NONE),
                                             ::testing::Values(AUDIO_SESSION_NONE),
-                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+                         GetRecordTestName);
 
 // for port fast input
 INSTANTIATE_TEST_SUITE_P(AudioRecordFastInput, AudioRecordCreateTest,
@@ -219,7 +245,8 @@
                                                               AUDIO_CHANNEL_IN_FRONT_BACK),
                                             ::testing::Values(AUDIO_INPUT_FLAG_FAST),
                                             ::testing::Values(AUDIO_SESSION_NONE),
-                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+                         GetRecordTestName);
 
 // misc
 INSTANTIATE_TEST_SUITE_P(AudioRecordMiscInput, AudioRecordCreateTest,
@@ -232,4 +259,15 @@
                                                               AUDIO_SOURCE_CAMCORDER,
                                                               AUDIO_SOURCE_VOICE_RECOGNITION,
                                                               AUDIO_SOURCE_VOICE_COMMUNICATION,
-                                                              AUDIO_SOURCE_UNPROCESSED)));
+                                                              AUDIO_SOURCE_UNPROCESSED)),
+                         GetRecordTestName);
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+    // This is for death handlers instantiated by the framework code.
+    android::ProcessState::self()->setThreadPoolMaxThreadCount(1);
+    android::ProcessState::self()->startThreadPool();
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index c101f00..3b2285e 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -19,11 +19,13 @@
 
 #include <string.h>
 
+#include <binder/Binder.h>
 #include <binder/ProcessState.h>
 #include <cutils/properties.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
@@ -150,6 +152,7 @@
         config.sample_rate = 48000;
         AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
         mix.mDeviceType = deviceType;
+        mix.mToken = sp<BBinder>::make();
         mMixes.push(mix);
         if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
             mPolicyMixRegistered = true;
@@ -267,21 +270,6 @@
     playback->stop();
 }
 
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
-  public:
-    void OnTestStart(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Started", test_info);
-    }
-    void OnTestEnd(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Completed", test_info);
-    }
-
-  private:
-    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
-        ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
-    }
-};
-
 int main(int argc, char** argv) {
     android::ProcessState::self()->startThreadPool();
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index d9789f1..03c15f4 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -14,18 +14,19 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioSystemTest"
-
 #include <string.h>
 
 #include <set>
 
+#define LOG_TAG "AudioSystemTest"
+
 #include <gtest/gtest.h>
 #include <log/log.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/IAudioFlinger.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioDeviceDescription;
@@ -706,21 +707,6 @@
     }
 }
 
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
-  public:
-    void OnTestStart(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Started", test_info);
-    }
-    void OnTestEnd(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Completed", test_info);
-    }
-
-  private:
-    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
-        ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
-    }
-};
-
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
     ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index 2b68225..0282bd7 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -15,10 +15,13 @@
  */
 
 //#define LOG_NDEBUG 0
+#define LOG_TAG "AudioTrackTests"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
@@ -209,3 +212,10 @@
                                              AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_FAST,
                                              AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
                            ::testing::Values(AUDIO_SESSION_NONE)));
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.cpp b/media/libaudioclient/tests/test_execution_tracer.cpp
new file mode 100644
index 0000000..797bb4b
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TestExecutionTracer"
+
+#include "test_execution_tracer.h"
+
+#include <android-base/logging.h>
+
+void TestExecutionTracer::OnTestStart(const ::testing::TestInfo& test_info) {
+    TraceTestState("Started", test_info);
+}
+
+void TestExecutionTracer::OnTestEnd(const ::testing::TestInfo& test_info) {
+    TraceTestState("Finished", test_info);
+}
+
+void TestExecutionTracer::OnTestPartResult(const ::testing::TestPartResult& result) {
+    if (result.failed()) {
+        LOG(ERROR) << result;
+    } else {
+        LOG(INFO) << result;
+    }
+}
+
+// static
+void TestExecutionTracer::TraceTestState(const std::string& state,
+                                         const ::testing::TestInfo& test_info) {
+    LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.h b/media/libaudioclient/tests/test_execution_tracer.h
new file mode 100644
index 0000000..9031aaf
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gtest/gtest.h>
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+  public:
+    void OnTestStart(const ::testing::TestInfo& test_info) override;
+    void OnTestEnd(const ::testing::TestInfo& test_info) override;
+    void OnTestPartResult(const ::testing::TestPartResult& result) override;
+
+  private:
+    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info);
+};
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
index c9b704d..7317bf0 100644
--- a/media/libaudioclient/tests/trackplayerbase_tests.cpp
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -16,10 +16,12 @@
 
 #define LOG_TAG "TrackPlayerBaseTest"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
-
 #include <media/TrackPlayerBase.h>
 
+#include "test_execution_tracer.h"
+
 using namespace android;
 using namespace android::media;
 
@@ -159,3 +161,10 @@
 
 INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, PauseTestParam,
                          ::testing::Values(std::make_tuple(1.0, 75.0, 2, 24000)));
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index ae0457f..6dbf284 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -192,7 +192,8 @@
                 dst->append(
                         base::StringPrintf("%*s extra audio descriptor %zu:\n", eadSpaces, "", i));
                 dst->append(base::StringPrintf(
-                    "%*s- standard: %u\n", descSpaces, "", mExtraAudioDescriptors[i].standard));
+                        "%*s- standard: %u\n", descSpaces, "",
+                        static_cast<unsigned>(mExtraAudioDescriptors[i].standard)));
                 dst->append(base::StringPrintf("%*s- descriptor:", descSpaces, ""));
                 for (auto v : mExtraAudioDescriptors[i].audioDescriptor) {
                     dst->append(base::StringPrintf(" %02x", v));
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 3c05b0b..b8d0998 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -23,7 +23,6 @@
     ],
 
     required: [
-        "libaudiohal@4.0",
         "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index f88915d..15cb297 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -51,13 +51,11 @@
  * media/java/android/media/AudioHalVersionInfo.java.
  */
 static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
-    // TODO: remove this comment to get AIDL
-    // AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
-    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 4, 0),
 };
 
 static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index fb1cc34..4d81f77 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -41,7 +41,7 @@
     ],
     header_libs: [
         "android.hardware.audio.common.util@all-versions",
-    ]
+    ],
 }
 
 cc_defaults {
@@ -71,7 +71,7 @@
     ],
     header_libs: [
         "libaudioclient_headers",
-        "libaudiohal_headers"
+        "libaudiohal_headers",
     ],
     defaults: [
         "latest_android_media_audio_common_types_cpp_export_shared",
@@ -83,36 +83,10 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@4.0",
-    defaults: [
-        "libaudiohal_default",
-        "libaudiohal_hidl_default"
-    ],
-    srcs: [
-        ":audio_core_hal_client_sources",
-        ":audio_effect_hidl_hal_client_sources",
-        "EffectsFactoryHalEntry.cpp",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common@4.0",
-        "android.hardware.audio.common@4.0-util",
-        "android.hardware.audio.effect@4.0",
-        "android.hardware.audio.effect@4.0-util",
-        "android.hardware.audio@4.0",
-        "android.hardware.audio@4.0-util",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=4",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ]
-}
-
-cc_library_shared {
     name: "libaudiohal@5.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -131,14 +105,14 @@
         "-DMAJOR_VERSION=5",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -157,14 +131,14 @@
         "-DMAJOR_VERSION=6",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_static {
     name: "libaudiohal.effect@7.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_effect_hidl_hal_client_sources",
@@ -179,14 +153,14 @@
         "-DMAJOR_VERSION=7",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
     name: "libaudiohal@7.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -206,7 +180,7 @@
         "-DMAJOR_VERSION=7",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
@@ -215,7 +189,7 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_sounddose_ndk_shared",
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -242,7 +216,7 @@
         "-DCOMMON_TYPES_MINOR_VERSION=0",
         "-DCORE_TYPES_MINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_defaults {
@@ -257,7 +231,7 @@
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "av-audio-types-aidl-ndk",
+        "av-audio-types-aidl-V1-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
@@ -287,9 +261,30 @@
     ],
     srcs: [
         "DevicesFactoryHalEntry.cpp",
+        "EffectsFactoryHalEntry.cpp",
+        ":audio_effect_hal_aidl_src_files",
+        ":core_audio_hal_aidl_src_files",
+    ],
+}
+
+filegroup {
+    name: "core_audio_hal_aidl_src_files",
+    srcs: [
+        "ConversionHelperAidl.cpp",
+        "DeviceHalAidl.cpp",
+        "DevicesFactoryHalAidl.cpp",
+        "Hal2AidlMapper.cpp",
+        "StreamHalAidl.cpp",
+    ],
+}
+
+filegroup {
+    name: "audio_effect_hal_aidl_src_files",
+    srcs: [
         "EffectConversionHelperAidl.cpp",
         "EffectBufferHalAidl.cpp",
         "EffectHalAidl.cpp",
+        "EffectsFactoryHalAidl.cpp",
         "effectsAidlConversion/AidlConversionAec.cpp",
         "effectsAidlConversion/AidlConversionAgc1.cpp",
         "effectsAidlConversion/AidlConversionAgc2.cpp",
@@ -306,21 +301,7 @@
         "effectsAidlConversion/AidlConversionVendorExtension.cpp",
         "effectsAidlConversion/AidlConversionVirtualizer.cpp",
         "effectsAidlConversion/AidlConversionVisualizer.cpp",
-        "EffectsFactoryHalAidl.cpp",
-        "EffectsFactoryHalEntry.cpp",
         ":audio_effectproxy_src_files",
-        ":core_audio_hal_aidl_src_files",
-    ],
-}
-
-filegroup {
-    name: "core_audio_hal_aidl_src_files",
-    srcs: [
-        "ConversionHelperAidl.cpp",
-        "DeviceHalAidl.cpp",
-        "DevicesFactoryHalAidl.cpp",
-        "Hal2AidlMapper.cpp",
-        "StreamHalAidl.cpp",
     ],
 }
 
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.cpp b/media/libaudiohal/impl/ConversionHelperAidl.cpp
index 46abfda..7a32811 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperAidl.cpp
@@ -37,10 +37,6 @@
     using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
     if (parameterKeys.size() == 0) return OK;
     const String8 rawKeys = parameterKeys.keysToString();
-    if (vendorExt == nullptr) {
-        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeys.c_str());
-        return OK;
-    }
 
     std::vector<std::string> parameterIds;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameterIds(
@@ -85,10 +81,6 @@
     using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
     if (parameters.size() == 0) return OK;
     const String8 rawKeysAndValues = parameters.toString();
-    if (vendorExt == nullptr) {
-        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeysAndValues.c_str());
-        return OK;
-    }
 
     std::vector<VendorParameter> syncParameters, asyncParameters;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameters(
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 86fa63f..14765f6b 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -274,15 +274,16 @@
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
-status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
+status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
     if (config == nullptr || size == nullptr) {
         return BAD_VALUE;
     }
+    constexpr bool isInput = true;
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
     AudioDevice aidlDevice;
     aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
     AudioSource aidlSource = AudioSource::DEFAULT;
@@ -296,6 +297,9 @@
                         0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
                         &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+    if (mixPortConfig.id == 0) return BAD_VALUE;  // HAL suggests a different config.
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
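
Because `config` is now an in/out parameter, getInputBufferSize() writes the HAL-adjusted configuration back before the `mixPortConfig.id == 0` check, so a caller that receives BAD_VALUE can inspect, and possibly retry with, the configuration the HAL suggested. A hypothetical caller sketch under that assumption; `queryInputBufferSize` is not part of this patch:

    #include <media/audiohal/DeviceHalInterface.h>
    #include <system/audio.h>
    #include <utils/Errors.h>

    using namespace android;

    // Hypothetical helper: query the input buffer size and, if the HAL rejects the
    // requested configuration, retry once with the configuration it suggested.
    static status_t queryInputBufferSize(const sp<DeviceHalInterface>& dev,
                                         struct audio_config* config, size_t* sizeBytes) {
        status_t status = dev->getInputBufferSize(config, sizeBytes);
        if (status == BAD_VALUE) {
            // getInputBufferSize() wrote the HAL-suggested configuration back into
            // *config before returning, so the retry uses that suggestion.
            status = dev->getInputBufferSize(config, sizeBytes);
        }
        return status;
    }
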
@@ -401,8 +405,7 @@
                       *static_cast<StreamCallbackBase*>(this)),
               StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>(
                       *static_cast<StreamCallbackBase*>(this)) {}
-    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& in_audioMetadata) override {
-        std::basic_string<uint8_t> halMetadata(in_audioMetadata.begin(), in_audioMetadata.end());
+    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& halMetadata) override {
         return StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::runCb(
                 [&halMetadata](auto cb) { cb->onCodecFormatChanged(halMetadata); });
     }
@@ -914,15 +917,39 @@
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    // There is not AIDL API defined for `prepareToDisconnectExternalDevice`.
-    // Call `setConnectedState` instead.
-    // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
-    RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
-    std::lock_guard l(mLock);
-    mDeviceDisconnectionNotified.insert(port->id);
-    // Return that there was no error as otherwise the disconnection procedure will not be
-    // considered complete for upper layers, and 'setConnectedState' will not be called again
-    return OK;
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    const bool isInput = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::portDirection(port->role, port->type)) ==
+                    ::aidl::android::AudioPortDirection::INPUT;
+    AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+              __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    status_t status = NO_ERROR;
+    {
+        std::lock_guard l(mLock);
+        status = mMapper.prepareToDisconnectExternalDevice(aidlPort);
+    }
+    if (status == UNKNOWN_TRANSACTION) {
+        // The HAL does not implement the `prepareToDisconnectExternalDevice` AIDL API.
+        // Call `setConnectedState` instead.
+        RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
+        std::lock_guard l(mLock);
+        mDeviceDisconnectionNotified.insert(port->id);
+        // Return that there was no error as otherwise the disconnection procedure will not be
+        // considered complete for upper layers, and 'setConnectedState' will not be called again
+        return OK;
+    } else {
+        return status;
+    }
 }
 
 status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
@@ -936,11 +963,10 @@
         std::lock_guard l(mLock);
         if (mDeviceDisconnectionNotified.erase(port->id) > 0) {
             // For device disconnection, APM will first call `prepareToDisconnectExternalDevice`
-            // and then call `setConnectedState`. However, there is no API for
-            // `prepareToDisconnectExternalDevice` yet. In that case, `setConnectedState` will be
-            // called when calling `prepareToDisconnectExternalDevice`. Do not call to the HAL if
-            // previous call is successful. Also remove the cache here to avoid a large cache after
-            // a long run.
+            // and then call `setConnectedState`. If the HAL does not implement
+            // `prepareToDisconnectExternalDevice`, `setConnectedState` has already been called
+            // from within it. Do not call into the HAL again if that previous call succeeded.
+            // Also remove the cache entry here to avoid a large cache after a long run.
             return OK;
         }
     }
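
Taken together, the two code paths above implement a two-step disconnection handshake: the audio policy manager first calls prepareToDisconnectExternalDevice() and then setConnectedState(..., false), with the fallback for HALs lacking the AIDL method folded into the first step. A rough caller-side sketch of that sequence; the helper below is hypothetical and the real caller lives in the audio policy code, not in this patch:

    #include <media/audiohal/DeviceHalInterface.h>
    #include <system/audio.h>

    using namespace android;

    // Hypothetical illustration of the disconnection flow described in the comments above.
    static void disconnectExternalDevice(const sp<DeviceHalInterface>& hal,
                                         const struct audio_port_v7& devicePort) {
        // Step 1: let the HAL wind down streams that use the device. On HALs that do
        // not implement the AIDL method, this falls back to setConnectedState(false)
        // and records the port id in mDeviceDisconnectionNotified.
        (void)hal->prepareToDisconnectExternalDevice(&devicePort);
        // Step 2: report the disconnected state. If step 1 already took the fallback
        // path, this call returns OK without calling into the HAL a second time.
        (void)hal->setConnectedState(&devicePort, false /*connected*/);
    }
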
@@ -962,7 +988,7 @@
     if (mModule == nullptr) return NO_INIT;
     {
         std::lock_guard l(mLock);
-        mMapper.resetUnusedPatchesAndPortConfigs();
+        mMapper.resetUnusedPatchesPortConfigsAndPorts();
     }
     ModuleDebug debug{ .simulateDeviceConnections = enabled };
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
@@ -1029,15 +1055,11 @@
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyReconfigA2dp),
                     [&](const String8& value) -> status_t {
-                        if (mVendorExt != nullptr) {
-                            std::vector<VendorParameter> result;
-                            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                    mVendorExt->parseBluetoothA2dpReconfigureOffload(
-                                            std::string(value.c_str()), &result)));
-                            reconfigureOffload = std::move(result);
-                        } else {
-                            reconfigureOffload = std::vector<VendorParameter>();
-                        }
+                        std::vector<VendorParameter> result;
+                        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mVendorExt->parseBluetoothA2dpReconfigureOffload(
+                                        std::string(value.c_str()), &result)));
+                        reconfigureOffload = std::move(result);
                         return OK;
                     }));
     if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index f705db7..d925b46 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -113,7 +113,7 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e8e1f46..478e0f0 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -236,7 +236,7 @@
 }
 
 status_t DeviceHalHidl::getInputBufferSize(
-        const struct audio_config *config, size_t *size) {
+        struct audio_config *config, size_t *size) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 7a712df..1362dab 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -67,7 +67,7 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 96a3e60..347afa6 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -17,13 +17,16 @@
 #include <algorithm>
 #include <map>
 #include <memory>
+#include <mutex>
 #include <string>
 
 #define LOG_TAG "DevicesFactoryHalAidl"
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/core/IModule.h>
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
 #include <android/binder_manager.h>
+#include <cutils/properties.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <utils/Log.h>
@@ -35,6 +38,7 @@
 using aidl::android::hardware::audio::core::IConfig;
 using aidl::android::hardware::audio::core::IModule;
 using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::hardware::audio::core::VendorParameter;
 using aidl::android::media::audio::common::AudioHalEngineConfig;
 using aidl::android::media::audio::IHalAdapterVendorExtension;
 using android::detail::AudioHalVersionInfo;
@@ -62,10 +66,84 @@
     return cpp;
 }
 
+class HalAdapterVendorExtensionWrapper :
+            public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+  private:
+    template<typename F>
+    ndk::ScopedAStatus callWithRetryOnCrash(F method) {
+        ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+        for (auto service = getService(); service != nullptr; service = getService(true)) {
+            status = method(service);
+            if (status.getStatus() != STATUS_DEAD_OBJECT) break;
+        }
+        return status;
+    }
+
+    ndk::ScopedAStatus parseVendorParameterIds(ParameterScope in_scope,
+                                               const std::string& in_rawKeys,
+                                               std::vector<std::string>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseVendorParameterIds(in_scope, in_rawKeys, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus parseVendorParameters(
+            ParameterScope in_scope, const std::string& in_rawKeysAndValues,
+            std::vector<VendorParameter>* out_syncParameters,
+            std::vector<VendorParameter>* out_asyncParameters) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseVendorParameters(in_scope, in_rawKeysAndValues,
+                    out_syncParameters, out_asyncParameters);
+        });
+    }
+
+    ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+            const std::string& in_rawValue, std::vector<VendorParameter>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseBluetoothA2dpReconfigureOffload(in_rawValue, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(const std::string& in_rawValue,
+            std::vector<VendorParameter>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseBluetoothLeReconfigureOffload(in_rawValue, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus processVendorParameters(ParameterScope in_scope,
+                                               const std::vector<VendorParameter>& in_parameters,
+                                               std::string* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->processVendorParameters(in_scope, in_parameters, _aidl_return);
+        });
+    }
+
+    std::shared_ptr<IHalAdapterVendorExtension> getService(bool reset = false) {
+        std::lock_guard l(mLock);
+        if (reset || !mVendorExt.has_value()) {
+            if (property_get_bool("ro.audio.ihaladaptervendorextension_enabled", false)) {
+                auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
+                mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
+                        IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
+                                        AServiceManager_waitForService(serviceName.c_str()))));
+            } else {
+                mVendorExt = nullptr;
+            }
+        }
+        return mVendorExt.value();
+    }
+
+    std::mutex mLock;
+    std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
+            mVendorExt GUARDED_BY(mLock);
+};
+
 }  // namespace
 
 DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
-    : mConfig(std::move(config)) {
+        : mConfig(std::move(config)),
+          mVendorExt(ndk::SharedRefBase::make<HalAdapterVendorExtensionWrapper>()) {
 }
 
 status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
@@ -110,19 +188,10 @@
         ALOGE("%s fromBinder %s failed", __func__, serviceName.c_str());
         return NO_INIT;
     }
-    *device = sp<DeviceHalAidl>::make(name, service, getVendorExtension());
+    *device = sp<DeviceHalAidl>::make(name, service, mVendorExt);
     return OK;
 }
 
-status_t DevicesFactoryHalAidl::getHalPids(std::vector<pid_t> *pids) {
-    if (pids == nullptr) {
-        return BAD_VALUE;
-    }
-    // Retrieval of HAL pids requires "list services" permission which is not granted
-    // to the audio server. This job is performed by AudioService (in Java) instead.
-    return PERMISSION_DENIED;
-}
-
 status_t DevicesFactoryHalAidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
     // Dynamic registration of module instances is not supported. The functionality
     // in the audio server which is related to this callback can be removed together
@@ -158,20 +227,6 @@
     return OK;
 }
 
-std::shared_ptr<IHalAdapterVendorExtension> DevicesFactoryHalAidl::getVendorExtension() {
-    if (!mVendorExt.has_value()) {
-        auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
-        if (AServiceManager_isDeclared(serviceName.c_str())) {
-            mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
-                    IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
-                                    AServiceManager_waitForService(serviceName.c_str()))));
-        } else {
-            mVendorExt = nullptr;
-        }
-    }
-    return mVendorExt.value();
-}
-
 // Main entry-point to the shared library.
 extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
     auto serviceName = std::string(IConfig::descriptor) + "/default";
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 97e3796..2a3a9e7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -35,8 +35,6 @@
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
 
-    status_t getHalPids(std::vector<pid_t> *pids) override;
-
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
@@ -47,10 +45,7 @@
 
   private:
     const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
-    std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
-            mVendorExt;
-
-    std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> getVendorExtension();
+    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
 
     ~DevicesFactoryHalAidl() = default;
 };
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index eef60b5..1cac9da 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -163,29 +163,6 @@
     return BAD_VALUE;
 }
 
-status_t DevicesFactoryHalHidl::getHalPids(std::vector<pid_t> *pids) {
-    std::set<pid_t> pidsSet;
-    auto factories = copyDeviceFactories();
-    for (const auto& factory : factories) {
-        using ::android::hidl::base::V1_0::DebugInfo;
-
-        DebugInfo debugInfo;
-        auto ret = factory->getDebugInfo([&] (const auto &info) {
-               debugInfo = info;
-            });
-        if (!ret.isOk()) {
-           return INVALID_OPERATION;
-        }
-        if (debugInfo.pid == (int)IServiceManager::PidConstant::NO_PID) {
-            continue;
-        }
-        pidsSet.insert(debugInfo.pid);
-    }
-
-    *pids = {pidsSet.begin(), pidsSet.end()};
-    return NO_ERROR;
-}
-
 status_t DevicesFactoryHalHidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
     ALOG_ASSERT(callback != nullptr);
     bool needToCallCallback = false;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 3285af7..e38d86d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -43,8 +43,6 @@
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
 
-    status_t getHalPids(std::vector<pid_t> *pids) override;
-
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
index a701852..33fe3ed 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -58,25 +58,14 @@
 }
 
 EffectBufferHalAidl::~EffectBufferHalAidl() {
+    if (mAudioBuffer.raw) free(mAudioBuffer.raw);
 }
 
 status_t EffectBufferHalAidl::init() {
-    int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
-    if (fd < 0) {
-        ALOGE("%s create ashmem failed %d", __func__, fd);
-        return fd;
+    if (0 != posix_memalign(&mAudioBuffer.raw, 32, mBufferSize)) {
+        return NO_MEMORY;
     }
 
-    ScopedFileDescriptor tempFd(fd);
-    mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
-                            MAP_SHARED, fd, 0 /* offset */);
-    if (mAudioBuffer.raw == MAP_FAILED) {
-        ALOGE("mmap failed for fd %d", fd);
-        mAudioBuffer.raw = nullptr;
-        return INVALID_OPERATION;
-    }
-
-    mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
index 035314b..cf6031f 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -50,7 +50,6 @@
     const size_t mBufferSize;
     bool mFrameCountChanged;
     void* mExternalData;
-    aidl::android::hardware::common::Ashmem mMemory;
     audio_buffer_t mAudioBuffer;
 
     // Can not be constructed directly by clients.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 196b432..e1a82f8 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -180,18 +180,6 @@
 
     State state;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
-    // in case of buffer/ioHandle re-configure for an opened effect, close it and re-open
-    if (state != State::INIT && mCommon != common) {
-        ALOGI("%s at state %s, common parameter change from %s to %s, closing effect", __func__,
-              android::internal::ToString(state).c_str(), mCommon.toString().c_str(),
-              common.toString().c_str());
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
-        mStatusQ.reset();
-        mInputQ.reset();
-        mOutputQ.reset();
-    }
-
     if (state == State::INIT) {
         ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
               android::internal::ToString(state).c_str(), common.input.toString().c_str(),
@@ -199,22 +187,7 @@
         IEffect::OpenEffectReturn openReturn;
         RETURN_STATUS_IF_ERROR(
                 statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
-
-        if (mIsProxyEffect) {
-            mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
-            mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
-            mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
-        } else {
-            mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
-            mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
-            mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
-        }
-
-        if (status_t status = updateEventFlags(); status != OK) {
-            ALOGV("%s closing at status %d", __func__, status);
-            mEffect->close();
-            return status;
-        }
+        updateMqsAndEventFlags(openReturn);
     } else if (mCommon != common) {
         ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
         Parameter aidlParam = UNION_MAKE(Parameter, common, common);
@@ -225,6 +198,26 @@
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
+void EffectConversionHelperAidl::updateMqsAndEventFlags(const IEffect::OpenEffectReturn& ret) {
+    if (mIsProxyEffect) {
+        mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
+    } else {
+        mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
+    }
+    updateEventFlags();
+    updateDataMqs(ret);
+}
+
+void EffectConversionHelperAidl::updateDataMqs(const IEffect::OpenEffectReturn& ret) {
+    if (mIsProxyEffect) {
+        mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
+        mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
+    } else {
+        mInputQ = std::make_shared<DataMQ>(ret.inputDataMQ);
+        mOutputQ = std::make_shared<DataMQ>(ret.outputDataMQ);
+    }
+}
+
 status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
                                                      const void* pCmdData __unused,
                                                      uint32_t* replySize, void* pReplyData) {
@@ -416,10 +409,8 @@
         }
         // update FMQs if the effect instance already open
         if (State state; effectProxy->getState(&state).isOk() && state != State::INIT) {
-            mStatusQ = effectProxy->getStatusMQ();
-            mInputQ = effectProxy->getInputMQ();
-            mOutputQ = effectProxy->getOutputMQ();
-            updateEventFlags();
+            IEffect::OpenEffectReturn openReturn;
+            updateMqsAndEventFlags(openReturn);
         }
     }
     return *static_cast<int32_t*>(pReplyData) = OK;
@@ -517,5 +508,14 @@
     return desc;
 }
 
+status_t EffectConversionHelperAidl::reopen() {
+    IEffect::OpenEffectReturn openReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->reopen(&openReturn)));
+
+    // status MQ won't be changed after open
+    updateDataMqs(openReturn);
+    return OK;
+}
+
 }  // namespace effect
 }  // namespace android
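
As the comment in reopen() above notes, only the data queues are refreshed after IEffect::reopen(); the status queue created at open() stays valid. A simplified sketch of that split, using stand-in types rather than the AIDL FMQ classes:

#include <memory>

struct OpenEffectReturn { int statusMQ = 0, inputDataMQ = 0, outputDataMQ = 0; };
struct Queue { explicit Queue(int desc) : desc(desc) {} int desc; };

struct Helper {
    std::shared_ptr<Queue> statusQ, inputQ, outputQ;

    void open(const OpenEffectReturn& ret) {
        statusQ = std::make_shared<Queue>(ret.statusMQ);  // created once at open()
        updateDataMqs(ret);
    }
    void reopen(const OpenEffectReturn& ret) {
        updateDataMqs(ret);  // statusQ deliberately left untouched
    }
    void updateDataMqs(const OpenEffectReturn& ret) {
        inputQ = std::make_shared<Queue>(ret.inputDataMQ);
        outputQ = std::make_shared<Queue>(ret.outputDataMQ);
    }
};

int main() {
    Helper h;
    h.open({});
    h.reopen({});
    return 0;
}
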
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 5db334c..0c0184e 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -47,6 +47,7 @@
     bool isBypassingOrTunnel() const;
 
     ::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
+    status_t reopen();
 
   protected:
     const int32_t mSessionId;
@@ -68,9 +69,6 @@
                                 void* pReplyData);
 
   private:
-    const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
-            .type = aidl::android::media::audio::common::AudioFormatType::PCM,
-            .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
     const bool mIsProxyEffect;
 
     static constexpr int kDefaultframeCount = 0x100;
@@ -80,13 +78,16 @@
         return pt ? std::to_string(*pt) : "nullptr";
     }
 
-    using AudioChannelLayout = aidl::android::media::audio::common::AudioChannelLayout;
     const aidl::android::media::audio::common::AudioConfig kDefaultAudioConfig = {
             .base = {.sampleRate = 44100,
-                     .channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
-                             AudioChannelLayout::LAYOUT_STEREO),
-                     .format = kDefaultFormatDescription},
+                     .channelMask = aidl::android::media::audio::common::AudioChannelLayout::make<
+                             aidl::android::media::audio::common::AudioChannelLayout::layoutMask>(
+                             aidl::android::media::audio::common::AudioChannelLayout::
+                                     LAYOUT_STEREO),
+                     .format = {.type = aidl::android::media::audio::common::AudioFormatType::PCM,
+                                .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT}},
             .frameCount = kDefaultframeCount};
+
     // command handler map
     typedef status_t (EffectConversionHelperAidl::*CommandHandler)(uint32_t /* cmdSize */,
                                                                    const void* /* pCmdData */,
@@ -97,7 +98,6 @@
     std::shared_ptr<StatusMQ> mStatusQ = nullptr;
     std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
 
-
     struct EventFlagDeleter {
         void operator()(::android::hardware::EventFlag* flag) const {
             if (flag) {
@@ -107,6 +107,10 @@
     };
     std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
     status_t updateEventFlags();
+    void updateDataMqs(
+            const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+    void updateMqsAndEventFlags(
+            const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
 
     status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                         void* pReplyData);
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index f26444c..b1b1dfe 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -53,23 +53,27 @@
 #include "effectsAidlConversion/AidlConversionVisualizer.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
 using ::aidl::android::hardware::audio::effect::State;
 
 namespace android {
 namespace effect {
 
 EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
-                             const std::shared_ptr<IEffect>& effect,
-                             int32_t sessionId, int32_t ioId, const Descriptor& desc,
-                             bool isProxyEffect)
+                             const std::shared_ptr<IEffect>& effect, int32_t sessionId,
+                             int32_t ioId, const Descriptor& desc, bool isProxyEffect)
     : mFactory(factory),
       mEffect(effect),
       mSessionId(sessionId),
       mIoId(ioId),
       mIsProxyEffect(isProxyEffect) {
+    assert(mFactory != nullptr);
+    assert(mEffect != nullptr);
     createAidlConversion(effect, sessionId, ioId, desc);
 }
 
@@ -165,26 +169,45 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
+    const std::string effectName = mConversion->getDescriptor().common.name;
     State state = State::INIT;
     if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
         state != State::PROCESSING) {
-        ALOGI("%s skipping %s process because it's %s", __func__,
-              mConversion->getDescriptor().common.name.c_str(),
+        ALOGI("%s skipping %s process because it's %s", __func__, effectName.c_str(),
               mConversion->isBypassing()
                       ? "bypassing"
                       : aidl::android::hardware::audio::effect::toString(state).c_str());
         return -ENODATA;
     }
 
+    // check if the DataMq needs any update, timeout at 1ns to avoid being blocked
+    auto efGroup = mConversion->getEventFlagGroup();
+    if (!efGroup) {
+        ALOGE("%s invalid efGroup", __func__);
+        return INVALID_OPERATION;
+    }
+
+    // use IFactory HAL version because IEffect can be an EffectProxy instance
+    static const int halVersion = [&]() {
+        int version = 0;
+        return mFactory->getInterfaceVersion(&version).isOk() ? version : 0;
+    }();
+
+    if (uint32_t efState = 0; halVersion >= kReopenSupportedVersion &&
+                              ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
+                                                             1 /* ns */, true /* retry */) &&
+                              efState & kEventFlagDataMqUpdate) {
+        ALOGI("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
+              halVersion);
+        mConversion->reopen();
+    }
     auto statusQ = mConversion->getStatusMQ();
     auto inputQ = mConversion->getInputMQ();
     auto outputQ = mConversion->getOutputMQ();
-    auto efGroup = mConversion->getEventFlagGroup();
     if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
-        !outputQ->isValid() || !efGroup) {
-        ALOGE("%s invalid FMQ [Status %d I %d O %d] efGroup %p", __func__,
-              statusQ ? statusQ->isValid() : 0, inputQ ? inputQ->isValid() : 0,
-              outputQ ? outputQ->isValid() : 0, efGroup.get());
+        !outputQ->isValid()) {
+        ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
+              inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
         return INVALID_OPERATION;
     }
 
@@ -225,8 +248,8 @@
         return INVALID_OPERATION;
     }
 
-    ALOGD("%s %s consumed %zu produced %zu", __func__,
-          mConversion->getDescriptor().common.name.c_str(), floatsToWrite, floatsToRead);
+    ALOGD("%s %s consumed %zu produced %zu", __func__, effectName.c_str(), floatsToWrite,
+          floatsToRead);
     return OK;
 }
 
@@ -263,6 +286,7 @@
 
 status_t EffectHalAidl::close() {
     TIME_CHECK();
+    mEffect->command(CommandId::STOP);
     return statusTFromBinderStatus(mEffect->close());
 }
 
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index aee42a9..d440ef8 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -106,8 +106,8 @@
 ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
                                      const std::optional<Parameter::Specific>& specific,
                                      IEffect::OpenEffectReturn* ret __unused) {
-    ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
-            EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+    ndk::ScopedAStatus status =
+            ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_STATE, "nullEffectHandle");
     for (auto& sub : mSubEffects) {
         IEffect::OpenEffectReturn openReturn;
         if (!sub.handle || !(status = sub.handle->open(common, specific, &openReturn)).isOk()) {
@@ -130,7 +130,33 @@
     return status;
 }
 
+ndk::ScopedAStatus EffectProxy::reopen(OpenEffectReturn* ret __unused) {
+    ndk::ScopedAStatus status =
+            ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_STATE, "nullEffectHandle");
+    for (auto& sub : mSubEffects) {
+        IEffect::OpenEffectReturn openReturn;
+        if (!sub.handle || !(status = sub.handle->reopen(&openReturn)).isOk()) {
+            ALOGE("%s: failed to open %p UUID %s", __func__, sub.handle.get(),
+                  ::android::audio::utils::toString(sub.descriptor.common.id.uuid).c_str());
+            break;
+        }
+        sub.effectMq.statusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+        sub.effectMq.inputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+        sub.effectMq.outputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+    }
+
+    // close all opened effects if failure
+    if (!status.isOk()) {
+        ALOGE("%s: closing all sub-effects with error %s", __func__,
+              status.getDescription().c_str());
+        close();
+    }
+
+    return status;
+}
+
 ndk::ScopedAStatus EffectProxy::close() {
+    command(CommandId::STOP);
     return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
         return effect->close();
     });
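
EffectProxy::reopen() above fans the call out to every sub-effect and, on any failure, closes all of them so the proxy never stays half-open. A compact sketch of that rollback pattern with a stand-in sub-effect type:

#include <cstdio>
#include <vector>

struct SubEffect {
    bool reopenSucceeds;
    bool isOpen = true;
    bool reopen() { return reopenSucceeds; }
    void close() { isOpen = false; }
};

bool reopenAll(std::vector<SubEffect>& subs) {
    bool ok = true;
    for (auto& sub : subs) {
        if (!sub.reopen()) { ok = false; break; }
    }
    if (!ok) {
        for (auto& sub : subs) sub.close();  // roll back: close every sub-effect
    }
    return ok;
}

int main() {
    std::vector<SubEffect> subs{{true}, {false}};
    std::printf("reopen all: %s\n", reopenAll(subs) ? "ok" : "rolled back");
    return 0;
}
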
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
index 0d62642..9b9e8f1 100644
--- a/media/libaudiohal/impl/EffectProxy.h
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -62,6 +62,8 @@
                     specific,
             ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
     ndk::ScopedAStatus close() override;
+    ndk::ScopedAStatus reopen(
+            ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
     ndk::ScopedAStatus getDescriptor(
             ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
     ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 413a1f8..2b7f298 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -145,8 +145,30 @@
             std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
         for (const auto& s : configs) {
             AudioPortConfig portConfig;
-            RETURN_STATUS_IF_ERROR(setPortConfig(
-                            s, destinationPortIds, &portConfig, cleanups));
+            if (status_t status = setPortConfig(
+                            s, destinationPortIds, &portConfig, cleanups); status != OK) {
+                if (s.ext.getTag() == AudioPortExt::mix) {
+                    // See b/315528763. Despite that the framework knows the actual format of
+                    // the mix port, it still uses the original format. Luckily, there is
+                    // the I/O handle which can be used to find the mix port.
+                    ALOGI("fillPortConfigs: retrying to find a mix port config with default "
+                            "configuration");
+                    if (auto it = findPortConfig(std::nullopt, s.flags,
+                                    s.ext.get<AudioPortExt::mix>().handle);
+                            it != mPortConfigs.end()) {
+                        portConfig = it->second;
+                    } else {
+                        const std::string flags = s.flags.has_value() ?
+                                s.flags->toString() : "<unspecified>";
+                        ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
+                                "not found in module %s", flags.c_str(),
+                                s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+                        return BAD_VALUE;
+                    }
+                } else {
+                    return status;
+                }
+            }
             LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
                     "fillPortConfigs: initial config: %s, port config: %s",
                     s.toString().c_str(), portConfig.toString().c_str());
@@ -216,6 +238,29 @@
     return OK;
 }
 
+status_t Hal2AidlMapper::createOrUpdatePortConfigRetry(
+        const AudioPortConfig& requestedPortConfig, AudioPortConfig* result, bool* created) {
+    AudioPortConfig suggestedOrAppliedPortConfig;
+    RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig,
+                    &suggestedOrAppliedPortConfig, created));
+    if (suggestedOrAppliedPortConfig.id == 0) {
+        // Try again with the suggested config
+        suggestedOrAppliedPortConfig.id = requestedPortConfig.id;
+        AudioPortConfig appliedPortConfig;
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
+                        &appliedPortConfig, created));
+        if (appliedPortConfig.id == 0) {
+            ALOGE("%s: module %s did not apply suggested config %s", __func__,
+                    mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
+            return NO_INIT;
+        }
+        *result = appliedPortConfig;
+    } else {
+        *result = suggestedOrAppliedPortConfig;
+    }
+    return OK;
+}
+
 void Hal2AidlMapper::eraseConnectedPort(int32_t portId) {
     mPorts.erase(portId);
     mConnectedPorts.erase(portId);
@@ -225,6 +270,7 @@
         ALOGD("%s: disconnected port replacement: %s", __func__, port.toString().c_str());
         mDisconnectedPortReplacement = std::pair<int32_t, AudioPort>();
     }
+    updateDynamicMixPorts();
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
@@ -272,27 +318,19 @@
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
-        AudioPortConfig suggestedOrAppliedPortConfig;
-        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig,
-                        &suggestedOrAppliedPortConfig, created));
-        if (suggestedOrAppliedPortConfig.id == 0) {
-            // Try again with the suggested config
-            suggestedOrAppliedPortConfig.id = requestedPortConfig.id;
-            AudioPortConfig appliedPortConfig;
-            RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
-                            &appliedPortConfig, created));
-            if (appliedPortConfig.id == 0) {
-                ALOGE("%s: module %s did not apply suggested config %s", __func__,
-                        mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
-                return NO_INIT;
-            }
-            *portConfig = appliedPortConfig;
-        } else {
-            *portConfig = suggestedOrAppliedPortConfig;
-        }
+        return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
     } else {
-        *portConfig = portConfigIt->second;
-        *created = false;
+        AudioPortConfig requestedPortConfig = portConfigIt->second;
+        if (config != nullptr) {
+            setPortConfigFromConfig(&requestedPortConfig, *config);
+        }
+
+        if (requestedPortConfig != portConfigIt->second) {
+            return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
+        } else {
+            *portConfig = portConfigIt->second;
+            *created = false;
+        }
     }
     return OK;
 }
@@ -348,12 +386,12 @@
         return BAD_VALUE;
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
-        if (requestedPortConfig.ext.getTag() == AudioPortExt::Tag::mix) {
-            AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
-            if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
-                    source != AudioSource::SYS_RESERVED_INVALID) {
-                mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
-            }
+        setPortConfigFromConfig(&requestedPortConfig, config);
+
+        AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
+        if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
+                source != AudioSource::SYS_RESERVED_INVALID) {
+            mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
         }
 
         if (requestedPortConfig != portConfigIt->second) {
@@ -388,9 +426,19 @@
                 requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
                 portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
-        return findOrCreateDevicePortConfig(
-                requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
-                portConfig, created);
+        if (const auto& p = requestedPortConfig;
+                p.sampleRate.has_value() && p.channelMask.has_value() &&
+                p.format.has_value()) {
+            AudioConfig config;
+            setConfigFromPortConfig(&config, requestedPortConfig);
+            return findOrCreateDevicePortConfig(
+                    requestedPortConfig.ext.get<Tag::device>().device, &config,
+                    portConfig, created);
+        } else {
+            return findOrCreateDevicePortConfig(
+                    requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
+                    portConfig, created);
+        }
     }
     ALOGW("%s: unsupported audio port config: %s",
             __func__, requestedPortConfig.toString().c_str());
@@ -666,6 +714,14 @@
     return false;
 }
 
+status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
+    auto portsIt = findPort(devicePort.ext.get<AudioPortExt::device>().device);
+    if (portsIt == mPorts.end()) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mModule->prepareToDisconnectExternalDevice(portsIt->second.id));
+}
+
 status_t Hal2AidlMapper::prepareToOpenStream(
         int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
         AudioSource source, Cleanups* cleanups, AudioConfig* config,
@@ -674,8 +730,7 @@
             this, __func__, ioHandle, device.toString().c_str(),
             flags.toString().c_str(), toString(source).c_str(),
             config->toString().c_str(), mixPortConfig->toString().c_str());
-    resetUnusedPatchesAndPortConfigs();
-    const bool isInput = flags.getTag() == AudioIoFlags::Tag::input;
+    resetUnusedPatchesPortConfigsAndPorts();
     const AudioConfig initialConfig = *config;
     // Find / create AudioPortConfigs for the device port and the mix port,
     // then find / create a patch between them, and open a stream on the mix port.
@@ -687,8 +742,38 @@
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPortConfig, devicePortConfig.id);
     }
+    status_t status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+            devicePortConfig.id, flags, source, initialConfig, cleanups, config,
+            mixPortConfig, patch);
+    if (status != OK) {
+        // If using the client-provided config did not work out for establishing a mix port config
+        // or patching, try with the device port config. Note that in general device port config and
+        // mix port config are not required to be the same, however they must match if the HAL
+        // module can't perform audio stream conversions.
+        AudioConfig deviceConfig = initialConfig;
+        if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
+            ALOGD("%s: retrying with device port config: %s", __func__,
+                    devicePortConfig.toString().c_str());
+            status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+                    devicePortConfig.id, flags, source, initialConfig, cleanups,
+                    &deviceConfig, mixPortConfig, patch);
+            if (status == OK) {
+                *config = deviceConfig;
+            }
+        }
+    }
+    return status;
+}
+
+status_t Hal2AidlMapper::prepareToOpenStreamHelper(
+        int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+        const AudioIoFlags& flags, AudioSource source, const AudioConfig& initialConfig,
+        Cleanups* cleanups, AudioConfig* config, AudioPortConfig* mixPortConfig,
+        AudioPatch* patch) {
+    const bool isInput = flags.getTag() == AudioIoFlags::Tag::input;
+    bool created = false;
     RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
-                    std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
+                    std::set<int32_t>{devicePortId}, mixPortConfig, &created));
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
     }
@@ -715,7 +800,7 @@
         ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
                 config->toString().c_str());
         RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
-                        std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
+                        std::set<int32_t>{devicePortId}, mixPortConfig, &created));
         if (created) {
             cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
         }
@@ -728,10 +813,10 @@
     }
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfig.id}, {mixPortConfig->id}, patch, &created));
+                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfig.id}, patch, &created));
+                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
     }
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
@@ -773,7 +858,7 @@
             result = BAD_VALUE;
         }
     }
-    resetUnusedPortConfigs();
+    resetUnusedPortConfigsAndPorts();
     return result;
 }
 
@@ -790,7 +875,7 @@
     ALOGE("%s: port config id %d not found", __func__, portConfigId);
 }
 
-void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
+void Hal2AidlMapper::resetUnusedPatchesPortConfigsAndPorts() {
     // Since patches can be created independently of streams via 'createOrUpdatePatch',
     // here we only clean up patches for released streams.
     std::set<int32_t> patchesToRelease;
@@ -804,11 +889,11 @@
             it = mStreams.erase(it);
         }
     }
-    // 'releaseAudioPatches' also resets unused port configs.
+    // 'releaseAudioPatches' also resets unused port configs and ports.
     releaseAudioPatches(patchesToRelease);
 }
 
-void Hal2AidlMapper::resetUnusedPortConfigs() {
+void Hal2AidlMapper::resetUnusedPortConfigsAndPorts() {
     // The assumption is that port configs are used to create patches
     // (or to open streams, but that involves creation of patches, too). Thus,
     // orphaned port configs can and should be reset.
@@ -849,6 +934,7 @@
 }
 
 status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
+    resetUnusedPatchesPortConfigsAndPorts();
     if (connected) {
         AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
         std::optional<AudioPort> templatePort;
@@ -883,7 +969,6 @@
             }
             templatePort = portsIt->second;
         }
-        resetUnusedPatchesAndPortConfigs();
 
         // Use the ID of the "template" port, use all the information from the provided port.
         AudioPort connectedPort = devicePort;
@@ -910,7 +995,6 @@
             ALOGD("%s: device port for device %s found in the module %s",
                     __func__, matchDevice.toString().c_str(), mInstance.c_str());
         }
-        resetUnusedPatchesAndPortConfigs();
 
         // Disconnection of remote submix out with address "0" is a special case. We need to replace
         // the connected port entry with the "augmented template".
@@ -943,6 +1027,9 @@
     if (status == OK) {
         auto portIt = mPorts.find(portId);
         if (portIt != mPorts.end()) {
+            if (port->ext.getTag() == AudioPortExt::Tag::mix && portIt->second != *port) {
+                mDynamicMixPortIds.insert(portId);
+            }
             portIt->second = *port;
         } else {
             ALOGW("%s, port(%d) returned successfully from the HAL but not it is not cached",
@@ -991,4 +1078,15 @@
     return OK;
 }
 
+void Hal2AidlMapper::updateDynamicMixPorts() {
+    for (int32_t portId : mDynamicMixPortIds) {
+        if (auto it = mPorts.find(portId); it != mPorts.end()) {
+            updateAudioPort(portId, &it->second);
+        } else {
+            // This must not happen
+            ALOGE("%s, cannot find port for id=%d", __func__, portId);
+        }
+    }
+}
+
 } // namespace android
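
createOrUpdatePortConfigRetry() above factors out a small negotiation: when the module cannot apply the requested port config, it returns one with id == 0 carrying a suggestion; the mapper re-submits that suggestion once and gives up if it is rejected too. A toy sketch of that handshake with hypothetical types:

#include <cstdio>

struct PortConfig { int id = 0; int sampleRate = 0; };

// Stand-in for the HAL call: only 48000 Hz is applied, anything else is a suggestion.
PortConfig createOrUpdate(const PortConfig& requested) {
    if (requested.sampleRate == 48000) return {7 /* applied id */, 48000};
    return {0 /* suggestion only */, 48000};
}

bool negotiate(PortConfig requested, PortConfig* result) {
    PortConfig suggestedOrApplied = createOrUpdate(requested);
    if (suggestedOrApplied.id == 0) {
        suggestedOrApplied.id = requested.id;       // retry once with the suggestion
        const PortConfig applied = createOrUpdate(suggestedOrApplied);
        if (applied.id == 0) return false;          // module rejected its own suggestion
        *result = applied;
    } else {
        *result = suggestedOrApplied;
    }
    return true;
}

int main() {
    PortConfig out;
    const bool ok = negotiate({0, 44100}, &out);
    std::printf("negotiated: %s, id %d, rate %d\n", ok ? "yes" : "no", out.id, out.sampleRate);
    return 0;
}
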
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index ee55b22..f937173 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -72,6 +72,8 @@
         return ::aidl::android::convertContainer(mRoutes, routes, converter);
     }
     status_t initialize();
+    status_t prepareToDisconnectExternalDevice(
+            const ::aidl::android::media::audio::common::AudioPort& devicePort);
     // If the resulting 'mixPortConfig->id' is 0, that means the stream was not created,
     // and 'config' is a suggested config.
     status_t prepareToOpenStream(
@@ -89,7 +91,7 @@
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
         Cleanups* cleanups = nullptr);
     status_t releaseAudioPatch(int32_t patchId);
-    void resetUnusedPatchesAndPortConfigs();
+    void resetUnusedPatchesPortConfigsAndPorts();
     status_t setDevicePortConnectedState(
             const ::aidl::android::media::audio::common::AudioPort& devicePort, bool connected);
 
@@ -124,6 +126,9 @@
     status_t createOrUpdatePortConfig(
             const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
             ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
+    status_t createOrUpdatePortConfigRetry(
+            const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+            ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
     void eraseConnectedPort(int32_t portId);
     status_t findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds,
@@ -164,6 +169,14 @@
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle);
     bool isPortBeingHeld(int32_t portId);
+    status_t prepareToOpenStreamHelper(
+        int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+        const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+        ::aidl::android::media::audio::common::AudioSource source,
+        const ::aidl::android::media::audio::common::AudioConfig& initialConfig,
+        Cleanups* cleanups, ::aidl::android::media::audio::common::AudioConfig* config,
+        ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch);
     bool portConfigBelongsToPort(int32_t portConfigId, int32_t portId) {
         auto it = mPortConfigs.find(portConfigId);
         return it != mPortConfigs.end() && it->second.portId == portId;
@@ -171,10 +184,11 @@
     status_t releaseAudioPatches(const std::set<int32_t>& patchIds);
     void resetPatch(int32_t patchId) { (void)releaseAudioPatch(patchId); }
     void resetPortConfig(int32_t portConfigId);
-    void resetUnusedPortConfigs();
+    void resetUnusedPortConfigsAndPorts();
     status_t updateAudioPort(
             int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
     status_t updateRoutes();
+    void updateDynamicMixPorts();
 
     Ports mPorts;
     // Remote submix "template" ports (no address specified, no profiles).
@@ -192,6 +206,7 @@
     ConnectedPorts mConnectedPorts;
     std::pair<int32_t, ::aidl::android::media::audio::common::AudioPort>
             mDisconnectedPortReplacement;
+    std::set<int32_t> mDynamicMixPortIds;
 };
 
 }  // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 378d919..2a8ebc6 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -234,7 +234,9 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return mStream->dump(fd, Args(args).args(), args.size());
+    status_t status = mStream->dump(fd, Args(args).args(), args.size());
+    mStreamPowerLog.dump(fd);
+    return status;
 }
 
 status_t StreamHalAidl::start() {
@@ -786,7 +788,7 @@
     if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
                 parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
                 [&](int value) {
-                    return value > 0 ?
+                    return value >= 0 ?
                             mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
                 }))) {
         updateMetadata = true;
@@ -808,6 +810,7 @@
                         mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
                                 ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                                         channel_mask, false /*isInput*/));
+                        return OK;
                     }
                     return BAD_VALUE;
                 }))) {
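
The two setParameters fixes above (accepting an average bit rate of zero and adding the missing `return OK;`) both hinge on the filter lambda producing a status on every path. A small sketch of that contract with a hypothetical helper, not the AOSP stream class:

#include <cstdio>
#include <functional>
#include <optional>

constexpr int OK_STATUS = 0;
constexpr int BAD_VALUE_STATUS = -22;

// Runs the callback only when the key was present; an absent key is not an error.
int processIfPresent(std::optional<int> value, const std::function<int(int)>& cb) {
    return value.has_value() ? cb(*value) : OK_STATUS;
}

int main() {
    int averageBitRate = -1;
    // The change above relaxes the check to `>= 0`, so zero is now accepted.
    const int status = processIfPresent(0, [&](int value) {
        if (value >= 0) {
            averageBitRate = value;
            return OK_STATUS;        // every branch must return a status
        }
        return BAD_VALUE_STATUS;
    });
    std::printf("status %d, bit rate %d\n", status, averageBitRate);
    return 0;
}
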
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 72eadc6..77c75db 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -840,7 +840,7 @@
             const android::hardware::hidl_vec<uint8_t>& audioMetadata)  override {
         sp<StreamOutHalHidl> stream = mStream.promote();
         if (stream != nullptr) {
-            std::basic_string<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
+            std::vector<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
             stream->onCodecFormatChanged(metadataBs);
         }
         return Void();
@@ -967,7 +967,7 @@
     callback->onError();
 }
 
-void StreamOutHalHidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) {
+void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
     sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
     if (callback == nullptr) return;
     ALOGV("asyncCodecFormatCallback %s", __func__);
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 5361047..48da633 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -194,7 +194,7 @@
     status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
 
     // Methods used by StreamCodecFormatCallback (HIDL).
-    void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
+    void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs);
 
     status_t setLatencyMode(audio_latency_mode_t mode) override;
     status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index 49e6827..d1794f0 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -16,17 +16,17 @@
 
 #include <cstdint>
 #include <cstring>
-#include <optional>
 #define LOG_TAG "AidlConversionSpatializer"
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/effect/DefaultExtension.h>
 #include <aidl/android/hardware/audio/effect/VendorExtension.h>
 #include <error/expected_utils.h>
-#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
+#include <media/AidlConversionNdk.h>
+#include <system/audio_effects/aidl_effects_utils.h>
 #include <system/audio_effects/effect_spatializer.h>
-
 #include <utils/Log.h>
 
 #include "AidlConversionSpatializer.h"
@@ -34,38 +34,321 @@
 namespace android {
 namespace effect {
 
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
-using ::aidl::android::hardware::audio::effect::DefaultExtension;
-using ::aidl::android::hardware::audio::effect::Parameter;
-using ::aidl::android::hardware::audio::effect::VendorExtension;
-using ::android::status_t;
+using aidl::android::getParameterSpecificField;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::hardware::audio::effect::DefaultExtension;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Range;
+using aidl::android::hardware::audio::effect::Spatializer;
+using aidl::android::hardware::audio::effect::VendorExtension;
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::HeadTracking;
+using aidl::android::media::audio::common::Spatialization;
+using aidl::android::media::audio::common::toString;
+using android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
+bool AidlConversionSpatializer::isSpatializerParameterSupported() {
+    return mIsSpatializerAidlParamSupported.value_or(
+            (mIsSpatializerAidlParamSupported =
+                     [&]() {
+                         ::aidl::android::hardware::audio::effect::Parameter aidlParam;
+                         auto id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                              Spatializer::vendor);
+                         // No range defined in descriptor capability means no Spatializer AIDL
+                         // implementation BAD_VALUE return from getParameter indicates the
+                         // parameter is not supported by HAL
+                         return mDesc.capability.range.getTag() == Range::spatializer &&
+                                mEffect->getParameter(id, &aidlParam).getStatus() !=
+                                        android::BAD_VALUE;
+                     }())
+                    .value());
+}
+
 status_t AidlConversionSpatializer::setParameter(EffectParamReader& param) {
-    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+    Parameter aidlParam;
+    if (isSpatializerParameterSupported()) {
+        uint32_t command = 0;
+        if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+            OK != param.readFromParameter(&command)) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            return BAD_VALUE;
+        }
+
+        switch (command) {
+            case SPATIALIZER_PARAM_LEVEL: {
+                Spatialization::Level level = Spatialization::Level::NONE;
+                if (OK != param.readFromValue(&level)) {
+                    ALOGE("%s invalid level value %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    spatializationLevel, level);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+                HeadTracking::Mode mode = HeadTracking::Mode::DISABLED;
+                if (OK != param.readFromValue(&mode)) {
+                    ALOGE("%s invalid mode value %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingMode,
+                                                    mode);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEAD_TO_STAGE: {
+                const size_t valueSize = param.getValueSize();
+                if (valueSize / sizeof(float) > 6 || valueSize % sizeof(float) != 0) {
+                    ALOGE("%s invalid parameter value size %zu", __func__, valueSize);
+                    return BAD_VALUE;
+                }
+                std::array<float, 6> headToStage = {};
+                for (size_t i = 0; i < valueSize / sizeof(float); i++) {
+                    if (OK != param.readFromValue(&headToStage[i])) {
+                        ALOGE("%s failed to read headToStage from %s", __func__,
+                              param.toString().c_str());
+                        return BAD_VALUE;
+                    }
+                }
+                HeadTracking::SensorData sensorData =
+                        HeadTracking::SensorData::make<HeadTracking::SensorData::headToStage>(
+                                headToStage);
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    headTrackingSensorData, sensorData);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
+                int32_t modeInt32 = 0;
+                int32_t sensorId = -1;
+                if (OK != param.readFromValue(&modeInt32) || OK != param.readFromValue(&sensorId)) {
+                    ALOGE("%s %d invalid parameter value %s", __func__, __LINE__,
+                          param.toString().c_str());
+                    return BAD_VALUE;
+                }
+
+                const auto mode = static_cast<HeadTracking::ConnectionMode>(modeInt32);
+                if (mode < *ndk::enum_range<HeadTracking::ConnectionMode>().begin() ||
+                    mode > *ndk::enum_range<HeadTracking::ConnectionMode>().end()) {
+                    ALOGE("%s %d invalid mode %d", __func__, __LINE__, modeInt32);
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    headTrackingConnectionMode, mode);
+                if (status_t status = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+                    status != OK) {
+                    ALOGE("%s failed to set headTrackingConnectionMode %s", __func__,
+                          toString(mode).c_str());
+                    return status;
+                }
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingSensorId,
+                                                    sensorId);
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+            }
+            default: {
+                ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
+                return BAD_VALUE;
+            }
+        }
+    } else {
+        aidlParam = VALUE_OR_RETURN_STATUS(
+                ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+    }
+
+    ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
-    DefaultExtension defaultExt;
-    // read parameters into DefaultExtension vector<uint8_t>
-    defaultExt.bytes.resize(param.getParameterSize());
-    if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
-    }
+    if (isSpatializerParameterSupported()) {
+        uint32_t command = 0;
+        if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+            OK != param.readFromParameter(&command)) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            return BAD_VALUE;
+        }
 
-    VendorExtension idTag;
-    idTag.extension.setParcelable(defaultExt);
-    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
-    Parameter aidlParam;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-    // copy the AIDL extension data back to effect_param_t
-    return VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+        switch (command) {
+            case SPATIALIZER_PARAM_SUPPORTED_LEVELS: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationLevel);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
+                    const auto spatializer =
+                            Spatializer::make<Spatializer::spatializationLevel>(level);
+                    if (spatializer >= range->min && spatializer <= range->max) {
+                        if (status_t status = param.writeToValue(&level); status != OK) {
+                            ALOGI("%s %d: write level %s to value failed %d", __func__, __LINE__,
+                                  toString(level).c_str(), status);
+                            return status;
+                        }
+                    }
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_LEVEL: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                              Spatializer::spatializationLevel);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto level = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::spatializationLevel,
+                        Spatialization::Level));
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                return param.writeToValue(&level);
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationLevel);
+                if (!range) {
+                    ALOGE("%s %d: range not defined for spatializationLevel", __func__, __LINE__);
+                    return BAD_VALUE;
+                }
+                const auto& nonSupport = Spatializer::make<Spatializer::spatializationLevel>(
+                        Spatialization::Level::NONE);
+                const bool support = !(range->min > range->max ||
+                                       (range->min == nonSupport && range->max == nonSupport));
+                return param.writeToValue(&support);
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                Spatializer::headTrackingMode);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::headTrackingMode,
+                        HeadTracking::Mode));
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                return param.writeToValue(&mode);
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                              Spatializer::supportedChannelLayout);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
+                        std::vector<AudioChannelLayout>));
+                for (const auto& layout : supportedLayouts) {
+                    audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                                    layout, false /* isInput */));
+                    if (status_t status = param.writeToValue(&mask); status != OK) {
+                        ALOGI("%s %d: write mask %s to value failed %d", __func__, __LINE__,
+                              layout.toString().c_str(), status);
+                        return status;
+                    }
+                }
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                return OK;
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationMode);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
+                    if (const auto spatializer =
+                                Spatializer::make<Spatializer::spatializationMode>(mode);
+                        spatializer >= range->min && spatializer <= range->max) {
+                        if (status_t status = param.writeToValue(&mode); status != OK) {
+                            ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+                                  toString(mode).c_str(), status);
+                            return status;
+                        }
+                    }
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::headTrackingConnectionMode);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
+                    if (const auto spatializer =
+                                Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
+                        spatializer < range->min || spatializer > range->max) {
+                        continue;
+                    }
+                    if (status_t status = param.writeToValue(&mode); status != OK) {
+                        ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+                                toString(mode).c_str(), status);
+                        return status;
+                    }
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
+                status_t status = OK;
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(
+                        Spatializer, spatializerTag, Spatializer::headTrackingConnectionMode);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer,
+                        Spatializer::headTrackingConnectionMode, HeadTracking::ConnectionMode));
+
+                id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                Spatializer::headTrackingSensorId);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto sensorId = VALUE_OR_RETURN_STATUS(
+                        GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Spatializer, spatializer,
+                                                     Spatializer::headTrackingSensorId, int32_t));
+                uint32_t modeInt32 = static_cast<uint32_t>(mode);
+                if (status = param.writeToValue(&modeInt32); status != OK) {
+                    ALOGI("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+                          toString(mode).c_str(), status);
+                    return status;
+                }
+                if (status = param.writeToValue(&sensorId); status != OK) {
+                    ALOGI("%s %d: write sensorId %d to value failed %d", __func__, __LINE__,
+                          sensorId, status);
+                    return status;
+                }
+                ALOGI("%s %d: %s", __func__, __LINE__, aidlParam.toString().c_str());
+                return OK;
+            }
+            default: {
+                ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
+                return BAD_VALUE;
+            }
+        }
+    } else {
+        Parameter aidlParam;
+        DefaultExtension defaultExt;
+        // read parameters into DefaultExtension vector<uint8_t>
+        defaultExt.bytes.resize(param.getParameterSize());
+        if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            param.setStatus(BAD_VALUE);
+            return BAD_VALUE;
+        }
+
+        VendorExtension idTag;
+        idTag.extension.setParcelable(defaultExt);
+        Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+        ALOGI("%s %d: %s", __func__, __LINE__,
+              aidlParam.get<Parameter::specific>().toString().c_str());
+        // copy the AIDL extension data back to effect_param_t
+        return VALUE_OR_RETURN_STATUS(
+                ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+    }
 }
 
 } // namespace effect
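
Several of the get-parameter cases above enumerate an AIDL enum and keep only the values that fall inside the [min, max] range advertised in the effect descriptor. A stripped-down sketch of that filtering with a hypothetical enum and range (not the AIDL types):

#include <cstdio>
#include <vector>

enum class Level { NONE, MULTICHANNEL, BED_PLUS_OBJECTS };

struct Range { Level min; Level max; };

std::vector<Level> supportedLevels(const Range& range) {
    std::vector<Level> out;
    for (Level level : {Level::NONE, Level::MULTICHANNEL, Level::BED_PLUS_OBJECTS}) {
        if (level >= range.min && level <= range.max) {
            out.push_back(level);  // inside the advertised capability range
        }
    }
    return out;
}

int main() {
    const Range capability{Level::NONE, Level::MULTICHANNEL};  // assumed descriptor range
    std::printf("supported levels: %zu\n", supportedLevels(capability).size());
    return 0;
}
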
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
index 7c60b14..444e5a7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
@@ -32,6 +32,8 @@
     ~AidlConversionSpatializer() {}
 
   private:
+    std::optional<bool> mIsSpatializerAidlParamSupported;
+    bool isSpatializerParameterSupported();
     status_t setParameter(utils::EffectParamReader& param) override;
     status_t getParameter(utils::EffectParamWriter& param) override;
 };
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index bb5f851..7f6c1fb 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -78,8 +78,9 @@
     virtual status_t getParameters(const String8& keys, String8 *values) = 0;
 
     // Returns audio input buffer size according to parameters passed.
-    virtual status_t getInputBufferSize(const struct audio_config *config,
-            size_t *size) = 0;
+    // If the HAL cannot open an input with the provided parameters, the method
+    // returns BAD_VALUE and modifies the provided `config`.
+    virtual status_t getInputBufferSize(struct audio_config *config, size_t *size) = 0;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
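
The updated getInputBufferSize() contract above lets the HAL reject the request while rewriting `config`. A sketch of how a caller might react to that, using a stand-in HAL function rather than the real interface:

#include <cstddef>
#include <cstdio>

struct AudioConfigSketch { int sampleRate; int channelCount; };

constexpr int kOk = 0;
constexpr int kBadValue = -22;

// Stand-in HAL: only 48000 Hz stereo is supported; anything else gets rewritten.
int getInputBufferSize(AudioConfigSketch* config, size_t* size) {
    if (config->sampleRate == 48000 && config->channelCount == 2) {
        *size = static_cast<size_t>(48000 / 100) * 2 * sizeof(short);  // 10 ms, 16-bit stereo
        return kOk;
    }
    config->sampleRate = 48000;
    config->channelCount = 2;
    return kBadValue;
}

int main() {
    AudioConfigSketch config{44100, 1};
    size_t size = 0;
    if (getInputBufferSize(&config, &size) == kBadValue) {
        // `config` now holds what the HAL can do; retry with it.
        getInputBufferSize(&config, &size);
    }
    std::printf("buffer size: %zu bytes at %d Hz\n", size, config.sampleRate);
    return 0;
}
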
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index 8397e9b..c34a671 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -42,8 +42,6 @@
     // necessary to release references to the returned object.
     virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
 
-    virtual status_t getHalPids(std::vector<pid_t> *pids) = 0;
-
     // Sets a DevicesFactoryHalCallback to notify the client.
     // The callback can be only set once.
     virtual status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) = 0;
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index a780a17..37615af 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -116,7 +116,7 @@
 
 class StreamOutHalInterfaceEventCallback : public virtual RefBase {
 public:
-    virtual void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) = 0;
+    virtual void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) = 0;
 
 protected:
     StreamOutHalInterfaceEventCallback() = default;
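A minimal sketch of an implementation against the updated callback signature (the class name and log line are illustrative):

    class MyCodecFormatListener : public StreamOutHalInterfaceEventCallback {
      public:
        void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) override {
            // Codec metadata now arrives as a plain byte vector instead of
            // std::basic_string<uint8_t>.
            ALOGV("codec format changed, %zu metadata bytes", metadataBs.size());
        }
    };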
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 8f011c8..b9af0bf 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
@@ -49,6 +50,7 @@
     shared_libs: [
         "libvibrator",
     ],
+    test_config_template: "AudioHalTestTemplate.xml",
 }
 
 cc_test {
@@ -63,3 +65,14 @@
     ],
     header_libs: ["libaudiohalimpl_headers"],
 }
+
+cc_test {
+    name: "EffectHalVersionCompatibilityTest",
+    srcs: [
+        "EffectHalVersionCompatibility_test.cpp",
+        ":audio_effect_hal_aidl_src_files",
+    ],
+    defaults: ["libaudiohal_aidl_test_default"],
+    header_libs: ["libaudiohalimpl_headers"],
+    static_libs: ["libgmock"],
+}
diff --git a/media/libaudiohal/tests/AudioHalTestTemplate.xml b/media/libaudiohal/tests/AudioHalTestTemplate.xml
new file mode 100644
index 0000000..b1cb2f0
--- /dev/null
+++ b/media/libaudiohal/tests/AudioHalTestTemplate.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Runs {MODULE}.">
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
+
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="{MODULE}" />
+        <option name="native-test-timeout" value="10m" />
+    </test>
+</configuration>
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 1204a3b..3541078 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -443,21 +443,6 @@
     EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
 }
 
-// Without a vendor extension, any unrecognized parameters must be ignored.
-TEST_F(DeviceHalAidlTest, VendorParameterIgnored) {
-    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
-    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
-    EXPECT_EQ(OK, mDevice->setParameters(createParameterString("random_name", "random_value")));
-    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
-    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
-
-    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
-    String8 values;
-    EXPECT_EQ(OK, mDevice->getParameters(String8("random_name"), &values));
-    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
-    EXPECT_EQ(0UL, values.length());
-}
-
 class DeviceHalAidlVendorParametersTest : public testing::Test {
   public:
     void SetUp() override {
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
new file mode 100644
index 0000000..e8731ea
--- /dev/null
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <unordered_map>
+#define LOG_TAG "EffectHalVersionCompatibilityTest"
+
+#include <EffectHalAidl.h>
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_aidl_utils.h>
+#include <system/audio_config.h>
+#include <system/audio_effects/audio_effects_utils.h>
+#include <system/audio_effects/effect_uuid.h>
+#include <utils/Log.h>
+
+using aidl::android::hardware::audio::effect::CommandId;
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::IFactory;
+using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Processing;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+using android::OK;
+using android::sp;
+using android::effect::EffectHalAidl;
+using testing::_;
+using testing::Eq;
+
+namespace {
+
+/**
+ * Maps of parameters to the AIDL version in which each was introduced.
+ */
+// Parameters defined directly in the Parameter union, except Parameter::specific (see
+// kParamIdEffectVersionMap and kParamEffectVersionMap below).
+static const std::unordered_map<Parameter::Tag, int /* version */> kParamTagVersionMap = {
+        {Parameter::common, 1},         {Parameter::deviceDescription, 1},
+        {Parameter::mode, 1},           {Parameter::source, 1},
+        {Parameter::offload, 1},        {Parameter::volumeStereo, 1},
+        {Parameter::sourceMetadata, 2}, {Parameter::sinkMetadata, 2},
+};
+
+// Map of the AIDL version in which each effect-specific Parameter::Id tag was introduced.
+// Covers the tags defined in the Parameter::Id union, except Parameter::Id::commonTag (covered by
+// kParamTagVersionMap).
+static const std::unordered_map<Parameter::Id::Tag, int /* version */> kParamIdEffectVersionMap = {
+        {Parameter::Id::vendorEffectTag, 1},
+        {Parameter::Id::acousticEchoCancelerTag, 1},
+        {Parameter::Id::automaticGainControlV1Tag, 1},
+        {Parameter::Id::automaticGainControlV2Tag, 1},
+        {Parameter::Id::bassBoostTag, 1},
+        {Parameter::Id::downmixTag, 1},
+        {Parameter::Id::dynamicsProcessingTag, 1},
+        {Parameter::Id::environmentalReverbTag, 1},
+        {Parameter::Id::equalizerTag, 1},
+        {Parameter::Id::hapticGeneratorTag, 1},
+        {Parameter::Id::loudnessEnhancerTag, 1},
+        {Parameter::Id::noiseSuppressionTag, 1},
+        {Parameter::Id::presetReverbTag, 1},
+        {Parameter::Id::virtualizerTag, 1},
+        {Parameter::Id::visualizerTag, 1},
+        {Parameter::Id::volumeTag, 1},
+        {Parameter::Id::spatializerTag, 2},
+};
+// Tags defined in the Parameter::Specific union.
+static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
+        kParamEffectVersionMap = {
+                {Parameter::Specific::vendorEffect, 1},
+                {Parameter::Specific::acousticEchoCanceler, 1},
+                {Parameter::Specific::automaticGainControlV1, 1},
+                {Parameter::Specific::automaticGainControlV2, 1},
+                {Parameter::Specific::bassBoost, 1},
+                {Parameter::Specific::downmix, 1},
+                {Parameter::Specific::dynamicsProcessing, 1},
+                {Parameter::Specific::environmentalReverb, 1},
+                {Parameter::Specific::equalizer, 1},
+                {Parameter::Specific::hapticGenerator, 1},
+                {Parameter::Specific::loudnessEnhancer, 1},
+                {Parameter::Specific::noiseSuppression, 1},
+                {Parameter::Specific::presetReverb, 1},
+                {Parameter::Specific::virtualizer, 1},
+                {Parameter::Specific::visualizer, 1},
+                {Parameter::Specific::volume, 1},
+                {Parameter::Specific::spatializer, 2},
+};
+
+class MockFactory : public IFactory {
+  public:
+    explicit MockFactory(int version) : IFactory(), mVersion(version) {}
+    MOCK_METHOD(ndk::ScopedAStatus, queryEffects,
+                (const std::optional<AudioUuid>& in_type_uuid,
+                 const std::optional<AudioUuid>& in_impl_uuid,
+                 const std::optional<AudioUuid>& in_proxy_uuid,
+                 std::vector<Descriptor>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, queryProcessing,
+                (const std::optional<Processing::Type>& in_type,
+                 std::vector<Processing>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, createEffect,
+                (const AudioUuid& in_impl_uuid, std::shared_ptr<IEffect>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, destroyEffect, (const std::shared_ptr<IEffect>& in_handle),
+                (override));
+
+    ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+        *_aidl_return = mVersion;
+        return ndk::ScopedAStatus::ok();
+    }
+
+    // These must be implemented but are not used in this test.
+    ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+    bool isRemote() { return false; }
+    ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+  private:
+    const int mVersion;
+};
+
+class MockEffect : public IEffect {
+  public:
+    explicit MockEffect(int version) : IEffect(), mVersion(version) {}
+    MOCK_METHOD(ndk::ScopedAStatus, open,
+                (const Parameter::Common& common,
+                 const std::optional<Parameter::Specific>& specific,
+                 IEffect::OpenEffectReturn* ret),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, close, (), (override));
+    MOCK_METHOD(binder_status_t, dump, (int fd, const char** args, uint32_t numArgs), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, command, (CommandId id), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getState, (State * state), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getDescriptor, (Descriptor * desc), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, destroy, (), ());
+
+    // reopen introduced in version kReopenSupportedVersion
+    ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+        return mVersion < kReopenSupportedVersion
+                       ? ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION)
+                       : ndk::ScopedAStatus::ok();
+    }
+
+    // Check version compatibility for all known parameter tags.
+    ndk::ScopedAStatus setParameter(const Parameter& param) override {
+        const auto paramTag = param.getTag();
+        switch (paramTag) {
+            case Parameter::common:
+            case Parameter::deviceDescription:
+            case Parameter::mode:
+            case Parameter::source:
+            case Parameter::offload:
+            case Parameter::volumeStereo:
+            case Parameter::sinkMetadata:
+                FALLTHROUGH_INTENDED;
+            case Parameter::sourceMetadata: {
+                if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+                    kParamTagVersionMap.at(paramTag) >= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+            case Parameter::specific: {
+                // TODO
+                break;
+            }
+        }
+        return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+    }
+
+    /**
+     * Only version compatibility matters here:
+     * @return BAD_VALUE if a tag is not supported by the current AIDL version.
+     * @return OK if a tag is supported by the current AIDL version.
+     */
+    ndk::ScopedAStatus getParameter(const Parameter::Id& id, Parameter*) override {
+        const auto idTag = id.getTag();
+        switch (idTag) {
+            case Parameter::Id::commonTag: {
+                const auto paramTag = id.get<Parameter::Id::commonTag>();
+                if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+                    kParamTagVersionMap.at(paramTag) >= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+            case Parameter::Id::vendorEffectTag:
+            case Parameter::Id::acousticEchoCancelerTag:
+            case Parameter::Id::automaticGainControlV1Tag:
+            case Parameter::Id::automaticGainControlV2Tag:
+            case Parameter::Id::bassBoostTag:
+            case Parameter::Id::downmixTag:
+            case Parameter::Id::dynamicsProcessingTag:
+            case Parameter::Id::environmentalReverbTag:
+            case Parameter::Id::equalizerTag:
+            case Parameter::Id::hapticGeneratorTag:
+            case Parameter::Id::loudnessEnhancerTag:
+            case Parameter::Id::noiseSuppressionTag:
+            case Parameter::Id::presetReverbTag:
+            case Parameter::Id::virtualizerTag:
+            case Parameter::Id::visualizerTag:
+            case Parameter::Id::volumeTag:
+                FALLTHROUGH_INTENDED;
+            case Parameter::Id::spatializerTag: {
+                if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
+                    kParamIdEffectVersionMap.at(idTag) >= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+        }
+        return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+    }
+
+    ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+        *_aidl_return = mVersion;
+        return ndk::ScopedAStatus::ok();
+    }
+
+    // These must be implemented but are not used in this test.
+    ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+    bool isRemote() { return false; }
+    ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+  private:
+    const int mVersion;
+};
+
+static const std::vector<AudioUuid> kTestParamUUIDs = {
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectUuidNull(),
+};
+static const std::vector<int> kTestParamVersion = {1, 2};  // Effect AIDL HAL versions to test
+
+enum ParamName { UUID, VERSION };
+using TestParam = std::tuple<AudioUuid, int /* version */>;
+
+class EffectHalVersionCompatibilityTest : public ::testing::TestWithParam<TestParam> {
+  public:
+    void SetUp() override {
+        mMockFactory = ndk::SharedRefBase::make<MockFactory>(mVersion);
+        ASSERT_NE(mMockFactory, nullptr);
+        mMockEffect = ndk::SharedRefBase::make<MockEffect>(mVersion);
+        ASSERT_NE(mMockEffect, nullptr);
+        mEffectHalAidl = sp<EffectHalAidl>::make(mMockFactory, mMockEffect, 0, 0, mDesc, false);
+        ASSERT_NE(mEffectHalAidl, nullptr);
+    }
+
+    void TearDown() override {
+        EXPECT_CALL(*mMockFactory, destroyEffect(_));
+        mEffectHalAidl.clear();
+        mMockEffect.reset();
+        mMockFactory.reset();
+    }
+
+  protected:
+    const int mVersion = std::get<VERSION>(GetParam());
+    const AudioUuid mTypeUuid = std::get<UUID>(GetParam());
+    const Descriptor mDesc = {.common.id.type = mTypeUuid};
+    std::shared_ptr<MockFactory> mMockFactory = nullptr;
+    std::shared_ptr<MockEffect> mMockEffect = nullptr;
+    sp<EffectHalAidl> mEffectHalAidl = nullptr;
+};
+
+TEST_P(EffectHalVersionCompatibilityTest, testEffectAidlHalCreateDestroy) {
+    // do nothing
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectHalVersionCompatibilityTestWithVersion, EffectHalVersionCompatibilityTest,
+        ::testing::Combine(testing::ValuesIn(kTestParamUUIDs),
+                           testing::ValuesIn(kTestParamVersion)),
+        [](const testing::TestParamInfo<EffectHalVersionCompatibilityTest::ParamType>& info) {
+            auto version = std::to_string(std::get<VERSION>(info.param));
+            auto uuid = android::audio::utils::toString(std::get<UUID>(info.param));
+            std::string name = "EffectHalVersionCompatibilityTest_V" + version + "_" + uuid;
+            std::replace_if(
+                    name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
+            return name;
+        });
+
+}  // namespace
\ No newline at end of file
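For reference, the INSTANTIATE_TEST_SUITE_P above combines the 16 effect type UUIDs in kTestParamUUIDs with the 2 entries in kTestParamVersion, so the suite expands to 32 parameterized instances, each named EffectHalVersionCompatibilityTest_V<version>_<uuid> with non-alphanumeric characters replaced by underscores.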
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index 0cb654c..d783c64 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -21,10 +21,13 @@
 #include <cstdint>
 #include <cstring>
 #include <memory>
+#include <string>
 #include <utility>
 #define LOG_TAG "EffectsFactoryHalInterfaceTest"
 
 #include <aidl/android/media/audio/common/AudioUuid.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
 #include <gtest/gtest.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -40,15 +43,18 @@
 #include <system/audio_effects/effect_hapticgenerator.h>
 #include <system/audio_effects/effect_loudnessenhancer.h>
 #include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_spatializer.h>
 #include <utils/RefBase.h>
 #include <vibrator/ExternalVibrationUtils.h>
 
 namespace android {
 
-using ::aidl::android::media::audio::common::AudioUuid;
-using ::android::audio::utils::toString;
+using aidl::android::media::audio::common::AudioUuid;
+using android::audio::utils::toString;
 using effect::utils::EffectParamReader;
 using effect::utils::EffectParamWriter;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
 
 // EffectsFactoryHalInterface
 TEST(libAudioHalTest, createEffectsFactoryHalInterface) {
@@ -144,34 +150,68 @@
     EXPECT_NE(0, version.getMajorVersion());
 }
 
+enum ParamSetGetType { SET_N_GET, SET_ONLY, GET_ONLY };
 class EffectParamCombination {
   public:
     template <typename P, typename V>
-    void init(const P& p, const V& v, size_t len) {
-        setBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
-        getBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
-        expectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
-        parameterSet =
-                std::make_shared<EffectParamReader>(createEffectParam(setBuffer.data(), p, v));
-        parameterGet =
-                std::make_shared<EffectParamReader>(createEffectParam(getBuffer.data(), p, v));
-        parameterExpect =
-                std::make_shared<EffectParamReader>(createEffectParam(expectBuffer.data(), p, v));
-        valueSize = len;
+    void init(const P& p, const V& v, size_t len, ParamSetGetType type) {
+        if (type != GET_ONLY) {
+            mSetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
+            mParameterSet =
+                    std::make_shared<EffectParamReader>(createEffectParam(mSetBuffer.data(), p, v));
+        }
+
+        if (type != SET_ONLY) {
+            mGetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+            mExpectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+            mParameterGet =
+                    std::make_shared<EffectParamReader>(createEffectParam(mGetBuffer.data(), p, v));
+            mParameterExpect = std::make_shared<EffectParamReader>(
+                    createEffectParam(mExpectBuffer.data(), p, v));
+            mValueSize = len;
+        }
+        mType = type;
     }
 
-    std::shared_ptr<EffectParamReader> parameterSet; /* setParameter */
-    std::shared_ptr<EffectParamReader> parameterGet; /* getParameter */
-    std::shared_ptr<EffectParamReader> parameterExpect; /* expected from getParameter */
-    size_t valueSize;   /* ValueSize expect to write in reply data buffer */
+    std::shared_ptr<EffectParamReader> mParameterSet;    /* setParameter */
+    std::shared_ptr<EffectParamReader> mParameterGet;    /* getParameter */
+    std::shared_ptr<EffectParamReader> mParameterExpect; /* expected from getParameter */
+    size_t mValueSize = 0ul; /* value size expected to be written into the reply data buffer */
+    ParamSetGetType mType = SET_N_GET;
+
+    std::string toString() {
+        uint32_t command = 0;
+        std::string str = "Command: ";
+        if (mType != GET_ONLY) {
+            str += (OK == mParameterSet->readFromParameter(&command) ? std::to_string(command)
+                                                                     : mParameterSet->toString());
+        } else {
+            str += (OK == mParameterGet->readFromParameter(&command) ? std::to_string(command)
+                                                                     : mParameterSet->toString());
+        }
+        str += "_";
+        str += toString(mType);
+        return str;
+    }
+
+    static std::string toString(ParamSetGetType type) {
+        switch (type) {
+            case SET_N_GET:
+                return "Type:SetAndGet";
+            case SET_ONLY:
+                return "Type:SetOnly";
+            case GET_ONLY:
+                return "Type:GetOnly";
+        }
+    }
 
   private:
-    std::vector<uint8_t> setBuffer;
-    std::vector<uint8_t> getBuffer;
-    std::vector<uint8_t> expectBuffer;
+    std::vector<uint8_t> mSetBuffer;
+    std::vector<uint8_t> mGetBuffer;
+    std::vector<uint8_t> mExpectBuffer;
 
     template <typename P, typename V>
-    EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
+    static EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
         effect_param_t* paramRet = (effect_param_t*)buf;
         paramRet->psize = sizeof(P);
         paramRet->vsize = sizeof(V);
@@ -184,48 +224,106 @@
 };
 
 template <typename P, typename V>
-std::shared_ptr<EffectParamCombination> createEffectParamCombination(const P& p, const V& v,
-                                                                     size_t len) {
+std::shared_ptr<EffectParamCombination> createEffectParamCombination(
+        const P& p, const V& v, size_t len, ParamSetGetType type = SET_N_GET) {
     auto comb = std::make_shared<EffectParamCombination>();
-    comb->init(p, v, len);
+    comb->init(p, v, len, type);
     return comb;
 }
 
-enum ParamName { TUPLE_UUID, TUPLE_PARAM_COMBINATION };
-using EffectParamTestTuple =
-        std::tuple<const effect_uuid_t* /* type UUID */, std::shared_ptr<EffectParamCombination>>;
-
+enum ParamName { TUPLE_UUID, TUPLE_IS_INPUT, TUPLE_PARAM_COMBINATION };
+using EffectParamTestTuple = std::tuple<const effect_uuid_t* /* type UUID */, bool /* isInput */,
+                                        std::vector<std::shared_ptr<EffectParamCombination>>>;
 static const effect_uuid_t EXTEND_EFFECT_TYPE_UUID = {
         0xfa81dbde, 0x588b, 0x11ed, 0x9b6a, {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
 constexpr std::array<uint8_t, 10> kVendorExtensionData({0xff, 0x5, 0x50, 0xab, 0xcd, 0x00, 0xbd,
                                                         0xdb, 0xee, 0xff});
-std::vector<EffectParamTestTuple> testPairs = {
-        std::make_tuple(FX_IID_AEC,
-                        createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
-                                                     sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(FX_IID_AGC,
-                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(SL_IID_BASSBOOST,
-                        createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(EFFECT_UIID_DOWNMIX,
-                        createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(SL_IID_DYNAMICSPROCESSING,
-                        createEffectParamCombination(
-                                std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
-                                30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)),
+static std::vector<EffectParamTestTuple> testPairs = {
         std::make_tuple(
-                FX_IID_LOUDNESS_ENHANCER,
-                createEffectParamCombination(LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
-                                             sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(FX_IID_NS,
-                        createEffectParamCombination(NS_PARAM_LEVEL, 1 /* level */,
-                                                     sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(&EXTEND_EFFECT_TYPE_UUID,
-                        createEffectParamCombination(8, kVendorExtensionData,
-                                                     sizeof(kVendorExtensionData)))};
+                FX_IID_AEC, true /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
+                                                     sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_AGC, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                SL_IID_BASSBOOST, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                EFFECT_UIID_DOWNMIX, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                SL_IID_DYNAMICSPROCESSING, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
+                        30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_LOUDNESS_ENHANCER, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
+                        sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_NS, true /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        NS_PARAM_LEVEL, 1 /* level */, sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_SPATIALIZER, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(SPATIALIZER_PARAM_LEVEL,
+                                                     SPATIALIZATION_LEVEL_MULTICHANNEL,
+                                                     sizeof(uint8_t), SET_N_GET),
+                        createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                                                     HeadTracking::Mode::RELATIVE_WORLD,
+                                                     sizeof(uint8_t), SET_N_GET),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_HEAD_TO_STAGE,
+                                std::array<float, 6>{.55f, 0.2f, 1.f, .999f, .43f, 19.f},
+                                sizeof(std::array<float, 6>), SET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_HEADTRACKING_CONNECTION,
+                                std::array<uint32_t, 2>{
+                                        static_cast<uint32_t>(HeadTracking::ConnectionMode::
+                                                                      DIRECT_TO_SENSOR_TUNNEL),
+                                        0x5e /* sensorId */},
+                                sizeof(std::array<uint32_t, 2>), SET_N_GET),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_LEVELS,
+                                std::array<Spatialization::Level, 3>{
+                                        Spatialization::Level::NONE,
+                                        Spatialization::Level::MULTICHANNEL,
+                                        Spatialization::Level::BED_PLUS_OBJECTS},
+                                sizeof(std::array<uint8_t, 3>), GET_ONLY),
+                        createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED, true,
+                                                     sizeof(bool), GET_ONLY),
+                        createEffectParamCombination(SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+                                                     AUDIO_CHANNEL_OUT_5POINT1, sizeof(uint8_t),
+                                                     GET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
+                                std::array<Spatialization::Mode, 2>{
+                                        Spatialization::Mode::BINAURAL,
+                                        Spatialization::Mode::TRANSAURAL},
+                                sizeof(std::array<uint8_t, 2>), GET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
+                                std::array<HeadTracking::ConnectionMode, 3>{
+                                        HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED,
+                                        HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW,
+                                        HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL},
+                                sizeof(std::array<uint8_t, 3>), GET_ONLY),
+                }),
+        std::make_tuple(
+                &EXTEND_EFFECT_TYPE_UUID, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        uint32_t{8}, kVendorExtensionData, sizeof(kVendorExtensionData))}),
+};
 
 class libAudioHalEffectParamTest : public ::testing::TestWithParam<EffectParamTestTuple> {
   public:
@@ -233,13 +331,8 @@
         : mParamTuple(GetParam()),
           mFactory(EffectsFactoryHalInterface::create()),
           mTypeUuid(std::get<TUPLE_UUID>(mParamTuple)),
-          mCombination(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
-          mExpectedValue([&]() {
-              std::vector<uint8_t> expectData(mCombination->valueSize);
-              mCombination->parameterExpect->readFromValue(expectData.data(),
-                                                           mCombination->valueSize);
-              return expectData;
-          }()),
+          mCombinations(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
+          mIsInput(std::get<TUPLE_IS_INPUT>(mParamTuple)),
           mDescs([&]() {
               std::vector<effect_descriptor_t> descs;
               if (mFactory && mTypeUuid && OK == mFactory->getDescriptors(mTypeUuid, &descs)) {
@@ -263,7 +356,8 @@
         uint32_t reply = 0;
         uint32_t replySize = sizeof(reply);
         ASSERT_EQ(OK, interface->command(EFFECT_CMD_INIT, 0, nullptr, &replySize, &reply));
-        ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(mEffectConfig),
+
+        ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
                                          &mEffectConfig, &replySize, &reply));
     }
 
@@ -284,60 +378,85 @@
     }
 
     void setAndGetParameter(const sp<EffectHalInterface>& interface) {
-        uint32_t replySize = sizeof(uint32_t);
-        uint8_t reply[replySize];
-        auto parameterSet = mCombination->parameterSet;
-        ASSERT_EQ(OK,
-                  interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)parameterSet->getTotalSize(),
-                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
-                                     &replySize, &reply))
-                << parameterSet->toString();
-        ASSERT_EQ(replySize, sizeof(uint32_t));
+        for (const auto combination : mCombinations) {
+            uint32_t replySize = kSetParamReplySize;
+            uint8_t reply[replySize];
+            const auto type = combination->mType;
+            if (type != GET_ONLY) {
+                const auto& set = combination->mParameterSet;
+                ASSERT_EQ(OK,
+                          interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)set->getTotalSize(),
+                                             const_cast<effect_param_t*>(&set->getEffectParam()),
+                                             &replySize, &reply))
+                        << set->toString();
+                ASSERT_EQ(replySize, kSetParamReplySize);
+            }
 
-        effect_param_t* getParam =
-                const_cast<effect_param_t*>(&mCombination->parameterGet->getEffectParam());
-        size_t maxReplySize = mCombination->valueSize + sizeof(effect_param_t) +
-                              sizeof(parameterSet->getPaddedParameterSize());
-        replySize = maxReplySize;
-        EXPECT_EQ(OK,
-                  interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)parameterSet->getTotalSize(),
-                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
-                                     &replySize, getParam));
-        EffectParamReader parameterGet(*getParam);
-        EXPECT_EQ(replySize, parameterGet.getTotalSize()) << parameterGet.toString();
-        if (mCombination->valueSize) {
-            std::vector<uint8_t> response(mCombination->valueSize);
-            EXPECT_EQ(OK, parameterGet.readFromValue(response.data(), mCombination->valueSize))
-                    << " try get valueSize " << mCombination->valueSize << " from "
-                    << parameterGet.toString() << " set " << parameterSet->toString();
-            EXPECT_EQ(response, mExpectedValue);
+            if (type != SET_ONLY) {
+                auto get = combination->mParameterGet;
+                auto expect = combination->mParameterExpect;
+                effect_param_t* getParam = const_cast<effect_param_t*>(&get->getEffectParam());
+                size_t maxReplySize = combination->mValueSize + sizeof(effect_param_t) +
+                                      sizeof(expect->getPaddedParameterSize());
+                replySize = maxReplySize;
+                EXPECT_EQ(OK,
+                          interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)expect->getTotalSize(),
+                                             const_cast<effect_param_t*>(&expect->getEffectParam()),
+                                             &replySize, getParam));
+
+                EffectParamReader getReader(*getParam);
+                EXPECT_EQ(replySize, getReader.getTotalSize()) << getReader.toString();
+                if (combination->mValueSize) {
+                    std::vector<uint8_t> expectedData(combination->mValueSize);
+                    EXPECT_EQ(OK, expect->readFromValue(expectedData.data(), expectedData.size()))
+                            << combination->toString();
+                    std::vector<uint8_t> response(combination->mValueSize);
+                    EXPECT_EQ(OK, getReader.readFromValue(response.data(), combination->mValueSize))
+                            << " try get valueSize " << combination->mValueSize << " from:\n"
+                            << getReader.toString() << "\nexpect:\n"
+                            << expect->toString();
+                    EXPECT_EQ(expectedData, response) << combination->toString();
+                }
+            }
         }
     }
 
+    static constexpr size_t kSetParamReplySize = sizeof(uint32_t);
     const EffectParamTestTuple mParamTuple;
     const sp<EffectsFactoryHalInterface> mFactory;
     const effect_uuid_t* mTypeUuid;
-    std::shared_ptr<EffectParamCombination> mCombination;
-    const std::vector<uint8_t> mExpectedValue;
+    std::vector<std::shared_ptr<EffectParamCombination>> mCombinations{};
+    const bool mIsInput;
     const std::vector<effect_descriptor_t> mDescs;
-    std::vector<sp<EffectHalInterface>> mHalInterfaces;
-    effect_config_t mEffectConfig = {.inputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_READ,
-                                                  .format = AUDIO_FORMAT_PCM_FLOAT,
-                                                  .bufferProvider.getBuffer = nullptr,
-                                                  .bufferProvider.releaseBuffer = nullptr,
-                                                  .bufferProvider.cookie = nullptr,
-                                                  .mask = EFFECT_CONFIG_ALL,
-                                                  .samplingRate = 48000,
-                                                  .channels = AUDIO_CHANNEL_IN_STEREO},
-
-                                     .outputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_WRITE,
-                                                   .format = AUDIO_FORMAT_PCM_FLOAT,
-                                                   .bufferProvider.getBuffer = nullptr,
-                                                   .bufferProvider.releaseBuffer = nullptr,
-                                                   .bufferProvider.cookie = nullptr,
-                                                   .mask = EFFECT_CONFIG_ALL,
-                                                   .samplingRate = 48000,
-                                                   .channels = AUDIO_CHANNEL_OUT_STEREO}};
+    std::vector<sp<EffectHalInterface>> mHalInterfaces{};
+    effect_config_t mEffectConfig = {
+            .inputCfg =
+                    {
+                            .buffer = {.frameCount = 0x100},
+                            .samplingRate = 48000,
+                            .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+                                                 : AUDIO_CHANNEL_IN_STEREO,
+                            .bufferProvider = {.getBuffer = nullptr,
+                                               .releaseBuffer = nullptr,
+                                               .cookie = nullptr},
+                            .format = AUDIO_FORMAT_PCM_FLOAT,
+                            .accessMode = EFFECT_BUFFER_ACCESS_READ,
+                            .mask = EFFECT_CONFIG_ALL,
+                    },
+            .outputCfg =
+                    {
+                            .buffer = {.frameCount = 0x100},
+                            .samplingRate = 48000,
+                            .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+                                                 : AUDIO_CHANNEL_OUT_STEREO,
+                            .bufferProvider = {.getBuffer = nullptr,
+                                               .releaseBuffer = nullptr,
+                                               .cookie = nullptr},
+                            .format = AUDIO_FORMAT_PCM_FLOAT,
+                            .accessMode = EFFECT_BUFFER_ACCESS_WRITE,
+                            .mask = EFFECT_CONFIG_ALL,
+                    },
+    };
 };
 
 TEST_P(libAudioHalEffectParamTest, setAndGetParam) {
@@ -392,7 +511,8 @@
             AudioUuid uuid = ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(
                                      *std::get<TUPLE_UUID>(info.param))
                                      .value();
-            std::string name = "UUID_" + toString(uuid);
+            std::string name = "UUID_" + toString(uuid) + "_";
+            name += std::get<TUPLE_IS_INPUT>(info.param) ? "_input" : "_output";
             std::replace_if(
                     name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
             return name;
@@ -404,6 +524,4 @@
     return RUN_ALL_TESTS();
 }
 
-// TODO: b/263986405 Add multi-thread testing
-
 } // namespace android
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 57b860d..f9ae2d4 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -441,10 +441,10 @@
                 track->prepareForAdjustChannels(mFrameCount);
             }
             } break;
-        case HAPTIC_INTENSITY: {
-            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
-            if (track->mHapticIntensity != hapticIntensity) {
-                track->mHapticIntensity = hapticIntensity;
+        case HAPTIC_SCALE: {
+            const os::HapticScale hapticScale = *reinterpret_cast<os::HapticScale*>(value);
+            if (track->mHapticScale != hapticScale) {
+                track->mHapticScale = hapticScale;
             }
             } break;
         case HAPTIC_MAX_AMPLITUDE: {
@@ -585,7 +585,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = os::HapticScale::NONE;
+    t->mHapticScale = {/*level=*/os::HapticLevel::NONE };
     t->mHapticMaxAmplitude = NAN;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
@@ -636,7 +636,7 @@
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity,
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticScale,
                                         t->mHapticMaxAmplitude);
                 } break;
                 default:
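A hedged sketch of how a mixer client passes the full os::HapticScale struct for the renamed HAPTIC_SCALE parameter (`mixer` and `trackName` are assumed to exist):

    os::HapticScale scale = {/*level=*/os::HapticLevel::NONE};
    // HAPTIC_SCALE takes a pointer to the HapticScale struct; the old
    // HAPTIC_INTENSITY path cast an integer intensity value instead.
    mixer->setParameter(trackName, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE, &scale);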
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index f2c386d..1a08a03 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "AudioResamplerSinc"
 //#define LOG_NDEBUG 0
 
-#define __STDC_CONSTANT_MACROS
 #include <malloc.h>
 #include <pthread.h>
 #include <string.h>
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index b39fb92..f558fd5 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -49,7 +49,7 @@
         DOWNMIX_TYPE    = 0x4004,
         // for haptic
         HAPTIC_ENABLED  = 0x4007, // Set haptic data from this track should be played or not.
-        HAPTIC_INTENSITY = 0x4008, // Set the intensity to play haptic data.
+        HAPTIC_SCALE = 0x4008, // Set the scale to play haptic data.
         HAPTIC_MAX_AMPLITUDE = 0x4009, // Set the max amplitude allowed for haptic data.
         // for target TIMESTRETCH
         PLAYBACK_RATE   = 0x4300, // Configure timestretch on this track name;
@@ -141,7 +141,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        os::HapticScale      mHapticIntensity;
+        os::HapticScale      mHapticScale;
         float                mHapticMaxAmplitude;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index ad402db..a33bf55 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for libaudioprocessing
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..b96ec6b 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -8,23 +9,23 @@
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_resampler_fuzzer",
-  srcs: [
-    "libaudioprocessing_resampler_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_resampler_fuzzer",
+    srcs: [
+        "libaudioprocessing_resampler_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_record_buffer_converter_fuzzer",
-  srcs: [
-    "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_record_buffer_converter_fuzzer",
+    srcs: [
+        "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index a5259aa..0b25327 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -1,5 +1,6 @@
 // Multichannel downmix effect library
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_downmix_license",
     ],
@@ -56,13 +57,11 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     header_libs: [
         "libaudioeffects",
-        "libhardware_headers"
+        "libhardware_headers",
     ],
     shared_libs: [
         "libaudioutils",
@@ -71,6 +70,6 @@
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 0e76d1d..5fb44b5 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -20,12 +20,60 @@
 
 #include "DownmixContext.h"
 
-using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
 
 namespace aidl::android::hardware::audio::effect {
 
+namespace {
+
+inline bool isChannelMaskValid(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
+    // check against unsupported channels (up to FCC_26)
+    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
+    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
+        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
+        return false;
+    }
+    return true;
+}
+
+inline bool isStereoChannelMask(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+
+    return channelMask.get<AudioChannelLayout::layoutMask>() == AudioChannelLayout::LAYOUT_STEREO;
+}
+
+}  // namespace
+
+bool DownmixContext::validateCommonConfig(const Parameter::Common& common) {
+    const AudioConfig& input = common.input;
+    const AudioConfig& output = common.output;
+    if (input.base.sampleRate != output.base.sampleRate) {
+        LOG(ERROR) << __func__ << ": SRC not supported, input: " << input.toString()
+                   << " output: " << output.toString();
+        return false;
+    }
+
+    if (!isStereoChannelMask(output.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": output should be stereo, not "
+                   << output.base.channelMask.toString();
+        return false;
+    }
+
+    if (!isChannelMaskValid(input.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": invalid input channel, " << input.base.channelMask.toString();
+        return false;
+    }
+
+    return true;
+}
+
 DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     LOG(DEBUG) << __func__;
@@ -62,8 +110,7 @@
     resetBuffer();
 }
 
-IEffect::Status DownmixContext::lvmProcess(float* in, float* out, int samples) {
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
     if (in == nullptr || out == nullptr ||
@@ -84,7 +131,6 @@
     bool accumulate = false;
     int frames = samples * sizeof(float) / getInputFrameSize();
     if (mType == Downmix::Type::STRIP) {
-        int inputChannelCount = getChannelCount(mChMask);
         while (frames) {
             if (accumulate) {
                 out[0] = std::clamp(out[0] + in[0], -1.f, 1.f);
@@ -93,7 +139,7 @@
                 out[0] = in[0];
                 out[1] = in[1];
             }
-            in += inputChannelCount;
+            in += mInputChannelCount;
             out += 2;
             frames--;
         }
@@ -105,8 +151,11 @@
             return status;
         }
     }
-    LOG(DEBUG) << __func__ << " done processing";
-    return {STATUS_OK, samples, samples};
+    int producedSamples = (samples / mInputChannelCount) << 1;
+    LOG(DEBUG) << __func__ << " done processing " << samples << " samples, generated "
+               << producedSamples << " frameSize: " << getInputFrameSize() << " - "
+               << getOutputFrameSize();
+    return {STATUS_OK, samples, producedSamples};
 }
 
 void DownmixContext::init_params(const Parameter::Common& common) {
@@ -122,18 +171,4 @@
     }
 }
 
-bool DownmixContext::isChannelMaskValid(AudioChannelLayout channelMask) {
-    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
-    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
-    // check against unsupported channels (up to FCC_26)
-    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
-    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
-        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
-        return false;
-    }
-    return true;
-}
-
 }  // namespace aidl::android::hardware::audio::effect
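As a worked example of the new return value: stripping a 5.1 (6-channel) input with samples = 600 consumes 100 frames and reports producedSamples = (600 / 6) << 1 = 200, matching the stereo output the downmix produces.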
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 1571c38..a381d7f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -50,7 +50,9 @@
         return RetCode::SUCCESS;
     }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status downmixProcess(float* in, float* out, int samples);
+
+    static bool validateCommonConfig(const Parameter::Common& common);
 
   private:
     DownmixState mState;
@@ -60,7 +62,6 @@
 
     // Common Params
     void init_params(const Parameter::Common& common);
-    bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 7068c5c..46156ce 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -71,42 +71,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DownmixImpl::setParameterCommon(const Parameter& param) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-
-    auto tag = param.getTag();
-    switch (tag) {
-        case Parameter::common:
-            RETURN_IF(mContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setCommFailed");
-            break;
-        case Parameter::deviceDescription:
-            RETURN_IF(mContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
-                              RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
-            break;
-        case Parameter::mode:
-            RETURN_IF(mContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setModeFailed");
-            break;
-        case Parameter::source:
-            RETURN_IF(mContext->setAudioSource(param.get<Parameter::source>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setSourceFailed");
-            break;
-        case Parameter::volumeStereo:
-            RETURN_IF(mContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
-                              RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
-            break;
-        default: {
-            LOG(ERROR) << __func__ << " unsupportedParameterTag " << toString(tag);
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commonParamNotSupported");
-        }
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
     switch (command) {
@@ -193,6 +157,8 @@
         return mContext;
     }
 
+    if (!DownmixContext::validateCommonConfig(common)) return nullptr;
+
     mContext = std::make_shared<DownmixContext>(1 /* statusFmqDepth */, common);
     return mContext;
 }
@@ -204,13 +170,52 @@
     return RetCode::SUCCESS;
 }
 
+void DownmixImpl::process() {
+    /**
+     * Wait for the EventFlag without holding a lock; this is safe because the mEventFlag pointer
+     * does not change during the lifetime of the worker thread (threadLoop).
+     */
+    uint32_t efState = 0;
+    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+        LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
+    }
+
+    {
+        std::lock_guard lg(mImplMutex);
+        RETURN_VALUE_IF(!mImplContext, void(), "nullContext");
+        auto statusMQ = mImplContext->getStatusFmq();
+        auto inputMQ = mImplContext->getInputDataFmq();
+        auto outputMQ = mImplContext->getOutputDataFmq();
+        auto buffer = mImplContext->getWorkBuffer();
+        if (!inputMQ || !outputMQ) {
+            return;
+        }
+
+        const auto availableToRead = inputMQ->availableToRead();
+        const auto availableToWrite = outputMQ->availableToWrite() *
+                                      mImplContext->getInputFrameSize() /
+                                      mImplContext->getOutputFrameSize();
+        assert(mImplContext->getWorkBufferSize() >=
+               std::max(availableToRead, availableToWrite));
+        auto processSamples = std::min(availableToRead, availableToWrite);
+        if (processSamples) {
+            inputMQ->read(buffer, processSamples);
+            IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
+            outputMQ->write(buffer, status.fmqProduced);
+            statusMQ->writeBlocking(&status, 1);
+            LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
+                        << status.fmqConsumed << " produced " << status.fmqProduced;
+        }
+    }
+}
+
 // Processing method running in EffectWorker thread.
 IEffect::Status DownmixImpl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     if (!mContext) {
         LOG(ERROR) << __func__ << " nullContext";
         return {EX_NULL_POINTER, 0, 0};
     }
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->downmixProcess(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
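Note: a minimal sketch (not part of the patch) of the sizing logic in the overridden DownmixImpl::process() above. Because an output frame is smaller than an input frame after the downmix, the writable space on the output FMQ is first converted into the number of input samples it can absorb, and the pass then processes the minimum of that and the readable input samples. The names below are placeholders, not the actual FMQ accessors.

    #include <algorithm>
    #include <cstddef>

    // Sketch only: how many input samples can be processed in one pass.
    size_t samplesToProcessSketch(size_t inAvail,   // samples readable from the input FMQ
                                  size_t outAvail,  // samples writable to the output FMQ
                                  size_t inFrameSize, size_t outFrameSize) {
        // Express the writable output space in input samples (input frames are larger).
        const size_t outAvailAsInputSamples = outAvail * inFrameSize / outFrameSize;
        return std::min(inAvail, outAvailAsInputSamples);
    }
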
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index 812d26b..54557dc 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -34,21 +34,26 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
+    // Downmix overrides process() because of its different input/output sample size requirement.
+    void process() override;
+
   private:
-    std::shared_ptr<DownmixContext> mContext;
-    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific);
+    std::shared_ptr<DownmixContext> mContext GUARDED_BY(mImplMutex);
+    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/benchmark/Android.bp b/media/libeffects/downmix/benchmark/Android.bp
index 10f14e2..5b62a0c 100644
--- a/media/libeffects/downmix/benchmark/Android.bp
+++ b/media/libeffects/downmix/benchmark/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..77d8f83 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
@@ -14,7 +15,7 @@
 //
 // Use "atest downmix_tests" to run.
 cc_test {
-    name:"downmix_tests",
+    name: "downmix_tests",
     gtest: true,
     host_supported: true,
     vendor: true,
@@ -45,7 +46,7 @@
 // test application and outputs then compares files in a local directory
 // on device (/data/local/tmp/downmixtest/).
 cc_test {
-    name:"downmixtest",
+    name: "downmixtest",
     host_supported: false,
     proprietary: true,
 
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 7838117..e93a4e6 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -86,9 +86,7 @@
     ],
 
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
         "dynamicsprocessingdefaults",
     ],
 
@@ -97,6 +95,6 @@
     ],
 
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 85ea53a..7e1549d 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -211,11 +211,16 @@
     RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
                       common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
               EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat");
+    std::lock_guard lg(mImplMutex);
     RETURN_OK_IF(mState != State::INIT);
-    auto context = createContext(common);
-    RETURN_IF(!context, EX_NULL_POINTER, "createContextFailed");
+    mImplContext = createContext(common);
+    RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
+    int version = 0;
+    RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+              "FailedToGetInterfaceVersion");
+    mImplContext->setVersion(version);
+    mEventFlag = mImplContext->getStatusEventFlag();
 
-    RETURN_IF_ASTATUS_NOT_OK(setParameterCommon(common), "setCommParamErr");
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
     } else {
@@ -227,8 +232,8 @@
     }
 
     mState = State::IDLE;
-    context->dupeFmq(ret);
-    RETURN_IF(createThread(context, getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
+    mContext->dupeFmq(ret);
+    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
               "FailedToCreateWorker");
     return ndk::ScopedAStatus::ok();
 }
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 1e1e72e..4897888 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -39,22 +39,25 @@
     ndk::ScopedAStatus open(const Parameter::Common& common,
                             const std::optional<Parameter::Specific>& specific,
                             OpenEffectReturn* ret) override;
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
-    std::shared_ptr<DynamicsProcessingContext> mContext;
+    std::shared_ptr<DynamicsProcessingContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
-                                                      Parameter::Specific* specific);
+                                                      Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
     bool isParamInRange(const Parameter::Specific& specific);
 };
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index e5e5368..042b063 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -63,6 +63,9 @@
 }
 
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
+    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
+        return ret;
+    }
     mCommon = common;
     init();
     LOG(INFO) << __func__ << common.toString();
@@ -312,9 +315,11 @@
 
 void DynamicsProcessingContext::init() {
     std::lock_guard lg(mMutex);
-    mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
-    mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            mCommon.input.base.channelMask);
+    if (mState == DYNAMICS_PROCESSING_STATE_UNINITIALIZED) {
+        mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
+    }
+    mChannelCount = static_cast<int>(::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask));
 }
 
 dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index ced7f19..839c6dd 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -74,7 +74,7 @@
     static constexpr float kPreferredProcessingDurationMs = 10.0f;
     static constexpr int kBandCount = 5;
     std::mutex mMutex;
-    size_t mChannelCount GUARDED_BY(mMutex) = 0;
+    int mChannelCount GUARDED_BY(mMutex) = 0;
     DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
     std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
     bool mEngineInited GUARDED_BY(mMutex) = false;
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index d94093e..ad5188f 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -20,10 +21,10 @@
     name: "libeffects",
     vendor: true,
     srcs: [
-         "EffectsFactory.c",
-         "EffectsConfigLoader.c",
-         "EffectsFactoryState.c",
-         "EffectsXmlConfigLoader.cpp",
+        "EffectsFactory.c",
+        "EffectsConfigLoader.c",
+        "EffectsFactoryState.c",
+        "EffectsXmlConfigLoader.cpp",
     ],
 
     shared_libs: [
@@ -34,7 +35,7 @@
     ],
     cflags: ["-fvisibility=hidden"],
 
-    local_include_dirs:["include/media"],
+    local_include_dirs: ["include/media"],
 
     header_libs: [
         "libaudioeffects",
@@ -61,5 +62,8 @@
         "libeffectsconfig",
         "libeffects",
     ],
-    local_include_dirs:[".", "include"],
+    local_include_dirs: [
+        ".",
+        "include",
+    ],
 }
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index fc80211..7d96b53 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -75,9 +75,7 @@
     ],
 
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
         "hapticgeneratordefaults",
     ],
 
@@ -86,6 +84,6 @@
     ],
 
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index e89e501..5d9886c 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -145,7 +145,7 @@
     memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
     context->param.hapticChannelCount = 0;
     context->param.audioChannelCount = 0;
-    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+    context->param.maxHapticIntensity = os::HapticLevel::MUTE;
 
     context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
     context->param.bpfQ = 1.0f;
@@ -316,9 +316,10 @@
             return -EINVAL;
         }
         int id = *(int *) value;
-        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
-        ALOGD("Setting haptic intensity as %d", hapticIntensity);
-        if (hapticIntensity == os::HapticScale::MUTE) {
+        os::HapticLevel hapticIntensity =
+                static_cast<os::HapticLevel>(*((int *) value + 1));
+        ALOGD("Setting haptic intensity as %d", static_cast<int>(hapticIntensity));
+        if (hapticIntensity == os::HapticLevel::MUTE) {
             context->param.id2Intensity.erase(id);
         } else {
             context->param.id2Intensity.emplace(id, hapticIntensity);
@@ -478,7 +479,7 @@
         return -ENODATA;
     }
 
-    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+    if (context->param.maxHapticIntensity == os::HapticLevel::MUTE) {
         // Haptic channels are muted, no need to generate haptic data.
         return 0;
     }
@@ -504,8 +505,9 @@
     float* hapticOutBuffer = HapticGenerator_runProcessingChain(
             context->processingChain, context->inputBuffer.data(),
             context->outputBuffer.data(), inBuffer->frameCount);
-    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity,
-                        context->param.maxHapticAmplitude);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
+                        {/*level=*/context->param.maxHapticIntensity},
+                        context->param.maxHapticAmplitude);
 
     // For haptic data, the haptic playback thread will copy the data from effect input buffer,
     // which contains haptic data at the end of the buffer, directly to sink buffer.
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index 85e961f..f122c0a 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -49,8 +49,8 @@
     uint32_t hapticChannelCount;
 
     // A map from track id to haptic intensity.
-    std::map<int, os::HapticScale> id2Intensity;
-    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+    std::map<int, os::HapticLevel> id2Intensity;
+    os::HapticLevel maxHapticIntensity; // max intensity will be used to scale haptic data.
     float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
 
     float resonantFrequency;
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 031477f..2d3bdd0 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -185,7 +185,7 @@
 IEffect::Status HapticGeneratorImpl::effectProcessImpl(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, samples);
+    return mContext->process(in, out, samples);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index fe9616a..53dcd49 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -33,16 +33,18 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index de44e05..5c38d17 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstddef>
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
 #include <Utils.h>
@@ -117,7 +118,7 @@
     return RetCode::SUCCESS;
 }
 
-IEffect::Status HapticGeneratorContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status HapticGeneratorContext::process(float* in, float* out, int samples) {
     LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
 
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
@@ -162,8 +163,8 @@
     }
 
     // Construct input buffer according to haptic channel source
-    for (size_t i = 0; i < mFrameCount; ++i) {
-        for (size_t j = 0; j < mParams.mHapticChannelCount; ++j) {
+    for (int64_t i = 0; i < mFrameCount; ++i) {
+        for (int j = 0; j < mParams.mHapticChannelCount; ++j) {
             mInputBuffer[i * mParams.mHapticChannelCount + j] =
                     in[i * mParams.mAudioChannelCount + mParams.mHapticChannelSource[j]];
         }
@@ -173,15 +174,14 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            static_cast<::android::os::HapticScale>(mParams.mMaxVibratorScale),
+            {/*level=*/static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale)},
             mParams.mVibratorInfo.qFactor);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
     // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
     // In that case, copy haptic data to input buffer instead of output buffer.
     // Note: this may not work with rpc/binder calls
-    int offset = samples;
-    for (int i = 0; i < hapticSampleCount; ++i) {
+    for (size_t i = 0; i < hapticSampleCount; ++i) {
         in[samples + i] = hapticOutBuffer[i];
     }
     return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
@@ -199,7 +199,7 @@
     mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
-    for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
+    for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
         // By default, use the first audio channel to generate haptic channels.
         mParams.mHapticChannelSource[i] = 0;
     }
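Note: an illustrative sketch (not part of the patch) of how HapticGeneratorContext::process gathers its input above: for each of mFrameCount interleaved audio frames, the configured source audio channel is copied into a per-haptic-channel buffer, and the generated haptic samples are later appended after the `samples` audio samples in the caller's input buffer. The helper and parameter names are hypothetical.

    #include <cstdint>
    #include <vector>

    // Sketch only: gather per-haptic-channel input from interleaved audio frames.
    void gatherHapticInputSketch(const float* in, std::vector<float>& hapticIn,
                                 int64_t frameCount, int audioChannels, int hapticChannels,
                                 const int* hapticChannelSource /* size >= hapticChannels */) {
        hapticIn.resize(frameCount * hapticChannels);
        for (int64_t i = 0; i < frameCount; ++i) {
            for (int j = 0; j < hapticChannels; ++j) {
                // Pick the configured source audio channel for each haptic channel.
                hapticIn[i * hapticChannels + j] = in[i * audioChannels + hapticChannelSource[j]];
            }
        }
    }
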
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index a0a0a4c..8618b7b 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -75,7 +75,7 @@
     RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
     HapticGenerator::VibratorInformation getHgVibratorInformation();
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
@@ -92,7 +92,7 @@
     HapticGeneratorState mState;
     HapticGeneratorParam mParams GUARDED_BY(mMutex);
     int mSampleRate;
-    int mFrameCount = 0;
+    int64_t mFrameCount = 0;
 
     // A cache for all shared pointers of the HapticGenerator
     struct HapticGeneratorProcessorsRecord mProcessorsRecord;
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 7acba11..46e4669 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -54,9 +54,7 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     header_libs: [
         "libaudioeffects",
@@ -71,6 +69,6 @@
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index a7d9282..bcf0db6 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -178,7 +178,7 @@
 IEffect::Status LoudnessEnhancerImpl::effectProcessImpl(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, samples);
+    return mContext->process(in, out, samples);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 5b9e924..e2e716c 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -33,22 +33,25 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
-    std::shared_ptr<LoudnessEnhancerContext> mContext;
+    std::shared_ptr<LoudnessEnhancerContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterLoudnessEnhancer(const LoudnessEnhancer::Tag& tag,
-                                                    Parameter::Specific* specific);
+                                                    Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index bc3fa45..be914bf 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -65,7 +65,7 @@
     return RetCode::SUCCESS;
 }
 
-IEffect::Status LoudnessEnhancerContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status LoudnessEnhancerContext::process(float* in, float* out, int samples) {
     LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
 
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 9a1ec4c..fd688d7 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -42,7 +42,7 @@
     RetCode setLeGain(int gainMb);
     int getLeGain() const { return mGain; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     std::mutex mMutex;
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index c21c5f2..8036983 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 7998879..c1a77f0 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -1,5 +1,6 @@
 // Music bundle
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_lib_license",
     ],
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..c32e91e 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for effects
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
 cc_test {
     name: "EffectReverbTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectReverbTest.cpp",
@@ -29,7 +30,7 @@
 cc_test {
     name: "EffectBundleTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index a163f4b..aa18deb 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -20,6 +20,7 @@
 #define LOG_TAG "BundleContext"
 #include <android-base/logging.h>
 #include <audio_utils/power.h>
+#include <media/AidlConversionCppNdk.h>
 #include <Utils.h>
 
 #include "BundleContext.h"
@@ -36,9 +37,10 @@
               const lvm::BundleEffectType& type)
         : EffectContext(statusDepth, common), mType(type) {
     LOG(DEBUG) << __func__ << type;
-    int channelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+
+    int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             common.input.base.channelMask);
-    mSamplesPerSecond = common.input.base.sampleRate * channelCount;
+    mSamplesPerSecond = common.input.base.sampleRate * inputChannelCount;
 }
 
 BundleContext::~BundleContext() {
@@ -50,9 +52,15 @@
     std::lock_guard lg(mMutex);
     // init with pre-defined preset NORMAL
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        mBandGainMdB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
+        mBandGainmB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
     }
 
+    // Initialise control params
+    LVM_ControlParams_t controlParams;
+    RetCode retStatus = initControlParameter(controlParams);
+    RETURN_VALUE_IF(retStatus != RetCode::SUCCESS, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    " UnsupportedParams");
+
     // allocate lvm instance
     LVM_ReturnStatus_en status;
     LVM_InstParams_t params = {.BufferMode = LVM_UNMANAGED_BUFFERS,
@@ -63,8 +71,6 @@
     GOTO_IF_LVM_ERROR(status, deinit, "LVM_GetInstanceHandleFailed");
 
     // set control
-    LVM_ControlParams_t controlParams;
-    initControlParameter(controlParams);
     status = LVM_SetControlParameters(mInstance, &controlParams);
     GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetControlParametersFailed");
 
@@ -227,8 +233,8 @@
         bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
 
         if (eqEnabled) {
-            for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                float bandFactor = mBandGainMdB[i] / 1500.0;
+            for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                float bandFactor = mBandGainmB[i] / 1500.0;
                 float bandCoefficient = lvm::kBandEnergyCoefficient[i];
                 float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
                 if (bandEnergy > 0) energyContribution += bandEnergy;
@@ -236,9 +242,9 @@
 
             // cross EQ coefficients
             float bandFactorSum = 0;
-            for (int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
-                float bandFactor1 = mBandGainMdB[i] / 1500.0;
-                float bandFactor2 = mBandGainMdB[i + 1] / 1500.0;
+            for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+                float bandFactor1 = mBandGainmB[i] / 1500.0;
+                float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
 
                 if (bandFactor1 > 0 && bandFactor2 > 0) {
                     float crossEnergy =
@@ -259,8 +265,8 @@
             energyContribution += boostFactor * boostCoefficient * boostCoefficient;
 
             if (eqEnabled) {
-                for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                    float bandFactor = mBandGainMdB[i] / 1500.0;
+                for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                    float bandFactor = mBandGainmB[i] / 1500.0;
                     float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
                     float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
                     if (bandEnergy > 0) energyBassBoost += bandEnergy;
@@ -278,15 +284,15 @@
 
         // roundoff
         int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
-        if (maxLevelRound + mVolume > 0) {
-            gainCorrection = maxLevelRound + mVolume;
+        if (maxLevelRound + mVolumedB > 0) {
+            gainCorrection = maxLevelRound + mVolumedB;
         }
 
-        params.VC_EffectLevel = mVolume - gainCorrection;
+        params.VC_EffectLevel = mVolumedB - gainCorrection;
         if (params.VC_EffectLevel < -96) {
             params.VC_EffectLevel = -96;
         }
-        LOG(INFO) << "\tVol: " << mVolume << ", GainCorrection: " << gainCorrection
+        LOG(INFO) << "\tVol: " << mVolumedB << ", GainCorrection: " << gainCorrection
                   << ", Actual vol: " << params.VC_EffectLevel;
 
         /* Activate the initial settings */
@@ -312,7 +318,9 @@
             device != AudioDeviceDescription{AudioDeviceType::OUT_CARKIT,
                                              AudioDeviceDescription::CONNECTION_BT_SCO} &&
             device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER,
-                                             AudioDeviceDescription::CONNECTION_BT_A2DP}) {
+                                             AudioDeviceDescription::CONNECTION_BT_A2DP} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_SUBMIX,
+                                             AudioDeviceDescription::CONNECTION_VIRTUAL}) {
             return false;
         }
     }
@@ -329,7 +337,9 @@
             device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
                                              AudioDeviceDescription::CONNECTION_BT_A2DP} &&
             device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
-                                             AudioDeviceDescription::CONNECTION_USB}) {
+                                             AudioDeviceDescription::CONNECTION_USB} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_SUBMIX,
+                                             AudioDeviceDescription::CONNECTION_VIRTUAL}) {
             return false;
         }
     }
@@ -421,7 +431,6 @@
 
 RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
     LVM_ControlParams_t params;
-    LVM_ReturnStatus_en status = LVM_SUCCESS;
 
     // Convert volume to dB
     float leftdB = VolToDb(volume.left);
@@ -470,6 +479,7 @@
 RetCode BundleContext::setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels) {
     RETURN_VALUE_IF(bandLevels.size() > lvm::MAX_NUM_BANDS || bandLevels.empty(),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "sizeExceedMax");
+
     RetCode ret = updateControlParameter(bandLevels);
     if (RetCode::SUCCESS == ret) {
         mCurPresetIdx = lvm::PRESET_CUSTOM;
@@ -484,15 +494,13 @@
     std::vector<Equalizer::BandLevel> bandLevels;
     bandLevels.reserve(lvm::MAX_NUM_BANDS);
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        bandLevels.emplace_back(
-                Equalizer::BandLevel{static_cast<int32_t>(i), mBandGainMdB[i]});
+        bandLevels.emplace_back(Equalizer::BandLevel{static_cast<int32_t>(i), mBandGainmB[i]});
     }
     return bandLevels;
 }
 
 std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
     std::vector<int32_t> freqs;
-
     LVM_ControlParams_t params;
     {
         std::lock_guard lg(mMutex);
@@ -512,14 +520,14 @@
     const auto [min, max] =
             std::minmax_element(bandLevels.begin(), bandLevels.end(),
                                 [](const auto& a, const auto& b) { return a.index < b.index; });
-    return min->index >= 0 && max->index < lvm::MAX_NUM_BANDS;
+    return min->index >= 0 && static_cast<size_t>(max->index) < lvm::MAX_NUM_BANDS;
 }
 
 RetCode BundleContext::updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels) {
     RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
                     "indexOutOfRange");
 
-    std::array<int, lvm::MAX_NUM_BANDS> tempLevel(mBandGainMdB);
+    std::array<int, lvm::MAX_NUM_BANDS> tempLevel(mBandGainmB);
     for (const auto& it : bandLevels) {
         tempLevel[it.index] = it.levelMb;
     }
@@ -540,8 +548,8 @@
         RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
                         RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     }
-    mBandGainMdB = tempLevel;
-    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainMdB)
+    mBandGainmB = tempLevel;
+    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainmB)
                << "mdB";
 
     return RetCode::SUCCESS;
@@ -568,25 +576,25 @@
 
 RetCode BundleContext::setVolumeLevel(float level) {
     if (mMuteEnabled) {
-        mLevelSaved = level;
+        mLevelSaveddB = level;
     } else {
-        mVolume = level;
+        mVolumedB = level;
     }
     LOG(INFO) << __func__ << " success with level " << level;
     return limitLevel();
 }
 
 float BundleContext::getVolumeLevel() const {
-    return (mMuteEnabled ? mLevelSaved : mVolume);
+    return (mMuteEnabled ? mLevelSaveddB : mVolumedB);
 }
 
 RetCode BundleContext::setVolumeMute(bool mute) {
     mMuteEnabled = mute;
     if (mMuteEnabled) {
-        mLevelSaved = mVolume;
-        mVolume = -96;
+        mLevelSaveddB = mVolumedB;
+        mVolumedB = -96;
     } else {
-        mVolume = mLevelSaved;
+        mVolumedB = mLevelSaveddB;
     }
     return limitLevel();
 }
@@ -619,11 +627,30 @@
     return RetCode::SUCCESS;
 }
 
-void BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+    int outputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.output.base.channelMask);
+    auto outputChannelMaskConv = aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+            mCommon.output.base.channelMask, /*isInput*/ false);
+    RETURN_VALUE_IF(!outputChannelMaskConv.ok(), RetCode::ERROR_ILLEGAL_PARAMETER,
+                    " outputChannelMaskNotValid");
+
+    params.NrChannels = outputChannelCount;
+    params.ChMask = outputChannelMaskConv.value();
+    params.SampleRate = lvmFsForSampleRate(mCommon.input.base.sampleRate);
+
+    int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    if (inputChannelCount == 1) {
+        params.SourceFormat = LVM_MONO;
+    } else if (inputChannelCount == 2) {
+        params.SourceFormat = LVM_STEREO;
+    } else if (inputChannelCount > 2 && inputChannelCount <= LVM_MAX_CHANNELS) {
+        params.SourceFormat = LVM_MULTICHANNEL;
+    }
+
     /* General parameters */
     params.OperatingMode = LVM_MODE_ON;
-    params.SampleRate = LVM_FS_44100;
-    params.SourceFormat = LVM_STEREO;
     params.SpeakerType = LVM_HEADPHONES;
 
     /* Concert Sound parameters */
@@ -658,13 +685,7 @@
     params.PSA_Enable = LVM_PSA_OFF;
     params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-    /* TE Control parameters */
-    params.TE_OperatingMode = LVM_TE_OFF;
-    params.TE_EffectLevel = 0;
-
-    params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
-    params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
-    params.SourceFormat = LVM_STEREO;
+    return RetCode::SUCCESS;
 }
 
 void BundleContext::initHeadroomParameter(LVM_HeadroomParams_t& params) const {
@@ -726,7 +747,7 @@
     return angles;
 }
 
-IEffect::Status BundleContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status BundleContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -833,21 +854,34 @@
             LOG(DEBUG) << "Effect_process() processing last frame";
         }
         mNumberEffectsCalled = 0;
-        LVM_UINT16 frames = samples * sizeof(float) / frameSize;
-        float* outTmp = (accumulate ? getWorkBuffer() : out);
-        /* Process the samples */
-        LVM_ReturnStatus_en lvmStatus;
-        {
-            std::lock_guard lg(mMutex);
-            lvmStatus = LVM_Process(mInstance, in, outTmp, frames, 0);
-            if (lvmStatus != LVM_SUCCESS) {
-                LOG(ERROR) << __func__ << lvmStatus;
-                return {EX_UNSUPPORTED_OPERATION, 0, 0};
-            }
-            if (accumulate) {
-                for (int i = 0; i < samples; i++) {
-                    out[i] += outTmp[i];
+        int frames = samples * sizeof(float) / frameSize;
+        int bufferIndex = 0;
+        // The LVM library processes at most int16_t-max frames per call, and the block size
+        // must be a multiple of kBlockSizeMultiple.
+        constexpr int kBlockSizeMultiple = 4;
+        constexpr int kMaxBlockFrames =
+                (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
+        while (frames > 0) {
+            float* outTmp = (accumulate ? getWorkBuffer() : out);
+            /* Process the samples */
+            LVM_ReturnStatus_en lvmStatus;
+            {
+                std::lock_guard lg(mMutex);
+                int processFrames = std::min(frames, kMaxBlockFrames);
+                lvmStatus = LVM_Process(mInstance, in + bufferIndex, outTmp + bufferIndex,
+                                        processFrames, 0);
+                if (lvmStatus != LVM_SUCCESS) {
+                    LOG(ERROR) << "LVM lib failed with error: " << lvmStatus;
+                    return {EX_UNSUPPORTED_OPERATION, 0, 0};
                 }
+                if (accumulate) {
+                    for (int i = 0; i < samples; i++) {
+                        out[i] += outTmp[i];
+                    }
+                }
+                frames -= processFrames;
+                int processedSize = processFrames * frameSize / sizeof(float);
+                bufferIndex += processedSize;
             }
         }
     } else {
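Note: a minimal sketch (not part of the patch) of the block-splitting arithmetic introduced above for LVM_Process, whose frame-count argument is limited to int16_t range; each block is capped and kept a multiple of kBlockSizeMultiple. The helper name is hypothetical.

    #include <algorithm>
    #include <cstdint>
    #include <limits>
    #include <vector>

    // Sketch only: split a total frame count into LVM-sized blocks.
    std::vector<int> splitIntoLvmBlocksSketch(int totalFrames) {
        constexpr int kBlockSizeMultiple = 4;
        constexpr int kMaxBlockFrames =
                (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
        std::vector<int> blocks;
        while (totalFrames > 0) {
            const int processFrames = std::min(totalFrames, kMaxBlockFrames);  // at most 32764 frames
            blocks.push_back(processFrames);
            totalFrames -= processFrames;
        }
        return blocks;
    }
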
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 779d53a..d823030 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -41,15 +41,6 @@
     RetCode disable();
     RetCode disableOperatingMode();
 
-    void setSampleRate(const int sampleRate) { mSampleRate = sampleRate; }
-    int getSampleRate() const { return mSampleRate; }
-
-    void setChannelMask(const aidl::android::media::audio::common::AudioChannelLayout& chMask) {
-        mChMask = chMask;
-    }
-    aidl::android::media::audio::common::AudioChannelLayout getChannelMask() const {
-        return mChMask;
-    }
     bool isDeviceSupportedBassBoost(
             const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
                     devices);
@@ -94,7 +85,7 @@
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
     Parameter::VolumeStereo getVolumeStereo() override { return {1.0f, 1.0f}; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
     IEffect::Status processEffect(float* in, float* out, int sampleToProcess);
 
@@ -105,9 +96,7 @@
     LVM_Handle_t mInstance GUARDED_BY(mMutex);
 
     aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
-    aidl::android::media::audio::common::AudioChannelLayout mChMask;
 
-    int mSampleRate = LVM_FS_44100;
     int mSamplesPerSecond = 0;
     int mSamplesToExitCountEq = 0;
     int mSamplesToExitCountBb = 0;
@@ -129,17 +118,17 @@
     int mBassStrengthSaved = 0;
     // Equalizer
     int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
-    std::array<int, lvm::MAX_NUM_BANDS> mBandGainMdB; /* band gain in millibels */
+    std::array<int, lvm::MAX_NUM_BANDS> mBandGainmB; /* band gain in millibels */
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
     ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
     // Volume
-    float mLevelSaved = 0; /* for when mute is set, level must be saved */
-    float mVolume = 0;
+    float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
+    float mVolumedB = 0;
     bool mMuteEnabled = false; /* Must store as mute = -96dB level */
 
-    void initControlParameter(LVM_ControlParams_t& params) const;
+    RetCode initControlParameter(LVM_ControlParams_t& params) const;
     void initHeadroomParameter(LVM_HeadroomParams_t& params) const;
     RetCode limitLevel();
     static float VolToDb(float vol);
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 143329d..daabdb7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -129,8 +129,7 @@
                    .implementor = "NXP Software Ltd."},
         .capability = kVirtualizerCap};
 
-static const std::vector<Range::VolumeRange> kVolumeRanges = {
-        MAKE_RANGE(Volume, levelDb, -9600, 0)};
+static const std::vector<Range::VolumeRange> kVolumeRanges = {MAKE_RANGE(Volume, levelDb, -96, 0)};
 static const Capability kVolumeCap = {.range = kVolumeRanges};
 static const std::string kVolumeEffectName = "Volume";
 static const Descriptor kVolumeDesc = {
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 3148d36..755f57c 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -425,10 +425,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectBundleAidl::getContext() {
-    return mContext;
-}
-
 RetCode EffectBundleAidl::releaseContext() {
     if (mContext) {
         GlobalSession::getGlobalSession().releaseSession(mType, mContext->getSessionId());
@@ -462,7 +458,7 @@
 IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index ec1abe8..429e941 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -36,41 +36,47 @@
     ~EffectBundleAidl() override;
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterCommon(const Parameter& param) REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<BundleContext> mContext;
+    std::shared_ptr<BundleContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     lvm::BundleEffectType mType = lvm::BundleEffectType::EQUALIZER;
 
     IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
 
-    ndk::ScopedAStatus setParameterBassBoost(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Id& id,
-                                             Parameter::Specific* specific);
+    ndk::ScopedAStatus setParameterBassBoost(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterEqualizer(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Id& id,
-                                             Parameter::Specific* specific);
-    ndk::ScopedAStatus setParameterVolume(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterVolume(const Volume::Id& id, Parameter::Specific* specific);
-    ndk::ScopedAStatus setParameterVirtualizer(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterEqualizer(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus setParameterVolume(const Parameter::Specific& specific) REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterVolume(const Volume::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus setParameterVirtualizer(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterVirtualizer(const Virtualizer::Id& id,
-                                               Parameter::Specific* specific);
+                                               Parameter::Specific* specific) REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index fa300d2..781aad6 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,6 @@
 // music bundle wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_wrapper_license",
     ],
@@ -110,9 +111,7 @@
     ],
     static_libs: ["libmusicbundle"],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["Aidl"],
     header_libs: [
@@ -120,14 +119,18 @@
         "libhardware_headers",
     ],
     shared_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudioutils",
         "liblog",
+        "libstagefright_foundation",
     ],
     cflags: [
         "-Wthread-safety",
+        "-DBACKEND_NDK",
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
 
@@ -140,9 +143,7 @@
     ],
     static_libs: ["libreverb"],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["Reverb/aidl"],
     header_libs: [
@@ -160,6 +161,6 @@
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index b49d109..c714bc9 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -358,10 +358,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectReverb::getContext() {
-    return mContext;
-}
-
 RetCode EffectReverb::releaseContext() {
     if (mContext) {
         mContext.reset();
@@ -394,7 +390,7 @@
 IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
index d7d2bbd..e0771a1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -30,35 +30,41 @@
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
 
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<ReverbContext> mContext;
+    std::shared_ptr<ReverbContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     lvm::ReverbEffectType mType;
 
     IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
 
-    ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterPresetReverb(const PresetReverb::Id& id,
-                                                Parameter::Specific* specific);
+                                                Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
-                                                       Parameter::Specific* specific);
+                                                       Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 79e67f2..1c66c78 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -329,7 +329,7 @@
  */
 
 int ReverbContext::convertLevel(int level) {
-    for (int i = 0; i < kLevelMapping.size(); i++) {
+    for (std::size_t i = 0; i < kLevelMapping.size(); i++) {
         if (level <= kLevelMapping[i]) {
             return i;
         }
@@ -352,7 +352,7 @@
     return kDefaultLPF;
 }
 
-IEffect::Status ReverbContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status ReverbContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index d11a081..7d0ccff 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -100,7 +100,7 @@
     }
     bool getReflectionsLevel() const { return mReflectionsLevelMb; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     static constexpr inline float kUnitVolume = 1;
@@ -161,7 +161,7 @@
     std::mutex mMutex;
     const lvm::ReverbEffectType mType;
     bool mEnabled = false;
-    LVREV_Handle_t mInstance GUARDED_BY(mMutex);
+    LVREV_Handle_t mInstance GUARDED_BY(mMutex) = LVM_NULL;
 
     int mRoomLevel = 0;
     int mRoomHfLevel = 0;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index d018c47..d658536 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_preprocessing_license",
     ],
@@ -67,9 +68,7 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["aidl"],
     shared_libs: [
@@ -91,6 +90,6 @@
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index e8ae8b3..1675d97 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -412,10 +412,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectPreProcessing::getContext() {
-    return mContext;
-}
-
 RetCode EffectPreProcessing::releaseContext() {
     if (mContext) {
         PreProcessingSession::getPreProcessingSession().releaseSession(mType,
@@ -450,7 +446,7 @@
 IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
index fad848a..9ce5597 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -31,41 +31,51 @@
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
 
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<PreProcessingContext> mContext;
+    std::shared_ptr<PreProcessingContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     PreProcessingEffectType mType;
 
-    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Id& id,
-                                                        Parameter::Specific* specific);
+                                                        Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Id& id,
-                                                          Parameter::Specific* specific);
+                                                          Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Id& id,
-                                                          Parameter::Specific* specific);
+                                                          Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterNoiseSuppression(const NoiseSuppression::Id& id,
-                                                    Parameter::Specific* specific);
+                                                    Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index c1e4eda..6f671f0 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -141,6 +141,9 @@
 }
 
 RetCode PreProcessingContext::setCommon(const Parameter::Common& common) {
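+    // Update the I/O frame size for the new config first; leave mCommon untouched on failure.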
+    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
+        return ret;
+    }
     mCommon = common;
     updateConfigs(common);
     return RetCode::SUCCESS;
@@ -265,7 +268,7 @@
     return mLevel;
 }
 
-IEffect::Status PreProcessingContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status PreProcessingContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 9ba1bbe..811bacf 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -76,7 +76,7 @@
     RetCode setNoiseSuppressionLevel(NoiseSuppression::Level level);
     NoiseSuppression::Level getNoiseSuppressionLevel() const;
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     static constexpr inline int kAgcDefaultTargetLevel = 3;
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index fbbcab4..ca99bf8 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index d80b135..ad8d84d 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing unit test
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
index ab7e468..2d07a9b 100644
--- a/media/libeffects/spatializer/benchmarks/Android.bp
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..ddfcff3 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for spatializer effect
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
 cc_test {
     name: "SpatializerTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     host_supported: false,
     srcs: [
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index cf782f7..66ceadf 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -60,8 +60,6 @@
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
         "visualizer_defaults",
     ],
     cflags: [
@@ -72,6 +70,6 @@
     ],
     relative_install_path: "soundfx",
     visibility: [
-        "//hardware/interfaces/audio/aidl/default",
+        "//hardware/interfaces/audio/aidl/default:__subpackages__",
     ],
 }
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 0303842..9b1bac6 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -59,10 +59,11 @@
 const std::string VisualizerImpl::kEffectName = "Visualizer";
 const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
         MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
-        MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+        MAKE_RANGE(Visualizer, captureSamples, VisualizerContext::kMinCaptureBufSize,
+                   VisualizerContext::kMaxCaptureBufSize),
         /* get-only parameters; set an invalid range (min > max) to indicate that set is not supported */
-        MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.peak = 1, .rms = 1}),
-                   Visualizer::Measurement({.peak = 0, .rms = 0})),
+        MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.rms = 1, .peak = 1}),
+                   Visualizer::Measurement({.rms = 0, .peak = 0})),
         MAKE_RANGE(Visualizer, captureSampleBuffer, std::vector<uint8_t>({1}),
                    std::vector<uint8_t>({0}))};
 const Capability VisualizerImpl::kCapability = {
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index ec725db..b48c85e 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -35,23 +35,25 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
     static const std::vector<Range::VisualizerRange> kRanges;
-    std::shared_ptr<VisualizerContext> mContext;
+    std::shared_ptr<VisualizerContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterVisualizer(const Visualizer::Tag& tag,
-                                                    Parameter::Specific* specific);
+                                              Parameter::Specific* specific) REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d2bb3a..c763b1a 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -93,7 +93,7 @@
     mCaptureSamples = samples;
     return RetCode::SUCCESS;
 }
-int VisualizerContext::getCaptureSamples() {
+int32_t VisualizerContext::getCaptureSamples() {
     std::lock_guard lg(mMutex);
     return mCaptureSamples;
 }
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 958035f..b03e038 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -18,6 +18,7 @@
 
 #include <android-base/thread_annotations.h>
 #include <audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_visualizer.h>
 
 #include "effect-impl/EffectContext.h"
 
@@ -25,8 +26,11 @@
 
 class VisualizerContext final : public EffectContext {
   public:
-    static const uint32_t kMaxCaptureBufSize = 65536;
-    static const uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
+    // The min/max capture sizes need to align with VISUALIZER_CAPTURE_SIZE_MIN and
+    // VISUALIZER_CAPTURE_SIZE_MAX because of a limitation in the audio_utils fixedfft.
+    static constexpr int32_t kMinCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MIN;
+    static constexpr int32_t kMaxCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MAX;
+    static constexpr uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
 
     VisualizerContext(int statusDepth, const Parameter::Common& common);
     ~VisualizerContext();
@@ -38,8 +42,8 @@
     // keep all parameters and reset buffer.
     void reset();
 
-    RetCode setCaptureSamples(int captureSize);
-    int getCaptureSamples();
+    RetCode setCaptureSamples(int32_t captureSize);
+    int32_t getCaptureSamples();
     RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
     Visualizer::MeasurementMode getMeasurementMode();
     RetCode setScalingMode(Visualizer::ScalingMode mode);
@@ -86,7 +90,7 @@
     // capture buf with 8 bits mono PCM samples
     std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
     uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
-    uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+    int32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
 
     // to avoid recomputing it every time a buffer is processed
     uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9955862..70a242d 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,18 +12,18 @@
     name: "libheadtracking",
     host_supported: true,
     srcs: [
-      "HeadTrackingProcessor.cpp",
-      "ModeSelector.cpp",
-      "Pose.cpp",
-      "PoseBias.cpp",
-      "PoseDriftCompensator.cpp",
-      "PosePredictor.cpp",
-      "PoseRateLimiter.cpp",
-      "QuaternionUtil.cpp",
-      "ScreenHeadFusion.cpp",
-      "StillnessDetector.cpp",
-      "Twist.cpp",
-      "VectorRecorder.cpp",
+        "HeadTrackingProcessor.cpp",
+        "ModeSelector.cpp",
+        "Pose.cpp",
+        "PoseBias.cpp",
+        "PoseDriftCompensator.cpp",
+        "PosePredictor.cpp",
+        "PoseRateLimiter.cpp",
+        "QuaternionUtil.cpp",
+        "ScreenHeadFusion.cpp",
+        "StillnessDetector.cpp",
+        "Twist.cpp",
+        "VectorRecorder.cpp",
     ],
     shared_libs: [
         "libaudioutils",
@@ -51,7 +52,7 @@
 cc_library {
     name: "libheadtracking-binding",
     srcs: [
-      "SensorPoseProvider.cpp",
+        "SensorPoseProvider.cpp",
     ],
     shared_libs: [
         "libbase",
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 085a7e4..ee4075f 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -32,6 +32,7 @@
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/RefBase.h>
+#include <algorithm>
 #include <vector>
 
 HeifDecoder* createHeifDecoder() {
@@ -42,7 +43,10 @@
 
 void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
     info->mWidth = videoFrame->mDisplayWidth;
-    info->mHeight = videoFrame->mDisplayHeight;
+    // Number of scanlines is mDisplayHeight. Clamp it to mHeight to guard
+    // against malformed streams claiming that mDisplayHeight is greater than
+    // mHeight.
+    info->mHeight = std::min(videoFrame->mDisplayHeight, videoFrame->mHeight);
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
@@ -746,7 +750,9 @@
                    (videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
                    (videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
     mCurScanline++;
-    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
+    // Do not try to copy more than |videoFrame->mWidth| pixels.
+    uint32_t width = std::min(videoFrame->mDisplayWidth, videoFrame->mWidth);
+    memcpy(dst, src, videoFrame->mBytesPerPixel * width);
     return true;
 }
 
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index e33cc0f..4436fb9 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -198,7 +198,9 @@
             ALOGV("@@@ checking %s", name);
             const char *s = mValues.getEntry(i);
             int32_t inputLength = strlen(s);
-            const char *enc;
+            // Use the encoding determined from the combination of artist/album/title etc.
+            // as the default if no better match is found.
+            const char *enc = combinedenc;
 
             if (!allprintable && (!strcmp(name, "artist") ||
                     !strcmp(name, "albumartist") ||
@@ -216,13 +218,12 @@
                     const UCharsetMatch** ucma = ucsdet_detectAll(csd, &matches, &status);
                     const UCharsetMatch* bestSingleMatch = getPreferred(s, inputLength,
                             ucma, matches, &goodmatchSingle, &highestSingle);
-                    if (goodmatchSingle || highestSingle > highest)
-                        enc = ucsdet_getName(bestSingleMatch, &status);
-                    else
-                        enc = combinedenc;
-                } else {
-                    // use encoding determined from the combination of artist/album/title etc.
-                    enc = combinedenc;
+                    // getPreferred() may return null; check it before calling
+                    // ucsdet_getName().
+                    if (bestSingleMatch != NULL) {
+                        if (goodmatchSingle || highestSingle > highest)
+                            enc = ucsdet_getName(bestSingleMatch, &status);
+                    }
                 }
             } else {
                 if (isPrintableAscii(s, inputLength)) {
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89348a4..3ab32f0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,9 +111,12 @@
 // To collect the encoder usage for the battery app
 static void addBatteryData(uint32_t params) {
     sp<IBinder> binder =
-        defaultServiceManager()->getService(String16("media.player"));
+        defaultServiceManager()->waitForService(String16("media.player"));
     sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
-    CHECK(service.get() != NULL);
+    if (service.get() == nullptr) {
+        ALOGE("%s: Failed to get media.player service", __func__);
+        return;
+    }
 
     service->addBatteryData(params);
 }
@@ -1453,29 +1456,44 @@
 }
 
 status_t StagefrightRecorder::setupAACRecording() {
-    // FIXME:
-    // Add support for OUTPUT_FORMAT_AAC_ADIF
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
+    // TODO(b/324512842): Add support for OUTPUT_FORMAT_AAC_ADIF
+    if (mOutputFormat != OUTPUT_FORMAT_AAC_ADTS) {
+        ALOGE("Invalid output format %d used for AAC recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
-    CHECK(mAudioEncoder == AUDIO_ENCODER_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_HE_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_AAC_ELD);
-    CHECK(mAudioSource != AUDIO_SOURCE_CNT);
+    if (mAudioEncoder != AUDIO_ENCODER_AAC
+            && mAudioEncoder != AUDIO_ENCODER_HE_AAC
+            && mAudioEncoder != AUDIO_ENCODER_AAC_ELD) {
+        ALOGE("Invalid encoder %d used for AAC recording", mAudioEncoder);
+        return BAD_VALUE;
+    }
+
+    if (mAudioSource == AUDIO_SOURCE_CNT) {
+        ALOGE("Audio source hasn't been set correctly");
+        return BAD_VALUE;
+    }
 
     mWriter = new AACWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupOggRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_OGG);
+    if (mOutputFormat != OUTPUT_FORMAT_OGG) {
+        ALOGE("Invalid output format %d used for OGG recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     mWriter = new OggWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupAMRRecording() {
-    CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
-          mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+    if (mOutputFormat != OUTPUT_FORMAT_AMR_NB
+            && mOutputFormat != OUTPUT_FORMAT_AMR_WB) {
+        ALOGE("Invalid output format %d used for AMR recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if (mOutputFormat == OUTPUT_FORMAT_AMR_NB) {
         if (mAudioEncoder != AUDIO_ENCODER_DEFAULT &&
@@ -1528,7 +1546,10 @@
 }
 
 status_t StagefrightRecorder::setupRTPRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
+    if (mOutputFormat != OUTPUT_FORMAT_RTP_AVP) {
+        ALOGE("Invalid output format %d used for RTP recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if ((mAudioSource != AUDIO_SOURCE_CNT
                 && mVideoSource != VIDEO_SOURCE_LIST_END)
@@ -1571,7 +1592,10 @@
 }
 
 status_t StagefrightRecorder::setupMPEG2TSRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+    if (mOutputFormat != OUTPUT_FORMAT_MPEG2TS) {
+        ALOGE("Invalid output format %d used for MPEG2TS recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
 
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index b511372..74b0a85 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -110,6 +110,17 @@
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
+        "libcameraservice",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V3-ndk",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.4",
+        "libaudiohal@7.0",
     ],
     header_libs: [
         "libaudiohal_headers",
@@ -124,14 +135,19 @@
     ],
     defaults: [
         "libmediaplayerserviceFuzzer_defaults",
+        "libmediaplayerservice_defaults",
     ],
     static_libs: [
         "libplayerservice_datasource",
     ],
     shared_libs: [
+        "libmediaplayerservice",
         "libdatasource",
         "libdrmframework",
+        "libstagefright_httplive",
+        "libmediaextractorservice",
     ],
+    include_dirs: ["frameworks/av/services/mediaextractor"],
 }
 
 cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index fdac1a1..2518c21 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -15,22 +15,22 @@
  *
  */
 
-#include <media/stagefright/foundation/AString.h>
-#include "fuzzer/FuzzedDataProvider.h"
-
 #include <AudioFlinger.h>
 #include <MediaPlayerService.h>
 #include <ResourceManagerService.h>
-#include <fakeservicemanager/FakeServiceManager.h>
 #include <StagefrightRecorder.h>
 #include <camera/Camera.h>
 #include <camera/android/hardware/ICamera.h>
+#include <fakeservicemanager/FakeServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/foundation/AString.h>
 #include <mediametricsservice/MediaMetricsService.h>
 #include <thread>
+#include "CameraService.h"
+#include "fuzzer/FuzzedDataProvider.h"
 
 using namespace std;
 using namespace android;
@@ -46,32 +46,27 @@
     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_SOURCE_VOICE_COMMUNICATION,
     AUDIO_SOURCE_REMOTE_SUBMIX,     AUDIO_SOURCE_UNPROCESSED,
     AUDIO_SOURCE_VOICE_PERFORMANCE, AUDIO_SOURCE_ECHO_REFERENCE,
-    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD};
+    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD,
+    AUDIO_SOURCE_ULTRASOUND};
+
+constexpr output_format kOutputFormat[] = {
+        OUTPUT_FORMAT_DEFAULT,        OUTPUT_FORMAT_THREE_GPP,
+        OUTPUT_FORMAT_MPEG_4,         OUTPUT_FORMAT_AUDIO_ONLY_START,
+        OUTPUT_FORMAT_RAW_AMR,        OUTPUT_FORMAT_AMR_NB,
+        OUTPUT_FORMAT_AMR_WB,         OUTPUT_FORMAT_AAC_ADTS,
+        OUTPUT_FORMAT_AUDIO_ONLY_END, OUTPUT_FORMAT_RTP_AVP,
+        OUTPUT_FORMAT_MPEG2TS,        OUTPUT_FORMAT_WEBM,
+        OUTPUT_FORMAT_HEIF,           OUTPUT_FORMAT_OGG,
+        OUTPUT_FORMAT_LIST_END};
+
+constexpr video_encoder kVideoEncoder[] = {
+        VIDEO_ENCODER_DEFAULT,      VIDEO_ENCODER_H263, VIDEO_ENCODER_H264,
+        VIDEO_ENCODER_MPEG_4_SP,    VIDEO_ENCODER_VP8,  VIDEO_ENCODER_HEVC,
+        VIDEO_ENCODER_DOLBY_VISION, VIDEO_ENCODER_AV1,  VIDEO_ENCODER_LIST_END};
 
 constexpr audio_microphone_direction_t kSupportedMicrophoneDirections[] = {
     MIC_DIRECTION_UNSPECIFIED, MIC_DIRECTION_FRONT, MIC_DIRECTION_BACK, MIC_DIRECTION_EXTERNAL};
 
-struct RecordingConfig {
-    output_format outputFormat;
-    audio_encoder audioEncoder;
-    video_encoder videoEncoder;
-};
-
-const struct RecordingConfig kRecordingConfigList[] = {
-    {OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_HE_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC_ELD, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_RTP_AVP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_MPEG2TS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_WEBM, AUDIO_ENCODER_VORBIS, VIDEO_ENCODER_VP8},
-    {OUTPUT_FORMAT_THREE_GPP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_HEVC}};
-
 const string kParametersList[] = {"max-duration",
                                   "max-filesize",
                                   "interleave-duration-us",
@@ -104,14 +99,16 @@
                                   "rtp-param-ext-cvo-degrees",
                                   "video-param-request-i-frame",
                                   "rtp-param-set-socket-dscp",
-                                  "rtp-param-set-socket-network"};
+                                  "rtp-param-set-socket-network",
+                                  "rtp-param-set-socket-ecn",
+                                  "rtp-param-remote-ip",
+                                  "rtp-param-set-socket-network",
+                                  "log-session-id"};
 
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
 constexpr int32_t kMinVideoSize = 2;
 constexpr int32_t kMaxVideoSize = 8192;
-constexpr int32_t kNumRecordMin = 1;
-constexpr int32_t kNumRecordMax = 10;
+const char kOutputFile[] = "OutputFile";
+const char kNextOutputFile[] = "NextOutputFile";
 
 class TestAudioDeviceCallback : public AudioSystem::AudioDeviceCallback {
    public:
@@ -194,8 +191,7 @@
     int32_t max;
     mStfRecorder->getMaxAmplitude(&max);
 
-    int32_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mStfRecorder->setInputDevice(deviceId);
+    int32_t deviceId;
     mStfRecorder->getRoutedDeviceId(&deviceId);
 
     vector<android::media::MicrophoneInfoFw> activeMicrophones{};
@@ -213,101 +209,189 @@
     sp<IGraphicBufferProducer> buffer = mStfRecorder->querySurfaceMediaSource();
 }
 
-void MediaRecorderClientFuzzer::dumpInfo() {
-    int32_t dumpFd = memfd_create("DumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mStfRecorder->dump(dumpFd, args);
-    close(dumpFd);
-}
-
-void MediaRecorderClientFuzzer::setConfig() {
-    mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
-    mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
-    mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
-    mStfRecorder->setPreferredMicrophoneDirection(
-        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
-    mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool());
-    bool isPrivacySensitive;
-    mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
-    mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize),
-                               mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize));
-    mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>());
-    mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool());
-    mStfRecorder->setPreferredMicrophoneFieldDimension(mFdp.ConsumeFloatingPoint<float>());
-    mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
-
-    int32_t Idx = mFdp.ConsumeIntegralInRange<int32_t>(0, size(kRecordingConfigList) - 1);
-    mStfRecorder->setOutputFormat(kRecordingConfigList[Idx].outputFormat);
-    mStfRecorder->setAudioEncoder(kRecordingConfigList[Idx].audioEncoder);
-    mStfRecorder->setVideoEncoder(kRecordingConfigList[Idx].videoEncoder);
-
-    int32_t nextOutputFd = memfd_create("NextOutputFile", MFD_ALLOW_SEALING);
-    mStfRecorder->setNextOutputFile(nextOutputFd);
-    close(nextOutputFd);
-
-    for (Idx = 0; Idx < size(kParametersList); ++Idx) {
-        if (mFdp.ConsumeBool()) {
-            int32_t value = mFdp.ConsumeIntegral<int32_t>();
-            mStfRecorder->setParameters(
-                String8((kParametersList[Idx] + "=" + to_string(value)).c_str()));
-        }
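+// Invoke the wrapped recorder API call only when the fuzzed input opts in.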
+template <typename FuncWrapper>
+void callMediaAPI(FuncWrapper funcWrapper, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        funcWrapper();
     }
 }
 
-MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t *data, size_t size)
-    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create("OutputFile", MFD_ALLOW_SEALING)) {
+void MediaRecorderClientFuzzer::setConfig() {
+    callMediaAPI(
+            [this]() {
+                mSurfaceControl = mComposerClient.createSurface(
+                        String8(mFdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* width */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* height */,
+                        mFdp.ConsumeIntegral<int32_t>() /* pixel-format */,
+                        mFdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (mSurfaceControl) {
+                    mSurface = mSurfaceControl->getSurface();
+                    mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
+                }
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setInputDevice(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
+                mStfRecorder->setListener(listener);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestCamera> testCamera = sp<TestCamera>::make();
+                sp<Camera> camera = Camera::create(testCamera);
+                mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
+                mStfRecorder->setInputSurface(persistentSurface);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
+                mStfRecorder->setAudioDeviceCallback(callback);
+                mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneDirection(
+                        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                bool isPrivacySensitive;
+                mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* width */,
+                                           mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* height */);
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneFieldDimension(
+                        mFdp.ConsumeFloatingPoint<float>());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
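+                // Pick a random output format, then restrict the audio/video encoders
+                // to combinations that the recorder accepts for that format.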
+                output_format OutputFormat = mFdp.PickValueInArray(kOutputFormat);
+                audio_encoder AudioEncoderFormat =
+                        (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(AUDIO_ENCODER_DEFAULT,
+                                                                            AUDIO_ENCODER_LIST_END);
+                video_encoder VideoEncoderFormat = mFdp.PickValueInArray(kVideoEncoder);
+                if (OutputFormat == OUTPUT_FORMAT_AMR_NB) {
+                    AudioEncoderFormat =
+                            mFdp.ConsumeBool() ? AUDIO_ENCODER_DEFAULT : AUDIO_ENCODER_AMR_NB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AMR_WB) {
+                    AudioEncoderFormat = AUDIO_ENCODER_AMR_WB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AAC_ADIF ||
+                           OutputFormat == OUTPUT_FORMAT_AAC_ADTS ||
+                           OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                    AudioEncoderFormat = (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_ENCODER_AAC, AUDIO_ENCODER_AAC_ELD);
+                    if (OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                        VideoEncoderFormat = VIDEO_ENCODER_H264;
+                    }
+                }
+                mStfRecorder->setOutputFormat(OutputFormat);
+                mStfRecorder->setAudioEncoder(AudioEncoderFormat);
+                mStfRecorder->setVideoEncoder(VideoEncoderFormat);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                int32_t nextOutputFd = memfd_create(kNextOutputFile, MFD_ALLOW_SEALING);
+                mStfRecorder->setNextOutputFile(nextOutputFd);
+                close(nextOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                for (size_t idx = 0; idx < size(kParametersList); ++idx) {
+                    if (mFdp.ConsumeBool()) {
+                        int32_t value = mFdp.ConsumeIntegral<int32_t>();
+                        mStfRecorder->setParameters(
+                                String8((kParametersList[idx] + "=" + to_string(value)).c_str()));
+                    }
+                }
+            },
+            &mFdp);
+}
+
+MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t* data, size_t size)
+    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create(kOutputFile, MFD_ALLOW_SEALING)) {
     AttributionSourceState attributionSource;
     attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
     attributionSource.token = sp<BBinder>::make();
     mStfRecorder = make_unique<StagefrightRecorder>(attributionSource);
-
-    mSurfaceControl = mComposerClient.createSurface(
-        String8(mFdp.ConsumeRandomLengthString().c_str()), mFdp.ConsumeIntegral<uint32_t>(),
-        mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeIntegral<int32_t>(),
-        mFdp.ConsumeIntegral<int32_t>());
-    if (mSurfaceControl) {
-        mSurface = mSurfaceControl->getSurface();
-        mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
-    }
-
-    sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
-    mStfRecorder->setListener(listener);
-
-    sp<TestCamera> testCamera = sp<TestCamera>::make();
-    sp<Camera> camera = Camera::create(testCamera);
-    mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
-
-    sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
-    mStfRecorder->setInputSurface(persistentSurface);
-
-    sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
-    mStfRecorder->setAudioDeviceCallback(callback);
 }
 
 void MediaRecorderClientFuzzer::process() {
-    setConfig();
-
     mStfRecorder->init();
     mStfRecorder->prepare();
-    size_t numRecord = mFdp.ConsumeIntegralInRange<size_t>(kNumRecordMin, kNumRecordMax);
-    for (size_t Idx = 0; Idx < numRecord; ++Idx) {
-        mStfRecorder->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->resume();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->stop();
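+    // Drive the recorder through a random sequence of API calls until the input is exhausted.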
+    while (mFdp.remaining_bytes()) {
+        auto invokeMediaRecorderApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { setConfig(); },
+                [&]() { mStfRecorder->start(); },
+                [&]() { mStfRecorder->pause(); },
+                [&]() { mStfRecorder->resume(); },
+                [&]() { mStfRecorder->stop(); },
+                [&]() { getConfig(); },
+                [&]() { mStfRecorder->close(); },
+                [&]() { mStfRecorder->reset(); },
+        });
+        invokeMediaRecorderApi();
     }
-    dumpInfo();
-    getConfig();
-
-    mStfRecorder->close();
-    mStfRecorder->reset();
 }
 
 extern "C" int LLVMFuzzerInitialize(int /* *argc */, char /* ***argv */) {
@@ -320,6 +404,7 @@
     MediaPlayerService::instantiate();
     AudioFlinger::instantiate();
     ResourceManagerService::instantiate();
+    CameraService::instantiate();
     fakeServiceManager->addService(String16(MediaMetricsService::kServiceName),
                                     new MediaMetricsService());
     return 0;
diff --git a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
index a7cb689..857223d 100644
--- a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
@@ -15,6 +15,8 @@
  *
  */
 
+#include <MediaExtractorService.h>
+#include <MediaPlayerService.h>
 #include <StagefrightMetadataRetriever.h>
 #include <binder/ProcessState.h>
 #include <datasource/FileSource.h>
@@ -54,58 +56,96 @@
                             MEDIA_MIMETYPE_CONTAINER_MPEG2PS,  MEDIA_MIMETYPE_CONTAINER_HEIF,
                             MEDIA_MIMETYPE_TEXT_3GPP,          MEDIA_MIMETYPE_TEXT_SUBRIP,
                             MEDIA_MIMETYPE_TEXT_VTT,           MEDIA_MIMETYPE_TEXT_CEA_608,
-                            MEDIA_MIMETYPE_TEXT_CEA_708,       MEDIA_MIMETYPE_DATA_TIMED_ID3};
+                            MEDIA_MIMETYPE_TEXT_CEA_708,       MEDIA_MIMETYPE_DATA_TIMED_ID3,
+                            MEDIA_MIMETYPE_IMAGE_AVIF,         MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,   MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,  MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,  MEDIA_MIMETYPE_AUDIO_DTS,
+                            MEDIA_MIMETYPE_AUDIO_DTS_HD,       MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+                            MEDIA_MIMETYPE_AUDIO_DTS_UHD,      MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+                            MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,   MEDIA_MIMETYPE_AUDIO_EVRC,
+                            MEDIA_MIMETYPE_AUDIO_EVRCB,        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+                            MEDIA_MIMETYPE_AUDIO_EVRCNW,       MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+                            MEDIA_MIMETYPE_AUDIO_APTX,         MEDIA_MIMETYPE_AUDIO_DRA,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,    MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_MAIN,     MEDIA_MIMETYPE_AUDIO_AAC_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_SSR,      MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+                            MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,    MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ERLC,     MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+                            MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,    MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_XHE,      MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+                            MEDIA_MIMETYPE_AUDIO_AAC_LD,       MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADIF,     MEDIA_MIMETYPE_AUDIO_IEC60958,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ELD,      MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE, MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD, MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+                            MEDIA_MIMETYPE_AUDIO_IEC61937,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,};
+
+constexpr size_t kMaxSize = 100;
 
 class MetadataRetrieverFuzzer {
    public:
     MetadataRetrieverFuzzer(const uint8_t *data, size_t size)
-        : mFdp(data, size),
-          mMdRetriever(new StagefrightMetadataRetriever()),
-          mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)) {}
-    ~MetadataRetrieverFuzzer() { close(mDataSourceFd); }
+        : mFdp(data, size), mMdRetriever(new StagefrightMetadataRetriever()) {}
     bool setDataSource(const uint8_t *data, size_t size);
     void getData();
 
    private:
     FuzzedDataProvider mFdp;
     sp<StagefrightMetadataRetriever> mMdRetriever = nullptr;
-    const int32_t mDataSourceFd;
+    int32_t mDataSourceFd;
 };
 
 void MetadataRetrieverFuzzer::getData() {
-    int64_t timeUs = mFdp.ConsumeIntegral<int64_t>();
-    int32_t option = mFdp.ConsumeIntegral<int32_t>();
-    int32_t colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    bool metaOnly = mFdp.ConsumeBool();
-    mMdRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
-
-    int32_t index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    metaOnly = mFdp.ConsumeBool();
-    bool thumbnail = mFdp.ConsumeBool();
-    mMdRetriever->getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
-
-    index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    int32_t left = mFdp.ConsumeIntegral<int32_t>();
-    int32_t top = mFdp.ConsumeIntegral<int32_t>();
-    int32_t right = mFdp.ConsumeIntegral<int32_t>();
-    int32_t bottom = mFdp.ConsumeIntegral<int32_t>();
-    mMdRetriever->getImageRectAtIndex(index, colorFormat, left, top, right, bottom);
-
-    index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    metaOnly = mFdp.ConsumeBool();
-    mMdRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
-
-    mMdRetriever->extractAlbumArt();
-
-    int32_t keyCode = mFdp.ConsumeIntegral<int32_t>();
-    mMdRetriever->extractMetadata(keyCode);
+    while (mFdp.remaining_bytes()) {
+        auto invokeMediaApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    mMdRetriever->getFrameAtTime(mFdp.ConsumeIntegral<int64_t>() /* timeUs */,
+                                                 mFdp.ConsumeIntegral<int32_t>() /* option */,
+                                                 mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                 mFdp.ConsumeBool() /* metaOnly */);
+                },
+                [&]() {
+                    mMdRetriever->getImageAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+                                                  mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                  mFdp.ConsumeBool() /* metaOnly */,
+                                                  mFdp.ConsumeBool() /* thumbnail */);
+                },
+                [&]() {
+                    mMdRetriever->getImageRectAtIndex(
+                            mFdp.ConsumeIntegral<int32_t>() /* index */,
+                            mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                            mFdp.ConsumeIntegral<int32_t>() /* left */,
+                            mFdp.ConsumeIntegral<int32_t>() /* top */,
+                            mFdp.ConsumeIntegral<int32_t>() /* right */,
+                            mFdp.ConsumeIntegral<int32_t>() /* bottom */);
+                },
+                [&]() {
+                    mMdRetriever->getFrameAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+                                                  mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                  mFdp.ConsumeBool() /* metaOnly */);
+                },
+                [&]() { mMdRetriever->extractAlbumArt(); },
+                [&]() {
+                    mMdRetriever->extractMetadata(mFdp.ConsumeIntegral<int32_t>() /* keyCode */);
+                },
+        });
+        invokeMediaApi();
+    }
 }
 
 bool MetadataRetrieverFuzzer::setDataSource(const uint8_t *data, size_t size) {
     status_t status = -1;
+    std::unique_ptr<std::FILE, decltype(&fclose)> fp(tmpfile(), &fclose);
+    mDataSourceFd = fileno(fp.get());
+    if (mDataSourceFd < 0) {
+        return false;
+    }
 
     enum DataSourceChoice {FromHttp, FromFd, FromFileSource, kMaxValue = FromFileSource};
     switch (mFdp.ConsumeEnum<DataSourceChoice>()) {
@@ -114,7 +154,7 @@
             mHeaders.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
                          String8(mFdp.ConsumeRandomLengthString().c_str()));
 
-            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, min(kMaxSize, size));
             vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
 
             string uri("data:");
@@ -146,6 +186,12 @@
     return true;
 }
 
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    MediaPlayerService::instantiate();
+    MediaExtractorService::instantiate();
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     MetadataRetrieverFuzzer mrtFuzzer(data, size);
     ProcessState::self()->startThreadPool();
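The refactored fuzzer above replaces the old fixed one-call-per-API sequence with a loop that keeps picking a random API lambda until the fuzzed input is exhausted, so a single input can exercise arbitrary call orderings. A minimal standalone sketch of that FuzzedDataProvider pattern, with a made-up target class rather than the retriever:

    #include <cstdint>
    #include <functional>
    #include <fuzzer/FuzzedDataProvider.h>

    // Hypothetical target, used only to illustrate the dispatch pattern.
    struct FakeApi {
        void seekTo(int64_t /* timeUs */) {}
        void fetch(int32_t /* index */, bool /* metaOnly */) {}
    };

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        FakeApi api;
        // Keep consuming bytes to pick the next call and its arguments until
        // the provider runs dry; each iteration invokes exactly one API.
        while (fdp.remaining_bytes()) {
            auto invokeApi = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { api.seekTo(fdp.ConsumeIntegral<int64_t>()); },
                    [&]() { api.fetch(fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeBool()); },
            });
            invokeApi();
        }
        return 0;
    }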
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index d58619f..157ebd7 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -32,7 +32,7 @@
     // negotiate with source
     NBAIO_Format counterOffers[1];
     size_t numCounterOffers = 1;
-    ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
+    [[maybe_unused]] ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
     ALOG_ASSERT(index == (ssize_t) NEGOTIATE && numCounterOffers > 0);
     numCounterOffers = 0;
     index = source->negotiate(counterOffers, 1, NULL, numCounterOffers);
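The [[maybe_unused]] attribute above is needed because ALOG_ASSERT compiles away in builds where it is disabled, which would otherwise leave `index` unused and trigger the unused-variable warning. The same pattern in a generic, self-contained form (the helper name is illustrative):

    #include <cassert>

    int negotiateFormat();  // illustrative helper

    void example() {
        // 'result' is only read inside assert(), which NDEBUG strips out,
        // so the attribute keeps the unused-variable warning quiet.
        [[maybe_unused]] int result = negotiateFormat();
        assert(result >= 0);
    }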
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index 6e48078..486a34f 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2145dd9..f9ceef2 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -2188,7 +2188,7 @@
                 int32_t colorFormat = OMX_COLOR_FormatUnused;
                 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
                 if (!outputFormat->findInt32("color-format", &colorFormat)) {
-                    ALOGE("ouptut port did not have a color format (wrong domain?)");
+                    ALOGE("output port did not have a color format (wrong domain?)");
                     return BAD_VALUE;
                 }
                 ALOGD("[%s] Requested output format %#x and got %#x.",
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 67ab188..6e2eef3 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -137,7 +137,7 @@
             cflags: [
                 "-DDISABLE_AUDIO_SYSTEM_OFFLOAD",
             ],
-        }
+        },
     },
 }
 
@@ -276,7 +276,7 @@
         "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
         "VideoRenderQualityTracker.cpp",
-      ],
+    ],
 
     shared_libs: [
         "libstagefright_framecapture_utils",
@@ -318,6 +318,7 @@
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
         "server_configurable_flags",
+        "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
@@ -333,7 +334,7 @@
         "libmedia_ndkformatpriv",
     ],
 
-    header_libs:[
+    header_libs: [
         "libmediadrm_headers",
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 8b5c8ed..0fc78ec 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -30,6 +30,36 @@
 
 namespace android {
 
+CryptoAsync::CryptoAsyncInfo::CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info) {
+    if (info == nullptr) {
+        return;
+    }
+    size_t key_len = (info->mKey != nullptr)? 16 : 0;
+    size_t iv_len = (info->mIv != nullptr)? 16 : 0;
+    mNumSubSamples = info->mNumSubSamples;
+    mMode = info->mMode;
+    mPattern = info->mPattern;
+    if (key_len > 0) {
+        mKeyBuffer = ABuffer::CreateAsCopy((void*)info->mKey, key_len);
+        mKey = (uint8_t*)(mKeyBuffer.get() != nullptr ? mKeyBuffer.get()->data() : nullptr);
+    }
+    if (iv_len > 0) {
+        mIvBuffer = ABuffer::CreateAsCopy((void*)info->mIv, iv_len);
+        mIv = (uint8_t*)(mIvBuffer.get() != nullptr ? mIvBuffer.get()->data() : nullptr);
+    }
+    mSubSamplesBuffer =
+        new ABuffer(sizeof(CryptoPlugin::SubSample) * mNumSubSamples);
+    if (mSubSamplesBuffer.get()) {
+        CryptoPlugin::SubSample * samples =
+           (CryptoPlugin::SubSample *)(mSubSamplesBuffer.get()->data());
+        for (int s = 0 ; s < mNumSubSamples ; s++) {
+            samples[s].mNumBytesOfClearData = info->mSubSamples[s].mNumBytesOfClearData;
+            samples[s].mNumBytesOfEncryptedData = info->mSubSamples[s].mNumBytesOfEncryptedData;
+        }
+        mSubSamples = (CryptoPlugin::SubSample *)mSubSamplesBuffer.get()->data();
+    }
+}
+
 CryptoAsync::~CryptoAsync() {
 }
 
@@ -79,23 +109,27 @@
     sp<ABuffer> keyBuffer;
     sp<ABuffer> ivBuffer;
     sp<ABuffer> subSamplesBuffer;
-    msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
-    msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
-    msg->findBuffer("key", &keyBuffer);
-    msg->findBuffer("iv", &ivBuffer);
-    msg->findBuffer("subSamples", &subSamplesBuffer);
-    msg->findInt32("secure", &secure);
-    msg->findSize("numSubSamples", &numSubSamples);
-    msg->findObject("buffer", &obj);
-    msg->findInt32("mode", (int32_t*)&mode);
     AString errorDetailMsg;
-    const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
-    const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
-    const CryptoPlugin::SubSample * subSamples =
-       (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+    msg->findObject("buffer", &obj);
+    msg->findInt32("secure", &secure);
     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-    err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
-        pattern, subSamples, numSubSamples, &errorDetailMsg);
+    if (buffer->meta()->findObject("cryptoInfos", &obj)) {
+        err = channel->queueSecureInputBuffers(buffer, secure, &errorDetailMsg);
+    } else {
+        msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
+        msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
+        msg->findBuffer("key", &keyBuffer);
+        msg->findBuffer("iv", &ivBuffer);
+        msg->findBuffer("subSamples", &subSamplesBuffer);
+        msg->findSize("numSubSamples", &numSubSamples);
+        msg->findInt32("mode", (int32_t*)&mode);
+        const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
+        const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
+        const CryptoPlugin::SubSample * subSamples =
+           (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+        err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
+            pattern, subSamples, numSubSamples, &errorDetailMsg);
+    }
     if (err != OK) {
         std::list<sp<AMessage>> errorList;
         msg->removeEntryByName("buffer");
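CryptoAsync::CryptoAsyncInfo above deep-copies the key, IV, and subsample table into ABuffer-backed storage that the new object owns, because the incoming CodecCryptoInfo only holds raw pointers whose lifetime belongs to the caller, while the decrypt work is handed to the CryptoAsync looper and runs later. A hedged sketch of the same ownership idea using plain std::vector instead of the framework types (struct names here are illustrative):

    #include <cstdint>
    #include <vector>

    // Non-owning view, analogous to CodecCryptoInfo: raw pointers only.
    struct CryptoView {
        const uint8_t *key = nullptr;         // 16 bytes when present
        const uint8_t *iv = nullptr;          // 16 bytes when present
        const uint8_t *subSamples = nullptr;  // packed subsample records
        size_t subSampleBytes = 0;
    };

    // Owning copy, analogous to CryptoAsyncInfo: safe to queue for later use.
    struct CryptoCopy {
        std::vector<uint8_t> key, iv, subSamples;

        explicit CryptoCopy(const CryptoView &v) {
            if (v.key != nullptr) key.assign(v.key, v.key + 16);
            if (v.iv != nullptr) iv.assign(v.iv, v.iv + 16);
            if (v.subSamples != nullptr && v.subSampleBytes > 0) {
                subSamples.assign(v.subSamples, v.subSamples + v.subSampleBytes);
            }
        }
    };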
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a0a2891..e918b5e 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -172,6 +172,7 @@
     const char *getTrackType() const;
     void resetInternal();
     int64_t trackMetaDataSize();
+    bool isTimestampValid(int64_t timeUs);
 
 private:
     // A helper class to handle faster write box with table entries
@@ -1639,6 +1640,11 @@
     ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
     if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
         ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+        if (mMaxOffsetAppend > offset) {
+            // This has already been appended, skip updating mOffset value.
+            *bytesWritten = buffer->range_length();
+            return offset;
+        }
         if (old_offset == offset) {
             mOffset += buffer->range_length();
         } else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 46a3587..e79e55b 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -271,6 +271,37 @@
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+
+// Multi access unit helpers
+static status_t generateFlagsFromAccessUnitInfo(
+        sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
+    msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
+    msg->setInt32("flags", bufferInfos->value[0].mFlags);
+    // Setting these from the first entry avoids copying access-unit info in the
+    // single access unit case.
+    if (bufferInfos->value.size() > 1) {
+        uint32_t bufferFlags = 0;
+        uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
+        uint32_t andFlags = flagsInAllAU;
+        int infoIdx = 0;
+        bool foundEndOfStream = false;
+        for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
+            bufferFlags |= bufferInfos->value[infoIdx].mFlags;
+            andFlags &= bufferInfos->value[infoIdx].mFlags;
+            if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+                foundEndOfStream = true;
+            }
+        }
+        bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
+        if (infoIdx != bufferInfos->value.size()) {
+            ALOGE("Error: incorrect access-units");
+            return -EINVAL;
+        }
+        msg->setInt32("flags", bufferFlags);
+    }
+    return OK;
+}
+
 static int64_t getId(IResourceManagerClient const * client) {
     return (int64_t) client;
 }
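generateFlagsFromAccessUnitInfo folds the per-access-unit flags into one value for the whole batch: flags are OR-ed together, except that DECODE_ONLY and CODEC_CONFIG survive only when every access unit carries them, and scanning stops at the first end-of-stream unit. A standalone sketch of just that folding rule (the constants are illustrative stand-ins, and the validation that EOS must be on the last unit is omitted):

    #include <cstdint>
    #include <vector>

    constexpr uint32_t kCodecConfig = 2;   // illustrative values, not the real enums
    constexpr uint32_t kEndOfStream = 4;
    constexpr uint32_t kDecodeOnly  = 32;

    uint32_t foldAccessUnitFlags(const std::vector<uint32_t> &auFlags) {
        const uint32_t onlyIfInAll = kDecodeOnly | kCodecConfig;
        uint32_t orFlags = 0;
        uint32_t andFlags = onlyIfInAll;
        for (uint32_t f : auFlags) {
            orFlags |= f;
            andFlags &= f;
            if (orFlags & kEndOfStream) break;  // stop at the first EOS unit
        }
        // Keep every OR-ed flag, but DECODE_ONLY/CODEC_CONFIG only if all units had them.
        return orFlags & (andFlags | ~onlyIfInAll);
    }

    // foldAccessUnitFlags({kDecodeOnly, kDecodeOnly}) == kDecodeOnly
    // foldAccessUnitFlags({kDecodeOnly, 0})           == 0
    // foldAccessUnitFlags({0, kEndOfStream})          == kEndOfStream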
@@ -318,8 +349,8 @@
                 return Status::fromStatus(STATUS_INVALID_OPERATION);
             }
             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(this)};
+                                        .uid = static_cast<int32_t>(mUid),
+                                        .id = getId(this)};
             service->removeClient(clientInfo);
             *_aidl_return = true;
             return Status::ok();
@@ -396,6 +427,10 @@
         mCodecName = name;
     }
 
+    inline void setImportance(int importance) {
+        mImportance = importance;
+    }
+
 private:
     // To get the binder interface to ResourceManagerService.
     void getService() {
@@ -435,12 +470,30 @@
         mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
     }
 
+    /**
+     * Get the ClientInfo to communicate with the ResourceManager.
+     *
+     * ClientInfo includes:
+     *   - {pid, uid} of the process
+     *   - identifier for the client
+     *   - name of the client/codec
+     *   - importance associated with the client
+     */
+    inline ClientInfoParcel getClientInfo() const {
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                    .uid = static_cast<int32_t>(mUid),
+                                    .id = getId(mClient),
+                                    .name = mCodecName,
+                                    .importance = mImportance};
+        return std::move(clientInfo);
+    }
 
 private:
-    std::mutex mLock;
-    pid_t mPid;
-    uid_t mUid;
-    bool mBinderDied = false;
+    std::mutex  mLock;
+    bool        mBinderDied = false;
+    pid_t       mPid;
+    uid_t       mUid;
+    int         mImportance = 0;
     std::string mCodecName;
     /**
      * Reconnecting with the ResourceManagerService, after its binder interface dies,
@@ -548,11 +601,7 @@
     std::vector<MediaResourceParcel> resources;
     std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
               std::back_inserter(resources));
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    mService->addResource(clientInfo, mClient, resources);
+    mService->addResource(getClientInfo(), mClient, resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
@@ -585,11 +634,7 @@
     }
     std::vector<MediaResourceParcel> resources;
     resources.push_back(resource);
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    service->addResource(clientInfo, mClient, resources);
+    service->addResource(getClientInfo(), mClient, resources);
     mMediaResourceParcel.emplace(resource);
 }
 
@@ -603,11 +648,7 @@
     }
     std::vector<MediaResourceParcel> resources;
     resources.push_back(resource);
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    service->removeResource(clientInfo, resources);
+    service->removeResource(getClientInfo(), resources);
     mMediaResourceParcel.erase(resource);
 }
 
@@ -618,11 +659,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    service->removeClient(clientInfo);
+    service->removeClient(getClientInfo());
     mMediaResourceParcel.clear();
 }
 
@@ -633,11 +670,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    service->markClientForPendingRemoval(clientInfo);
+    service->markClientForPendingRemoval(getClientInfo());
     mMediaResourceParcel.clear();
 }
 
@@ -650,11 +683,7 @@
         return false;
     }
     bool success;
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    Status status = service->reclaimResource(clientInfo, resources, &success);
+    Status status = service->reclaimResource(getClientInfo(), resources, &success);
     return status.isOk() && success;
 }
 
@@ -665,11 +694,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
-                                .uid = static_cast<int32_t>(mUid),
-                                .id = getId(mClient),
-                                .name = mCodecName};
-    service->notifyClientCreated(clientInfo);
+    service->notifyClientCreated(getClientInfo());
 }
 
 void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
@@ -680,10 +705,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
-    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
-    clientConfig.clientInfo.id = getId(mClient);
-    clientConfig.clientInfo.name = mCodecName;
+    clientConfig.clientInfo = getClientInfo();
     service->notifyClientStarted(clientConfig);
 }
 
@@ -695,10 +717,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
-    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
-    clientConfig.clientInfo.id = getId(mClient);
-    clientConfig.clientInfo.name = mCodecName;
+    clientConfig.clientInfo = getClientInfo();
     service->notifyClientStopped(clientConfig);
 }
 
@@ -710,10 +729,7 @@
         ALOGW("Service isn't available");
         return;
     }
-    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
-    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
-    clientConfig.clientInfo.id = getId(mClient);
-    clientConfig.clientInfo.name = mCodecName;
+    clientConfig.clientInfo = getClientInfo();
     service->notifyClientConfigChanged(clientConfig);
 }
 
@@ -1714,6 +1730,21 @@
     }
 }
 
+void MediaCodec::updateCodecImportance(const sp<AMessage>& msg) {
+    // Update the codec importance.
+    int32_t importance = 0;
+    if (msg->findInt32(KEY_IMPORTANCE, &importance)) {
+        // Ignore negative importance values.
+        if (importance >= 0) {
+            // Notify RM about the change in the importance.
+            mResourceManagerProxy->setImportance(importance);
+            ClientConfigParcel clientConfig;
+            initClientConfigParcel(clientConfig);
+            mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
+        }
+    }
+}
+
 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
     switch(state) {
         case TunnelPeekState::kLegacyMode:
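Because updateCodecImportance is invoked from both configure() and setParameters(), a client can adjust a codec's importance at runtime by passing the "importance" key; negative values are ignored and non-negative ones are forwarded to the resource manager. A hedged framework-side usage sketch (the surrounding codec setup is assumed; only the key handling matters here):

    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Assumes 'codec' is an already-configured sp<MediaCodec>.
    static void deprioritizeCodec(const sp<MediaCodec> &codec) {
        sp<AMessage> params = new AMessage;
        // KEY_IMPORTANCE ("importance"): negative values are ignored by
        // updateCodecImportance; the scale itself is interpreted by the
        // resource manager.
        params->setInt32("importance", 2);
        codec->setParameters(params);
    }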
@@ -2222,23 +2253,12 @@
 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                       bool reverse);
 
-status_t MediaCodec::configure(
-        const sp<AMessage> &format,
-        const sp<Surface> &nativeWindow,
-        const sp<ICrypto> &crypto,
-        uint32_t flags) {
-    return configure(format, nativeWindow, crypto, NULL, flags);
-}
-
-status_t MediaCodec::configure(
-        const sp<AMessage> &format,
-        const sp<Surface> &surface,
-        const sp<ICrypto> &crypto,
-        const sp<IDescrambler> &descrambler,
-        uint32_t flags) {
-
-    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
+                                                     uint32_t flags,
+                                                     status_t* err) {
+    *err = OK;
     mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
+    bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
 
     // TODO: validity check log-session-id: it should be a 32-hex-digit.
     format->findString("log-session-id", &mLogSessionId);
@@ -2253,8 +2273,7 @@
         if (format->findInt32("level", &level)) {
             mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
         }
-        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
-                              (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
+        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder, isEncoder);
 
         if (!mLogSessionId.empty()) {
             mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
@@ -2317,7 +2336,9 @@
             mErrorLog.log(LOG_TAG, base::StringPrintf(
                     "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
             mediametrics_delete(nextMetricsHandle);
-            return BAD_VALUE;
+            // Set the error code and return null handle.
+            *err = BAD_VALUE;
+            return 0;
         }
 
     } else {
@@ -2333,7 +2354,7 @@
         }
     }
 
-    if (flags & CONFIGURE_FLAG_ENCODE) {
+    if (isEncoder) {
         int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
                                                  enableMediaFormatShapingDefault);
         if (!enableShaping) {
@@ -2378,6 +2399,35 @@
 
     updateLowLatency(format);
 
+    return nextMetricsHandle;
+}
+
+status_t MediaCodec::configure(
+        const sp<AMessage> &format,
+        const sp<Surface> &nativeWindow,
+        const sp<ICrypto> &crypto,
+        uint32_t flags) {
+    return configure(format, nativeWindow, crypto, NULL, flags);
+}
+
+status_t MediaCodec::configure(
+        const sp<AMessage> &format,
+        const sp<Surface> &surface,
+        const sp<ICrypto> &crypto,
+        const sp<IDescrambler> &descrambler,
+        uint32_t flags) {
+
+    // Update the codec importance.
+    updateCodecImportance(format);
+
+    // Create and set up metrics for this codec.
+    status_t err = OK;
+    mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
+    if (err != OK) {
+        return err;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
     msg->setMessage("format", format);
     msg->setInt32("flags", flags);
     msg->setObject("surface", surface);
@@ -2410,7 +2460,6 @@
 
     sp<AMessage> callback = mCallback;
 
-    status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
             toMediaResourceSubType(mIsHardware, mDomain)));
@@ -3164,7 +3213,49 @@
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
     msg->setPointer("errorDetailMsg", errorDetailMsg);
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
 
+status_t MediaCodec::queueInputBuffers(
+        size_t index,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &infos,
+        AString *errorDetailMsg) {
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    uint32_t bufferFlags = 0;
+    uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+    uint32_t andFlags = flagsinAllAU;
+    if (infos == nullptr || infos->value.empty()) {
+        ALOGE("ERROR: Large Audio frame with no BufferInfo");
+        return BAD_VALUE;
+    }
+    int infoIdx = 0;
+    std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
+    int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+    bool foundEndOfStream = false;
+    for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+        bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+        andFlags &= accessUnitInfo[infoIdx].mFlags;
+        if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+            foundEndOfStream = true;
+        }
+    }
+    bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+    if (infoIdx != accessUnitInfo.size()) {
+        ALOGE("queueInputBuffers has incorrect access-units");
+        return -EINVAL;
+    }
+    msg->setSize("index", index);
+    msg->setSize("offset", offset);
+    msg->setSize("size", size);
+    msg->setInt64("timeUs", minTimeUs);
+    // Make this represent flags for the entire buffer
+    // decodeOnly Flag is set only when all buffers are decodeOnly
+    msg->setInt32("flags", bufferFlags);
+    msg->setObject("accessUnitInfo", infos);
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
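The new queueInputBuffers() entry point queues one input buffer that carries several access units back to back; the caller describes each unit with an AccessUnitInfo (flags, byte size, presentation time) wrapped in a BufferInfosWrapper, and the code above folds the flags and uses the first unit's timestamp on the message. A hedged caller-side sketch (codec setup and the actual payload copy are assumed to have happened elsewhere; the timestamps are arbitrary examples):

    #include <media/stagefright/CodecBase.h>   // AccessUnitInfo
    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/AString.h>

    using namespace android;

    // Assumes 'codec' was configured with the buffer-batch keys in async mode and
    // 'index' is a dequeued input buffer already filled with two access units.
    static status_t queueTwoAccessUnits(const sp<MediaCodec> &codec, size_t index,
                                        size_t firstSize, size_t secondSize) {
        sp<BufferInfosWrapper> infos =
                new BufferInfosWrapper(std::vector<AccessUnitInfo>());
        infos->value.emplace_back(0 /* flags */, firstSize, 0 /* ptsUs */);
        infos->value.emplace_back(0 /* flags */, secondSize, 21333 /* ptsUs */);
        AString errorDetailMsg;
        return codec->queueInputBuffers(index, 0 /* offset */, firstSize + secondSize,
                                        infos, &errorDetailMsg);
    }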
@@ -3205,27 +3296,50 @@
     return err;
 }
 
-status_t MediaCodec::queueBuffer(
+status_t MediaCodec::queueSecureInputBuffers(
         size_t index,
-        const std::shared_ptr<C2Buffer> &buffer,
-        int64_t presentationTimeUs,
-        uint32_t flags,
-        const sp<AMessage> &tunings,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &auInfo,
+        const sp<CryptoInfosWrapper> &cryptoInfos,
         AString *errorDetailMsg) {
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
-
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
-    msg->setSize("index", index);
-    sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
-        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
-    msg->setObject("c2buffer", obj);
-    msg->setInt64("timeUs", presentationTimeUs);
-    msg->setInt32("flags", flags);
-    if (tunings && tunings->countEntries() > 0) {
-        msg->setMessage("tunings", tunings);
+    uint32_t bufferFlags = 0;
+    uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+    uint32_t andFlags = flagsinAllAU;
+    if (auInfo == nullptr
+            || auInfo->value.empty()
+            || cryptoInfos == nullptr
+            || cryptoInfos->value.empty()) {
+        ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
+        return BAD_VALUE;
     }
+    int infoIdx = 0;
+    std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
+    int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+    bool foundEndOfStream = false;
+    for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+        bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+        andFlags &= accessUnitInfo[infoIdx].mFlags;
+        if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+            foundEndOfStream = true;
+        }
+    }
+    bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+    if (infoIdx != accessUnitInfo.size()) {
+        ALOGE("queueInputBuffers has incorrect access-units");
+        return -EINVAL;
+    }
+    msg->setSize("index", index);
+    msg->setSize("offset", offset);
+    msg->setSize("ssize", size);
+    msg->setInt64("timeUs", minTimeUs);
+    msg->setInt32("flags", bufferFlags);
+    msg->setObject("accessUnitInfo", auInfo);
+    msg->setObject("cryptoInfos", cryptoInfos);
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -3234,46 +3348,76 @@
     return err;
 }
 
-status_t MediaCodec::queueEncryptedBuffer(
+status_t MediaCodec::queueBuffer(
         size_t index,
-        const sp<hardware::HidlMemory> &buffer,
-        size_t offset,
-        const CryptoPlugin::SubSample *subSamples,
-        size_t numSubSamples,
-        const uint8_t key[16],
-        const uint8_t iv[16],
-        CryptoPlugin::Mode mode,
-        const CryptoPlugin::Pattern &pattern,
-        int64_t presentationTimeUs,
-        uint32_t flags,
+        const std::shared_ptr<C2Buffer> &buffer,
+        const sp<BufferInfosWrapper> &bufferInfos,
         const sp<AMessage> &tunings,
         AString *errorDetailMsg) {
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
+    if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+        return BAD_VALUE;
+    }
+    status_t err = OK;
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    msg->setSize("index", index);
+    sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
+    msg->setObject("c2buffer", obj);
+    if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+        return err;
+    }
+    msg->setObject("accessUnitInfo", bufferInfos);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
+    sp<AMessage> response;
+    err = PostAndAwaitResponse(msg, &response);
 
+    return err;
+}
+
+status_t MediaCodec::queueEncryptedBuffer(
+        size_t index,
+        const sp<hardware::HidlMemory> &buffer,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &bufferInfos,
+        const sp<CryptoInfosWrapper> &cryptoInfos,
+        const sp<AMessage> &tunings,
+        AString *errorDetailMsg) {
+    if (errorDetailMsg != NULL) {
+        errorDetailMsg->clear();
+    }
+    if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+        return BAD_VALUE;
+    }
+    if (cryptoInfos == nullptr || cryptoInfos->value.empty()) {
+        return BAD_VALUE;
+    }
+    status_t err = OK;
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
     msg->setSize("index", index);
     sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
     msg->setObject("memory", memory);
     msg->setSize("offset", offset);
-    msg->setPointer("subSamples", (void *)subSamples);
-    msg->setSize("numSubSamples", numSubSamples);
-    msg->setPointer("key", (void *)key);
-    msg->setPointer("iv", (void *)iv);
-    msg->setInt32("mode", mode);
-    msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
-    msg->setInt32("skipBlocks", pattern.mSkipBlocks);
-    msg->setInt64("timeUs", presentationTimeUs);
-    msg->setInt32("flags", flags);
+    msg->setSize("ssize", size);
+    msg->setObject("cryptoInfos", cryptoInfos);
+    msg->setObject("accessUnitInfo", bufferInfos);
+    if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+        return err;
+    }
     if (tunings && tunings->countEntries() > 0) {
         msg->setMessage("tunings", tunings);
     }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
-    status_t err = PostAndAwaitResponse(msg, &response);
+    err = PostAndAwaitResponse(msg, &response);
 
     return err;
 }
@@ -4748,6 +4892,35 @@
                     }
                 }
             }
+            int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
+            if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
+                    format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+                    &largeFrameParamThreshold)) {
+                if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
+                    if (mComponentName.startsWith("OMX")) {
+                        mErrorLog.log(LOG_TAG,
+                                "Large Frame params are not supported on OMX codecs. "
+                                "Currently only supported on C2 audio codecs.");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                    AString mime;
+                    CHECK(format->findString("mime", &mime));
+                    if (!mime.startsWith("audio")) {
+                        mErrorLog.log(LOG_TAG,
+                                "Large Frame params only work with audio codecs");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                    if (!(mFlags & kFlagIsAsync)) {
+                        mErrorLog.log(LOG_TAG,
+                                "Large Frame audio config works only with async mode");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                }
+            }
+
             mReplyID = replyID;
             setState(CONFIGURING);
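The checks above gate the large-frame (buffer batch) output keys: they are rejected for OMX components, for non-audio MIME types, and when the codec is not in asynchronous mode. A hedged configure-side sketch showing the keys being set (byte sizes are arbitrary examples; a callback is assumed to have been registered so the codec is in async mode):

    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Assumes 'codec' is a C2 audio decoder and setCallback() was already called.
    static status_t configureWithLargeFrames(const sp<MediaCodec> &codec) {
        sp<AMessage> format = new AMessage;
        format->setString("mime", "audio/mp4a-latm");
        format->setInt32("sample-rate", 48000);
        format->setInt32("channel-count", 2);
        // buffer-batch-max-output-size / buffer-batch-threshold-output-size:
        // ask the codec to batch decoded output into large multi-access-unit frames.
        format->setInt32("buffer-batch-max-output-size", 64 * 1024);
        format->setInt32("buffer-batch-threshold-output-size", 16 * 1024);
        return codec->configure(format, nullptr /* surface */, nullptr /* crypto */,
                                0 /* flags */);
    }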
 
@@ -5896,10 +6069,10 @@
 
 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
     size_t index;
-    size_t offset;
-    size_t size;
-    int64_t timeUs;
-    uint32_t flags;
+    size_t offset = 0;
+    size_t size = 0;
+    int64_t timeUs = 0;
+    uint32_t flags = 0;
     CHECK(msg->findSize("index", &index));
     CHECK(msg->findInt64("timeUs", &timeUs));
     CHECK(msg->findInt32("flags", (int32_t *)&flags));
@@ -5944,22 +6117,26 @@
             mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
             return -EINVAL;
         }
-        CHECK(msg->findPointer("subSamples", (void **)&subSamples));
-        CHECK(msg->findSize("numSubSamples", &numSubSamples));
-        CHECK(msg->findPointer("key", (void **)&key));
-        CHECK(msg->findPointer("iv", (void **)&iv));
-        CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
-        CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
+        sp<RefBase> obj;
+        if (msg->findObject("cryptoInfos", &obj)) {
+            CHECK(msg->findSize("ssize", &size));
+        } else {
+            CHECK(msg->findPointer("subSamples", (void **)&subSamples));
+            CHECK(msg->findSize("numSubSamples", &numSubSamples));
+            CHECK(msg->findPointer("key", (void **)&key));
+            CHECK(msg->findPointer("iv", (void **)&iv));
+            CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
+            CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
 
-        int32_t tmp;
-        CHECK(msg->findInt32("mode", &tmp));
+            int32_t tmp;
+            CHECK(msg->findInt32("mode", &tmp));
 
-        mode = (CryptoPlugin::Mode)tmp;
-
-        size = 0;
-        for (size_t i = 0; i < numSubSamples; ++i) {
-            size += subSamples[i].mNumBytesOfClearData;
-            size += subSamples[i].mNumBytesOfEncryptedData;
+            mode = (CryptoPlugin::Mode)tmp;
+            size = 0;
+            for (size_t i = 0; i < numSubSamples; ++i) {
+                size += subSamples[i].mNumBytesOfClearData;
+                size += subSamples[i].mNumBytesOfEncryptedData;
+            }
         }
     }
 
@@ -5981,9 +6158,13 @@
                 "client does not own the buffer #%zu", index));
         return -EACCES;
     }
-    auto setInputBufferParams = [this, &buffer]
+    auto setInputBufferParams = [this, &msg, &buffer]
         (int64_t timeUs, uint32_t flags = 0) -> status_t {
         status_t err = OK;
+        sp<RefBase> obj;
+        if (msg->findObject("accessUnitInfo", &obj)) {
+            buffer->meta()->setObject("accessUnitInfo", obj);
+        }
         buffer->meta()->setInt64("timeUs", timeUs);
         if (flags & BUFFER_FLAG_EOS) {
             buffer->meta()->setInt32("eos", true);
@@ -6020,35 +6201,48 @@
      return err;
     };
     auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
-        size_t key_len = (key != nullptr)? 16 : 0;
-        size_t iv_len = (iv != nullptr)? 16 : 0;
-        sp<ABuffer> shared_key;
-        sp<ABuffer> shared_iv;
-        if (key_len > 0) {
-            shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
-        }
-        if (iv_len > 0) {
-            shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
-        }
-        sp<ABuffer> subSamples_buffer =
-            new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
-        CryptoPlugin::SubSample * samples =
-           (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
-        for (int s = 0 ; s < numSubSamples ; s++) {
-            samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
-            samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
-        }
         // set decrypt Action
         cryptoInfo->setInt32("action", action);
         cryptoInfo->setObject("buffer", buffer);
         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
-        cryptoInfo->setBuffer("key", shared_key);
-        cryptoInfo->setBuffer("iv", shared_iv);
-        cryptoInfo->setInt32("mode", (int)mode);
-        cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
-        cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
-        cryptoInfo->setBuffer("subSamples", subSamples_buffer);
-        cryptoInfo->setSize("numSubSamples", numSubSamples);
+        sp<RefBase> obj;
+        if (msg->findObject("cryptoInfos", &obj)) {
+            sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
+            sp<CryptoInfosWrapper> asyncInfos{
+                    new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
+            for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
+                if (info) {
+                    asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
+                }
+            }
+            buffer->meta()->setObject("cryptoInfos", asyncInfos);
+        } else {
+            size_t key_len = (key != nullptr)? 16 : 0;
+            size_t iv_len = (iv != nullptr)? 16 : 0;
+            sp<ABuffer> shared_key;
+            sp<ABuffer> shared_iv;
+            if (key_len > 0) {
+                shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
+            }
+            if (iv_len > 0) {
+                shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
+            }
+            sp<ABuffer> subSamples_buffer =
+                new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
+            CryptoPlugin::SubSample * samples =
+               (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
+            for (int s = 0 ; s < numSubSamples ; s++) {
+                samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
+                samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
+            }
+            cryptoInfo->setBuffer("key", shared_key);
+            cryptoInfo->setBuffer("iv", shared_iv);
+            cryptoInfo->setInt32("mode", (int)mode);
+            cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+            cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
+            cryptoInfo->setBuffer("subSamples", subSamples_buffer);
+            cryptoInfo->setSize("numSubSamples", numSubSamples);
+        }
     };
     if (c2Buffer || memory) {
         sp<AMessage> tunings = NULL;
@@ -6058,15 +6252,37 @@
         status_t err = OK;
         if (c2Buffer) {
             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
+            // to prevent unnecessary copy for single info case.
+            if (msg->findObject("accessUnitInfo", &obj)) {
+                sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
+                if (infos->value.size() == 1) {
+                   msg->removeEntryByName("accessUnitInfo");
+                }
+            }
         } else if (memory) {
             AString errorDetailMsg;
-            err = mBufferChannel->attachEncryptedBuffer(
-                    memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
-                    offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            if (msg->findObject("cryptoInfos", &obj)) {
+                buffer->meta()->setSize("ssize", size);
+                buffer->meta()->setObject("cryptoInfos", obj);
+                if (msg->findObject("accessUnitInfo", &obj)) {
+                    // The same reference is attached to the buffer meta again
+                    // when setInputBufferParams runs.
+                    buffer->meta()->setObject("accessUnitInfo", obj);
+                }
+                err = mBufferChannel->attachEncryptedBuffers(
+                    memory,
+                    offset,
+                    buffer,
+                    (mFlags & kFlagIsSecure),
+                    &errorDetailMsg);
+            } else {
+                err = mBufferChannel->attachEncryptedBuffer(
+                        memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
+                        offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            }
             if (err != OK && hasCryptoOrDescrambler()
                     && (mFlags & kFlagUseCryptoAsync)) {
                 // create error detail
-                AString errorDetailMsg;
                 sp<AMessage> cryptoErrorInfo = new AMessage();
                 buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
                 cryptoErrorInfo->setInt32("err", err);
@@ -6138,10 +6354,17 @@
             }
         }
         if (mCryptoAsync) {
+            // TODO b/316565675 - enable async path for audio
             // prepare a message and enqueue
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
             mCryptoAsync->decrypt(cryptoInfo);
+        } else if (msg->findObject("cryptoInfos", &obj)) {
+                buffer->meta()->setObject("cryptoInfos", obj);
+                err = mBufferChannel->queueSecureInputBuffers(
+                        buffer,
+                        (mFlags & kFlagIsSecure),
+                        errorDetailMsg);
         } else {
             err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
@@ -6493,10 +6716,11 @@
         if (discardDecodeOnlyOutputBuffer(index)) {
             continue;
         }
+        sp<AMessage> msg = mCallback->dup();
         const sp<MediaCodecBuffer> &buffer =
             mPortBuffers[kPortIndexOutput][index].mData;
-        sp<AMessage> msg = mCallback->dup();
-        msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+        int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
+        sp<RefBase> accessUnitInfoObj;
         msg->setInt32("index", index);
         msg->setSize("offset", buffer->offset());
         msg->setSize("size", buffer->size());
@@ -6510,6 +6734,15 @@
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         msg->setInt32("flags", flags);
+        buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
+        if (accessUnitInfoObj) {
+            outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
+            msg->setObject("accessUnitInfo", accessUnitInfoObj);
+            sp<BufferInfosWrapper> auInfo(
+                    (decltype(auInfo.get()))accessUnitInfoObj.get());
+            auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
+        }
+        msg->setInt32("callbackID", outputCallbackID);
 
         statsBufferReceived(timeUs, buffer);
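When an output buffer carries access-unit metadata, the callback above is delivered with callbackID CB_LARGE_FRAME_OUTPUT_AVAILABLE and an "accessUnitInfo" object, with any end-of-stream flag folded into the last entry. A hedged sketch of the receiving side (whatever AHandler was registered through setCallback would run something like this):

    #include <media/stagefright/CodecBase.h>   // AccessUnitInfo
    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void handleCodecCallback(const sp<AMessage> &msg) {
        int32_t cbID = 0;
        if (!msg->findInt32("callbackID", &cbID)) {
            return;
        }
        if (cbID == MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE) {
            sp<RefBase> obj;
            if (msg->findObject("accessUnitInfo", &obj)) {
                sp<BufferInfosWrapper> infos{(BufferInfosWrapper *)obj.get()};
                // One entry per access unit packed into this output buffer.
                for (const AccessUnitInfo &au : infos->value) {
                    // au.mSize, au.mFlags, au.mTimestamp describe each unit.
                    (void)au;
                }
            }
        }
    }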
 
@@ -6589,6 +6822,7 @@
         return NO_INIT;
     }
     updateLowLatency(params);
+    updateCodecImportance(params);
     mapFormat(mComponentName, params, nullptr, false);
     updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index f16e635..604dcb0 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -298,6 +298,11 @@
             ALOGE("error pushing blank frames: lock failed: %s (%d)", strerror(-err), -err);
             break;
         }
+        if (img == nullptr) {
+            (void)buf->unlock(); // Since lock() was successful.
+            ALOGE("error pushing blank frames: lock succeeded: buf mapping is nullptr");
+            break;
+        }
 
         *img = 0;
 
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 22885c9..5dd8423 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,8 +1,5 @@
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     // writerTest fails about 5 out of 66
     // { "name": "writerTest" },
 
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
index 76b054a..237e715 100644
--- a/media/libstagefright/colorconversion/fuzzer/Android.bp
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -47,9 +47,15 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-fwk-video@google.com",
         ],
-        componentid: 155276,
+        componentid: 42195,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libstagefright_color_conversion",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index 509f7a9..0d9e0ec 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -66,7 +66,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
             <Alias name="OMX.google.raw.decoder" />
-            <Limit name="channel-count" max="8" />
+            <Limit name="channel-count" max="12" />
             <Limit name="sample-rate" ranges="8000-192000" />
             <Limit name="bitrate" range="1-10000000" />
         </MediaCodec>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index ee41867..dc7d787 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -80,7 +80,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
             <Alias name="OMX.google.raw.decoder" />
-            <Limit name="channel-count" max="8" />
+            <Limit name="channel-count" max="12" />
             <Limit name="sample-rate" ranges="8000-192000" />
             <Limit name="bitrate" range="1-10000000" />
             <Attribute name="software-codec" />
@@ -330,6 +330,7 @@
             <!-- Video Quality control -->
                     <!-- supports QP bounding with standard keys -->
             <Feature name="qp-bounds" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 0927653..bffb294 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,36 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct AccessUnitInfo {
+    uint32_t mFlags;
+    uint32_t mSize;
+    int64_t mTimestamp;
+    AccessUnitInfo(uint32_t flags, uint32_t size, int64_t ptsUs)
+            :mFlags(flags), mSize(size), mTimestamp(ptsUs) {
+    }
+    ~AccessUnitInfo() {}
+};
+
+struct CodecCryptoInfo {
+    size_t mNumSubSamples{0};
+    CryptoPlugin::SubSample *mSubSamples{nullptr};
+    uint8_t *mIv{nullptr};
+    uint8_t *mKey{nullptr};
+    enum CryptoPlugin::Mode mMode;
+    CryptoPlugin::Pattern mPattern;
+
+    virtual ~CodecCryptoInfo() {}
+protected:
+    CodecCryptoInfo():
+            mNumSubSamples(0),
+            mSubSamples(nullptr),
+            mIv(nullptr),
+            mKey(nullptr),
+            mMode{CryptoPlugin::kMode_Unencrypted},
+            mPattern{0, 0} {
+    }
+};
+
 struct CodecParameterDescriptor {
     std::string name;
     AMessage::Type type;
@@ -362,6 +392,30 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) = 0;
+
+    /**
+     * Queue a secure input buffer with multiple access units into the buffer channel.
+     *
+     * @param buffer The buffer to queue. The access unit delimiters and crypto
+     *               subsample information are included in the buffer metadata.
+     * @param secure Whether the buffer is secure.
+     * @param errorDetailMsg The error message to be set in case of error.
+     * @return OK if successful;
+     *         -ENOENT if the buffer is not known;
+     *         -ENOSYS if mCrypto is not set so that decryption is not
+     *         possible;
+     *         other errors if decryption failed.
+     */
+     virtual status_t queueSecureInputBuffers(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString *errorDetailMsg) {
+        (void)buffer;
+        (void)secure;
+        (void)errorDetailMsg;
+        return -ENOSYS;
+     }
+
     /**
      * Attach a Codec 2.0 buffer to MediaCodecBuffer.
      *
@@ -408,6 +462,34 @@
         (void)errorDetailMsg;
         return -ENOSYS;
     }
+
+    /**
+     * Attach an encrypted HidlMemory buffer containing multiple access units to an index
+     *
+     * @param memory The memory to attach.
+     * @param offset The byte offset into the memory region at which the data starts.
+     * @param buffer The MediaCodecBuffer to attach the memory to. The access
+     *               unit delimiters and crypto subsample information are included
+     *               in the buffer metadata.
+     * @param secure Whether the buffer is secure.
+     * @param errorDetailMsg The error message to be set if an error occurs.
+     * @return    OK if successful;
+     *            -ENOENT if index is not recognized
+     *            -ENOSYS if attaching buffer is not possible or not supported
+     */
+    virtual status_t attachEncryptedBuffers(
+            const sp<hardware::HidlMemory> &memory,
+            size_t offset,
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString* errorDetailMsg) {
+        (void)memory;
+        (void)offset;
+        (void)buffer;
+        (void)secure;
+        (void)errorDetailMsg;
+        return -ENOSYS;
+    }
     /**
      * Request buffer rendering at specified time.
      *
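Both new BufferChannelBase hooks default to returning -ENOSYS, so only buffer channels that understand multi-access-unit buffers need to override them; everything else keeps working unchanged. A hedged sketch of what an overriding channel could declare (the class name is made up, and the remaining pure-virtual BufferChannelBase methods are omitted, so this stays a non-instantiable sketch):

    #include <media/stagefright/CodecBase.h>

    namespace android {

    struct LargeFrameAwareChannel : public BufferChannelBase {
        status_t queueSecureInputBuffers(
                const sp<MediaCodecBuffer> &buffer,
                bool secure,
                AString *errorDetailMsg) override {
            // A real implementation would read "cryptoInfos"/"accessUnitInfo"
            // from buffer->meta() and decrypt and queue each access unit.
            (void)buffer; (void)secure; (void)errorDetailMsg;
            return OK;
        }

        status_t attachEncryptedBuffers(
                const sp<hardware::HidlMemory> &memory,
                size_t offset,
                const sp<MediaCodecBuffer> &buffer,
                bool secure,
                AString *errorDetailMsg) override {
            (void)memory; (void)offset; (void)buffer; (void)secure; (void)errorDetailMsg;
            return OK;
        }
    };

    }  // namespace android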
diff --git a/media/libstagefright/include/media/stagefright/CryptoAsync.h b/media/libstagefright/include/media/stagefright/CryptoAsync.h
index b675518..acb3dae 100644
--- a/media/libstagefright/include/media/stagefright/CryptoAsync.h
+++ b/media/libstagefright/include/media/stagefright/CryptoAsync.h
@@ -85,6 +85,18 @@
         kActionDecrypt                 = (1 <<  0),
         kActionAttachEncryptedBuffer   = (1 <<  1)
     };
+
+    // This struct is meant to copy the mapped contents from the original info.
+    struct CryptoAsyncInfo : public CodecCryptoInfo {
+        public:
+            explicit CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info);
+            virtual ~CryptoAsyncInfo() = default;
+        protected:
+            // all backup buffers for the base object.
+            sp<ABuffer> mKeyBuffer;
+            sp<ABuffer> mIvBuffer;
+            sp<ABuffer> mSubSamplesBuffer;
+    };
 protected:
 
     // Message types for the looper
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index baa5b7e..9ecb12e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -28,6 +28,7 @@
 #include <media/MediaMetrics.h>
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/CodecErrorLog.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaHistogram.h>
@@ -56,7 +57,9 @@
 struct AString;
 struct BatteryChecker;
 class BufferChannelBase;
+struct AccessUnitInfo;
 struct CodecBase;
+struct CodecCryptoInfo;
 struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
@@ -78,6 +81,9 @@
 using aidl::android::media::MediaResourceParcel;
 using aidl::android::media::ClientConfigParcel;
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+
 struct MediaCodec : public AHandler {
     enum Domain {
         DOMAIN_UNKNOWN = 0,
@@ -115,6 +121,7 @@
         CB_OUTPUT_FORMAT_CHANGED = 4,
         CB_RESOURCE_RECLAIMED = 5,
         CB_CRYPTO_ERROR = 6,
+        CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
     };
 
     static const pid_t kNoPid = -1;
@@ -185,6 +192,13 @@
             uint32_t flags,
             AString *errorDetailMsg = NULL);
 
+    status_t queueInputBuffers(
+            size_t index,
+            size_t offset,
+            size_t size,
+            const sp<BufferInfosWrapper> &accessUnitInfo,
+            AString *errorDetailMsg = NULL);
+
     status_t queueSecureInputBuffer(
             size_t index,
             size_t offset,
@@ -198,11 +212,18 @@
             uint32_t flags,
             AString *errorDetailMsg = NULL);
 
+    status_t queueSecureInputBuffers(
+            size_t index,
+            size_t offset,
+            size_t size,
+            const sp<BufferInfosWrapper> &accessUnitInfo,
+            const sp<CryptoInfosWrapper> &cryptoInfos,
+            AString *errorDetailMsg = NULL);
+
     status_t queueBuffer(
             size_t index,
             const std::shared_ptr<C2Buffer> &buffer,
-            int64_t presentationTimeUs,
-            uint32_t flags,
+            const sp<BufferInfosWrapper> &bufferInfos,
             const sp<AMessage> &tunings,
             AString *errorDetailMsg = NULL);
 
@@ -210,14 +231,9 @@
             size_t index,
             const sp<hardware::HidlMemory> &memory,
             size_t offset,
-            const CryptoPlugin::SubSample *subSamples,
-            size_t numSubSamples,
-            const uint8_t key[16],
-            const uint8_t iv[16],
-            CryptoPlugin::Mode mode,
-            const CryptoPlugin::Pattern &pattern,
-            int64_t presentationTimeUs,
-            uint32_t flags,
+            size_t size,
+            const sp<BufferInfosWrapper> &bufferInfos,
+            const sp<CryptoInfosWrapper> &cryptoInfos,
             const sp<AMessage> &tunings,
             AString *errorDetailMsg = NULL);
 
@@ -320,6 +336,12 @@
     status_t reclaim(bool force = false);
     friend struct ResourceManagerClient;
 
+    // to create the metrics associated with this codec.
+    // Any error in this function will be captured by the output argument err.
+    mediametrics_handle_t createMediaMetrics(const sp<AMessage>& format,
+                                             uint32_t flags,
+                                             status_t* err);
+
 private:
     enum State {
         UNINITIALIZED,
@@ -462,6 +484,7 @@
     void resetMetricsFields();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    void updateCodecImportance(const sp<AMessage>& msg);
     void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 7334639..24ac2e8 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -697,6 +697,7 @@
 inline constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
 inline constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
 inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
 inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
 inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
 inline constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
@@ -794,6 +795,7 @@
 inline constexpr char KEY_HDR10_PLUS_INFO[] = "hdr10-plus-info";
 inline constexpr char KEY_HEIGHT[] = "height";
 inline constexpr char KEY_I_FRAME_INTERVAL[] = "i-frame-interval";
+inline constexpr char KEY_IMPORTANCE[] = "importance";
 inline constexpr char KEY_INTRA_REFRESH_PERIOD[] = "intra-refresh-period";
 inline constexpr char KEY_IS_ADTS[] = "is-adts";
 inline constexpr char KEY_IS_AUTOSELECT[] = "is-autoselect";
@@ -809,6 +811,9 @@
 inline constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
 inline constexpr char KEY_MAX_HEIGHT[] = "max-height";
 inline constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
+inline constexpr char KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE[] = "buffer-batch-max-output-size";
+inline constexpr char KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE[] =
+        "buffer-batch-threshold-output-size";
 inline constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
 inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
 inline constexpr char KEY_MAX_WIDTH[] = "max-width";
diff --git a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
index 0fb5690..66f0bb1 100644
--- a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
+++ b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
@@ -59,6 +59,12 @@
     int32_t mReadHead;
     int32_t mCapacity;
     char* mCutBuffer;
+
+    /*
+     * Added to use Access unit skip cut in Codec2 framework
+     */
+    friend class MultiAccessUnitSkipCutBuffer;
+
     DISALLOW_EVIL_CONSTRUCTORS(SkipCutBuffer);
 };
 
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 54c5697..79ab009 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -184,6 +184,9 @@
 
 cc_defaults {
     name: "libstagefright_softomx-defaults",
+    // TODO (b/316432618) Software OMX codecs are no longer used, disable building them till
+    // this code is removed completely.
+    enabled: false,
     vendor_available: true,
 
     cflags: [
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
index 35a5b11..f011d04 100644
--- a/media/libstagefright/timedtext/TEST_MAPPING
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/timedtext
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "TimedTextUnitTest" }
   ]
 }
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 19f9549..2341af1 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,3 @@
-
 package {
     default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
 }
@@ -86,7 +85,16 @@
         "-Wall",
     ],
 
-    vintf_fragments: ["manifest_media_c2_software.xml"],
+    // AIDL is only used when release_aidl_use_unfrozen is true
+    // because the swcodec mainline module is a prebuilt from an
+    // Android U branch in that case.
+    // TODO(b/327508501)
+    vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
+    product_variables: {
+        release_aidl_use_unfrozen: {
+            vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
+        },
+    },
 
     soong_config_variables: {
         TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/mediaserver/manifest_media_c2_software_aidl.xml b/media/mediaserver/manifest_media_c2_software_aidl.xml
new file mode 100644
index 0000000..e6bcafa
--- /dev/null
+++ b/media/mediaserver/manifest_media_c2_software_aidl.xml
@@ -0,0 +1,7 @@
+<manifest version="1.0" type="framework">
+    <hal format="aidl">
+        <name>android.hardware.media.c2</name>
+        <version>1</version>
+        <fqname>IComponentStore/software</fqname>
+    </hal>
+</manifest>
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software_hidl.xml
similarity index 64%
rename from media/mediaserver/manifest_media_c2_software.xml
rename to media/mediaserver/manifest_media_c2_software_hidl.xml
index d7fb1a0..69a27be 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software_hidl.xml
@@ -1,5 +1,5 @@
 <manifest version="1.0" type="framework">
-    <hal>
+    <hal format="hidl" max-level="8">
         <name>android.hardware.media.c2</name>
         <transport>hwbinder</transport>
         <version>1.2</version>
@@ -8,9 +8,4 @@
             <instance>software</instance>
         </interface>
     </hal>
-    <hal format="aidl">
-        <name>android.hardware.media.c2</name>
-        <version>1</version>
-        <fqname>IComponentStore/software</fqname>
-    </hal>
 </manifest>
diff --git a/media/module/codecs/amrnb/TEST_MAPPING b/media/module/codecs/amrnb/TEST_MAPPING
index 343d08a..306921f 100644
--- a/media/module/codecs/amrnb/TEST_MAPPING
+++ b/media/module/codecs/amrnb/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrnb
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "AmrnbDecoderTest"},
     { "name": "AmrnbEncoderTest"}
   ]
diff --git a/media/module/codecs/amrwb/TEST_MAPPING b/media/module/codecs/amrwb/TEST_MAPPING
new file mode 100644
index 0000000..3f05c90
--- /dev/null
+++ b/media/module/codecs/amrwb/TEST_MAPPING
@@ -0,0 +1,6 @@
+{
+  "postsubmit": [
+    { "name": "AmrwbDecoderTest"},
+    { "name": "AmrwbEncoderTest"}
+  ]
+}
diff --git a/media/module/codecs/amrwb/dec/TEST_MAPPING b/media/module/codecs/amrwb/dec/TEST_MAPPING
deleted file mode 100644
index 0278d26..0000000
--- a/media/module/codecs/amrwb/dec/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwb
-{
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-    { "name": "AmrwbDecoderTest"}
-
-  ]
-}
diff --git a/media/module/codecs/amrwb/enc/TEST_MAPPING b/media/module/codecs/amrwb/enc/TEST_MAPPING
deleted file mode 100644
index 045e8b3..0000000
--- a/media/module/codecs/amrwb/enc/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
-{
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-    { "name": "AmrwbEncoderTest"}
-
-  ]
-}
diff --git a/media/module/codecs/flac/TEST_MAPPING b/media/module/codecs/flac/TEST_MAPPING
new file mode 100644
index 0000000..725ea90
--- /dev/null
+++ b/media/module/codecs/flac/TEST_MAPPING
@@ -0,0 +1,5 @@
+{
+  "postsubmit": [
+    { "name": "FlacDecoderTest"}
+  ]
+}
diff --git a/media/module/codecs/flac/dec/test/Android.bp b/media/module/codecs/flac/dec/test/Android.bp
index a4c2735..8004c4a 100644
--- a/media/module/codecs/flac/dec/test/Android.bp
+++ b/media/module/codecs/flac/dec/test/Android.bp
@@ -28,6 +28,7 @@
 cc_test {
     name: "FlacDecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "FlacDecoderTest.cpp",
diff --git a/media/module/codecs/m4v_h263/TEST_MAPPING b/media/module/codecs/m4v_h263/TEST_MAPPING
index ba3ff1c..8599fa5 100644
--- a/media/module/codecs/m4v_h263/TEST_MAPPING
+++ b/media/module/codecs/m4v_h263/TEST_MAPPING
@@ -1,18 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-
-    // the decoder reports something bad about an unexpected newline in the *config file
-    // and the config file looks like the AndroidTest.xml file that we put in there.
-    // I don't get this from the Encoder -- and I don't see any substantive difference
-    // between decode and encode AndroidTest.xml files -- except that encode does NOT
-    // finish with a newline.
-    // strange.
+  "postsubmit": [
     { "name": "Mpeg4H263DecoderTest"},
     { "name": "Mpeg4H263EncoderTest"}
-
   ]
 }
diff --git a/media/module/codecs/mp3dec/TEST_MAPPING b/media/module/codecs/mp3dec/TEST_MAPPING
index 4ef4317..5faece6 100644
--- a/media/module/codecs/mp3dec/TEST_MAPPING
+++ b/media/module/codecs/mp3dec/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "Mp3DecoderTest"}
   ]
 }
diff --git a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
index e8fea73..fb9f1e9 100644
--- a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -310,26 +310,31 @@
 }
 
 // Check if the input is valid by checking if it contains a sync word
-static bool isInputValid(uint8 *buf, uint32 inSize)
+static ERROR_CODE validate_input(uint8 *buf, uint32 inSize)
 {
-    // Buffer needs to contain at least 4 bytes which is the size of
-    // the header
-    if (inSize < 4) return false;
+    /*
+     * Verify that at least the header is complete
+     * Note that SYNC_WORD_LNGTH is in unit of bits, but inSize is in unit of bytes.
+     */
+    if (inSize < ((SYNC_WORD_LNGTH + 21) >> 3))
+    {
+        return NO_ENOUGH_MAIN_DATA_ERROR;
+    }
 
     size_t totalInSize = 0;
     size_t frameSize = 0;
     while (totalInSize <= (inSize - 4)) {
         if (!parseHeader(U32_AT(buf + totalInSize), &frameSize)) {
-            return false;
+            return SYNCH_LOST_ERROR;
         }
         // Buffer needs to be large enough to include complete frame
         if ((frameSize > inSize) || (totalInSize > (inSize - frameSize))) {
-            return false;
+            return SYNCH_LOST_ERROR;
         }
         totalInSize += frameSize;
     }
 
-    return true;
+    return NO_DECODING_ERROR;
 }
 
 ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
@@ -348,10 +353,11 @@
     mp3Header info_data;
     mp3Header *info = &info_data;
 
-    if (!isInputValid(pExt->pInputBuffer, pExt->inputBufferCurrentLength))
+    errorCode = validate_input(pExt->pInputBuffer, pExt->inputBufferCurrentLength);
+    if (errorCode != NO_DECODING_ERROR)
     {
         pExt->outputFrameSize = 0;
-        return SYNCH_LOST_ERROR;
+        return errorCode;
     }
 
     pVars->inputStream.pBuffer  = pExt->pInputBuffer;
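
[Editor's note] The byte threshold in the new validate_input() is the same 4-byte MP3 header length that the old isInputValid() checked; assuming SYNC_WORD_LNGTH is 11 bits (as defined in the PV decoder headers, an assumption not shown in this hunk), the shift works out as follows. Illustrative only, not part of the patch:

    // Assumption: SYNC_WORD_LNGTH == 11 bits, per the PV MP3 decoder headers.
    constexpr unsigned SYNC_WORD_LNGTH = 11;
    // (11 + 21) bits = 32 bits; ">> 3" converts bits to bytes => 4 bytes,
    // i.e. one complete MP3 frame header, matching the old "inSize < 4" check.
    static_assert(((SYNC_WORD_LNGTH + 21) >> 3) == 4, "header check is 4 bytes");
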
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 65d537b..4fbfab1 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -25,6 +25,7 @@
 #include <C2Component.h>
 #include <C2PlatformSupport.h>
 
+#include <android/hidl/manager/1.2/IServiceManager.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.2/ComponentStore.h>
@@ -148,6 +149,11 @@
                 return C2_TRANSACTION_FAILED;
             }
         }
+        status = static_cast<c2_status_t>(configResult.status.status);
+        if (status != C2_BAD_INDEX) {
+            LOG(DEBUG) << "config -- call failed: "
+                       << status << ".";
+        }
         size_t i = failures->size();
         failures->resize(i + configResult.failures.size());
         for (const c2_aidl::SettingResult& sf : configResult.failures) {
@@ -320,8 +326,8 @@
             heapParams->reserve(heapParams->size() + numIndices);
         }
         c2_status_t status = C2_OK;
-        c2_aidl::Params aidlParams;
-        ScopedAStatus transResult = mAidlConfigurable->query(indices, true, &aidlParams);
+        c2_aidl::IConfigurable::QueryResult aidlResult;
+        ScopedAStatus transResult = mAidlConfigurable->query(indices, true, &aidlResult);
         if (!transResult.isOk()) {
             if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
                 status = c2_status_t(transResult.getServiceSpecificError());
@@ -332,8 +338,12 @@
                 return C2_TRANSACTION_FAILED;
             }
         }
+        status = static_cast<c2_status_t>(aidlResult.status.status);
+        if (status != C2_OK) {
+            LOG(DEBUG) << "query -- call failed: " << status << ".";
+        }
         std::vector<C2Param*> paramPointers;
-        if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, aidlParams)) {
+        if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, aidlResult.params)) {
             LOG(ERROR) << "query -- error while parsing params.";
             return C2_CORRUPTED;
         }
@@ -488,9 +498,9 @@
         }
 
         c2_status_t status = C2_OK;
-        std::vector<c2_aidl::FieldSupportedValuesQueryResult> queryResults;
+        c2_aidl::IConfigurable::QuerySupportedValuesResult queryResult;
         ScopedAStatus transResult = mAidlConfigurable->querySupportedValues(
-                aidlFields, true, &queryResults);
+                aidlFields, true, &queryResult);
         if (!transResult.isOk()) {
             if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
                 status = c2_status_t(transResult.getServiceSpecificError());
@@ -502,14 +512,19 @@
                 return C2_TRANSACTION_FAILED;
             }
         }
-        if (queryResults.size() != fields.size()) {
+        status = static_cast<c2_status_t>(queryResult.status.status);
+        if (status != C2_OK) {
+            LOG(DEBUG) << "querySupportedValues -- call failed: "
+                       << status << ".";
+        }
+        if (queryResult.values.size() != fields.size()) {
             LOG(ERROR) << "querySupportedValues -- "
                           "input and output lists "
                           "have different sizes.";
             return C2_CORRUPTED;
         }
         for (size_t i = 0; i < fields.size(); ++i) {
-            if (!c2_aidl::utils::FromAidl(&fields[i], aidlFields[i], queryResults[i])) {
+            if (!c2_aidl::utils::FromAidl(&fields[i], aidlFields[i], queryResult.values[i])) {
                 LOG(ERROR) << "querySupportedValues -- "
                               "invalid returned value.";
                 return C2_CORRUPTED;
@@ -803,32 +818,50 @@
         }
     }
 
-    if (platformVersion >= __ANDROID_API_V__) {
-        if (!aidlStore) {
-            aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
+    bool registered = false;
+    const std::string aidlServiceName =
+        std::string(c2_aidl::IComponentStore::descriptor) + "/software";
+    if (__builtin_available(android __ANDROID_API_S__, *)) {
+        if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
+            if (!aidlStore) {
+                aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
+                        std::make_shared<H2C2ComponentStore>(nullptr));
+            }
+            binder_exception_t ex = AServiceManager_addService(
+                    aidlStore->asBinder().get(), aidlServiceName.c_str());
+            if (ex == EX_NONE) {
+                registered = true;
+            } else {
+                LOG(WARNING) << "Cannot register software Codec2 AIDL service. Exception: " << ex;
+            }
+        }
+    }
+
+    // If the software component store isn't declared in the manifest, we don't
+    // need to create the service and register it.
+    using ::android::hidl::manager::V1_2::IServiceManager;
+    IServiceManager::Transport transport =
+            android::hardware::defaultServiceManager1_2()->getTransport(
+                    V1_2::utils::ComponentStore::descriptor, "software");
+    if (transport == IServiceManager::Transport::HWBINDER) {
+        if (!hidlStore) {
+            hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
                     std::make_shared<H2C2ComponentStore>(nullptr));
+            hidlVer = "1.2";
         }
-        const std::string serviceName =
-            std::string(c2_aidl::IComponentStore::descriptor) + "/software";
-        binder_exception_t ex = AServiceManager_addService(
-                aidlStore->asBinder().get(), serviceName.c_str());
-        if (ex != EX_NONE) {
-            LOG(ERROR) << "Cannot register software Codec2 AIDL service.";
-            return;
+        if (hidlStore->registerAsService("software") == android::OK) {
+            registered = true;
+        } else {
+            LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
         }
+    } else {
+        LOG(INFO) << "The HIDL software Codec2 service is deprecated"
+                     " so it is not being registered with hwservicemanager.";
     }
 
-    if (!hidlStore) {
-        hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(
-                std::make_shared<H2C2ComponentStore>(nullptr));
-        hidlVer = "1.0";
+    if (registered) {
+        LOG(INFO) << "Software Codec2 service created and registered.";
     }
-    if (hidlStore->registerAsService("software") != android::OK) {
-        LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
-        return;
-    }
-
-    LOG(INFO) << "Software Codec2 service created and registered.";
 
     ABinderProcess_joinThreadPool();
     ::android::hardware::joinRpcThreadpool();
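
[Editor's note] The registrant now consults the device manifests before creating either store: the AIDL service is added only when it is declared to servicemanager, and the HIDL 1.2 store is registered only while hwservicemanager still reports an hwbinder transport for it. A condensed sketch of that decision, reusing the same calls as the hunk above (illustrative, not additional patch content):

    using ::android::hidl::manager::V1_2::IServiceManager;
    const std::string aidlName =
            std::string(c2_aidl::IComponentStore::descriptor) + "/software";
    const bool registerAidl = AServiceManager_isDeclared(aidlName.c_str());
    IServiceManager::Transport transport =
            android::hardware::defaultServiceManager1_2()->getTransport(
                    V1_2::utils::ComponentStore::descriptor, "software");
    const bool registerHidl = (transport == IServiceManager::Transport::HWBINDER);
    // Only the declared transports get a ComponentStore instance and a registration.
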
diff --git a/media/module/esds/TEST_MAPPING b/media/module/esds/TEST_MAPPING
index 9368b6d..0337743 100644
--- a/media/module/esds/TEST_MAPPING
+++ b/media/module/esds/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/module/esds
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "ESDSTest" }
   ]
 }
diff --git a/media/module/esds/tests/Android.bp b/media/module/esds/tests/Android.bp
index aea611e..427b275 100644
--- a/media/module/esds/tests/Android.bp
+++ b/media/module/esds/tests/Android.bp
@@ -25,6 +25,7 @@
 cc_test {
     name: "ESDSTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "ESDSTest.cpp",
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 0a8d2ab..d096d63 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -189,6 +189,8 @@
     ],
 
     dictionary: "mkv_extractor_fuzzer.dict",
+
+    corpus: ["corpus/*"],
 }
 
 cc_fuzz {
diff --git a/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c b/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c
new file mode 100755
index 0000000..8b31683
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e b/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e
new file mode 100755
index 0000000..f4d475d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3 b/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3
new file mode 100755
index 0000000..8438e66
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69 b/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69
new file mode 100755
index 0000000..2f622cd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600 b/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600
new file mode 100755
index 0000000..f053c01
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65 b/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65
new file mode 100755
index 0000000..872451c
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8 b/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8
new file mode 100755
index 0000000..2f7e3ea
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687 b/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687
new file mode 100755
index 0000000..10b5f9a
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620 b/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620
new file mode 100755
index 0000000..969b7a2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498 b/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498
new file mode 100755
index 0000000..bd146b1
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da b/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da
new file mode 100755
index 0000000..7cf844e
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da
Binary files differ
diff --git a/media/module/extractors/wav/WAVExtractor.cpp b/media/module/extractors/wav/WAVExtractor.cpp
index 9c3bac6..d278103 100644
--- a/media/module/extractors/wav/WAVExtractor.cpp
+++ b/media/module/extractors/wav/WAVExtractor.cpp
@@ -219,7 +219,7 @@
 
             mNumChannels = U16_LE_AT(&formatSpec[2]);
 
-            if (mNumChannels < 1 || mNumChannels > FCC_8) {
+            if (mNumChannels < 1 || mNumChannels > FCC_12) {
                 ALOGE("Unsupported number of channels (%d)", mNumChannels);
                 return AMEDIA_ERROR_UNSUPPORTED;
             }
diff --git a/media/module/foundation/TEST_MAPPING b/media/module/foundation/TEST_MAPPING
index a70c352..ea4e4fd 100644
--- a/media/module/foundation/TEST_MAPPING
+++ b/media/module/foundation/TEST_MAPPING
@@ -1,9 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/foundation
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
+    { "name": "AVCUtilsUnitTest" },
     { "name": "OpusHeaderTest" }
   ],
 
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 7594565..b301c53 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -356,6 +356,16 @@
     DISALLOW_EVIL_CONSTRUCTORS(AMessage);
 };
 
+/*
+ * Helper struct for wrapping any object with RefBase.
+ */
+template <typename T>
+struct WrapperObject : public RefBase {
+    WrapperObject(const T& v) : value(v) {}
+    WrapperObject(T&& v) : value(std::move(v)) {}
+    T value;
+};
+
 }  // namespace android
 
 #endif  // A_MESSAGE_H_
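
[Editor's note] A sketch of how the new helper is meant to be used: any copyable or movable value becomes reference-counted by wrapping it and holding it through an sp<>, which is what the BufferInfosWrapper and CryptoInfosWrapper typedefs added to MediaCodec.h rely on. SampleInfo below is a made-up payload type, not part of the patch:

    #include <vector>
    #include <utils/StrongPointer.h>
    #include <media/stagefright/foundation/AMessage.h>  // brings in WrapperObject

    struct SampleInfo { int64_t timestampUs; size_t size; };  // hypothetical payload

    void example() {
        std::vector<SampleInfo> infos = {{0 /* us */, 1024 /* bytes */}};
        // Move the vector into a RefBase-managed wrapper so it can be shared via sp<>.
        android::sp<android::WrapperObject<std::vector<SampleInfo>>> wrapped =
                new android::WrapperObject<std::vector<SampleInfo>>(std::move(infos));
        size_t n = wrapped->value.size();  // access the wrapped value directly
        (void)n;
    }
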
diff --git a/media/module/foundation/tests/AVCUtils/Android.bp b/media/module/foundation/tests/AVCUtils/Android.bp
index ee7db21..c306c73 100644
--- a/media/module/foundation/tests/AVCUtils/Android.bp
+++ b/media/module/foundation/tests/AVCUtils/Android.bp
@@ -28,6 +28,7 @@
 cc_test {
     name: "AVCUtilsUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AVCUtilsUnitTest.cpp",
diff --git a/media/module/foundation/tests/AVCUtils/AndroidTest.xml b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
index e30bfbf..315373f 100644
--- a/media/module/foundation/tests/AVCUtils/AndroidTest.xml
+++ b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
@@ -16,7 +16,7 @@
 <configuration description="Test module config for AVC Utils unit tests">
     <option name="test-suite-tag" value="AVCUtilsUnitTest" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="false" />
+        <option name="cleanup" value="true" />
         <option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
     </target_preparer>
 
diff --git a/media/module/id3/TEST_MAPPING b/media/module/id3/TEST_MAPPING
index 6106908..497d984 100644
--- a/media/module/id3/TEST_MAPPING
+++ b/media/module/id3/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/id3
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "ID3Test" }
   ],
 
diff --git a/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index c3a0ced..f8906dc 100644
--- a/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -15,6 +15,7 @@
 -->
 <configuration description="Unit test configuration for {MODULE}">
     <option name="test-suite-tag" value="TranscoderTests" />
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
         <option name="push-file" key="TranscodingTestAssets" value="/data/local/tmp/TranscodingTestAssets" />
diff --git a/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 88c3fd3..fed8fc9 100644
--- a/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -20,6 +20,7 @@
 #define LOG_TAG "VideoTrackTranscoderTests"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <fcntl.h>
 #include <gtest/gtest.h>
 #include <media/MediaSampleReaderNDK.h>
@@ -221,5 +222,6 @@
 
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 86187bd..88c3cc2 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -440,6 +440,10 @@
         ATSParser::CADescriptor *caDescriptor) {
     bool found = false;
     while (infoLength > 2) {
+        if (br->numBitsLeft() < 16) {
+            ALOGE("Not enough data left in bitreader");
+            return false;
+        }
         unsigned descriptor_tag = br->getBits(8);
         ALOGV("      tag = 0x%02x", descriptor_tag);
 
@@ -452,6 +456,10 @@
         }
         if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
             found = true;
+            if (br->numBitsLeft() < 32) {
+                ALOGE("Not enough data left in bitreader");
+                return false;
+            }
             caDescriptor->mSystemID = br->getBits(16);
             caDescriptor->mPID = br->getBits(16) & 0x1fff;
             infoLength -= 4;
@@ -460,14 +468,24 @@
             break;
         } else {
             infoLength -= descriptor_length;
-            br->skipBits(descriptor_length * 8);
+            if (!br->skipBits(descriptor_length * 8)) {
+                ALOGE("Not enough data left in bitreader");
+                return false;
+            }
         }
     }
-    br->skipBits(infoLength * 8);
+    if (!br->skipBits(infoLength * 8)) {
+        ALOGE("Not enough data left in bitreader");
+        return false;
+    }
     return found;
 }
 
 status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
+    if (br->numBitsLeft() < 10) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned table_id = br->getBits(8);
     ALOGV("  table_id = %u", table_id);
     if (table_id != 0x02u) {
@@ -482,6 +500,10 @@
     }
 
     br->skipBits(1);  // '0'
+    if (br->numBitsLeft() < 86) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     MY_LOGV("  reserved = %u", br->getBits(2));
 
     unsigned section_length = br->getBits(12);
@@ -526,6 +548,10 @@
 
     while (infoBytesRemaining >= 5) {
         StreamInfo info;
+        if (br->numBitsLeft() < 40) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
         info.mType = br->getBits(8);
         ALOGV("    stream_type = 0x%02x", info.mType);
         MY_LOGV("    reserved = %u", br->getBits(3));
@@ -545,6 +571,10 @@
         info.mAudioPresentations.clear();
         bool hasStreamCA = false;
         while (ES_info_length > 2 && infoBytesRemaining >= 0) {
+            if (br->numBitsLeft() < 16) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             unsigned descriptor_tag = br->getBits(8);
             ALOGV("      tag = 0x%02x", descriptor_tag);
 
@@ -562,21 +592,39 @@
             if (descriptor_tag == DESCRIPTOR_DTS) {
                 info.mType = STREAMTYPE_DTS;
                 ES_info_length -= descriptor_length;
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             } else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
                 hasStreamCA = true;
+                if (br->numBitsLeft() < 32) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 streamCA.mSystemID = br->getBits(16);
                 streamCA.mPID = br->getBits(16) & 0x1fff;
                 ES_info_length -= descriptor_length;
                 descriptor_length -= 4;
                 streamCA.mPrivateData.assign(br->data(), br->data() + descriptor_length);
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             } else if (info.mType == STREAMTYPE_PES_PRIVATE_DATA &&
                        descriptor_tag == DESCRIPTOR_DVB_EXTENSION && descriptor_length >= 1) {
+                if (br->numBitsLeft() < 8) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 unsigned descTagExt = br->getBits(8);
                 ALOGV("      tag_ext = 0x%02x", descTagExt);
                 ES_info_length -= descriptor_length;
                 descriptor_length--;
+                if (br->numBitsLeft() < (descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 // The AC4 descriptor is used in the PSI PMT to identify streams which carry AC4
                 // audio.
                 if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
@@ -594,6 +642,10 @@
                     br->skipBits(descriptor_length * 8);
                 } else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
                            descriptor_length >= 1) {
+                    if (br->numBitsLeft() < 8) {
+                        ALOGE("Not enough data left in bitreader!");
+                        return ERROR_MALFORMED;
+                    }
                     // DVB BlueBook A038 Table 110
                     unsigned num_preselections = br->getBits(5);
                     br->skipBits(3);  // reserved
@@ -671,11 +723,17 @@
                         info.mAudioPresentations.push_back(std::move(ap));
                     }
                 } else {
-                    br->skipBits(descriptor_length * 8);
+                    if (!br->skipBits(descriptor_length * 8)) {
+                        ALOGE("Not enough data left in bitreader!");
+                        return ERROR_MALFORMED;
+                    }
                 }
             } else {
                 ES_info_length -= descriptor_length;
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             }
         }
         if (hasStreamCA && !mParser->mCasManager->addStream(
@@ -694,6 +752,10 @@
     if (infoBytesRemaining != 0) {
         ALOGW("Section data remains unconsumed");
     }
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned crc = br->getBits(32);
     if (crc != mPMT_CRC) {
         audioPresentationsChanged = true;
@@ -1261,6 +1323,10 @@
 status_t ATSParser::Stream::parsePES(ABitReader *br, SyncEvent *event) {
     const uint8_t *basePtr = br->data();
 
+    if (br->numBitsLeft() < 48) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned packet_startcode_prefix = br->getBits(24);
 
     ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1286,10 +1352,14 @@
             && stream_id != 0xff  // program_stream_directory
             && stream_id != 0xf2  // DSMCC
             && stream_id != 0xf8) {  // H.222.1 type E
-        if (br->getBits(2) != 2u) {
+        if (br->numBitsLeft() < 2 || br->getBits(2) != 2u) {
             return ERROR_MALFORMED;
         }
 
+        if (br->numBitsLeft() < 22) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
         unsigned PES_scrambling_control = br->getBits(2);
         ALOGV("PES_scrambling_control = %u", PES_scrambling_control);
 
@@ -1328,19 +1398,19 @@
                 return ERROR_MALFORMED;
             }
 
-            if (br->getBits(4) != PTS_DTS_flags) {
+            if (br->numBitsLeft() < 7 || br->getBits(4) != PTS_DTS_flags) {
                 return ERROR_MALFORMED;
             }
             PTS = ((uint64_t)br->getBits(3)) << 30;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             PTS |= ((uint64_t)br->getBits(15)) << 15;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             PTS |= br->getBits(15);
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
@@ -1353,20 +1423,20 @@
                     return ERROR_MALFORMED;
                 }
 
-                if (br->getBits(4) != 1u) {
+                if (br->numBitsLeft() < 7 || br->getBits(4) != 1u) {
                     return ERROR_MALFORMED;
                 }
 
                 DTS = ((uint64_t)br->getBits(3)) << 30;
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
                 DTS |= ((uint64_t)br->getBits(15)) << 15;
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
                 DTS |= br->getBits(15);
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
 
@@ -1381,22 +1451,30 @@
                 return ERROR_MALFORMED;
             }
 
+            if (br->numBitsLeft() < 5) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             br->getBits(2);
 
             uint64_t ESCR = ((uint64_t)br->getBits(3)) << 30;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             ESCR |= ((uint64_t)br->getBits(15)) << 15;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             ESCR |= br->getBits(15);
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
             ALOGV("ESCR = %" PRIu64, ESCR);
+            if (br->numBitsLeft() < 10) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             MY_LOGV("ESCR_extension = %u", br->getBits(9));
 
             if (br->getBits(1) != 1u) {
@@ -1411,18 +1489,25 @@
                 return ERROR_MALFORMED;
             }
 
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
+                return ERROR_MALFORMED;
+            }
+            if (br->numBitsLeft() < 22) {
+                ALOGE("Not enough data left in bitreader!");
                 return ERROR_MALFORMED;
             }
             MY_LOGV("ES_rate = %u", br->getBits(22));
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
             optional_bytes_remaining -= 3;
         }
 
-        br->skipBits(optional_bytes_remaining * 8);
+        if (!br->skipBits(optional_bytes_remaining * 8)) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
 
         // ES data follows.
         int32_t pesOffset = br->data() - basePtr;
@@ -1450,7 +1535,10 @@
                     PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
                     br->data(), dataLength, pesOffset, event);
 
-            br->skipBits(dataLength * 8);
+            if (!br->skipBits(dataLength * 8)) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
         } else {
             onPayloadData(
                     PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
@@ -1465,15 +1553,13 @@
                     payloadSizeBits / 8, pesOffset);
         }
     } else if (stream_id == 0xbe) {  // padding_stream
-        if (PES_packet_length == 0u) {
+        if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
             return ERROR_MALFORMED;
         }
-        br->skipBits(PES_packet_length * 8);
     } else {
-        if (PES_packet_length == 0u) {
+        if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
             return ERROR_MALFORMED;
         }
-        br->skipBits(PES_packet_length * 8);
     }
 
     return OK;
@@ -1481,6 +1567,10 @@
 
 uint32_t ATSParser::Stream::getPesScramblingControl(
         ABitReader *br, int32_t *pesOffset) {
+    if (br->numBitsLeft() < 24) {
+        ALOGE("Not enough data left in bitreader!");
+        return 0;
+    }
     unsigned packet_startcode_prefix = br->getBits(24);
 
     ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1491,6 +1581,7 @@
     }
 
     if (br->numBitsLeft() < 48) {
+        ALOGE("Not enough data left in bitreader!");
         return 0;
     }
 
@@ -1987,12 +2078,20 @@
 }
 
 void ATSParser::parseProgramAssociationTable(ABitReader *br) {
+    if (br->numBitsLeft() < 8) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     unsigned table_id = br->getBits(8);
     ALOGV("  table_id = %u", table_id);
     if (table_id != 0x00u) {
         ALOGE("PAT data error!");
         return ;
     }
+    if (br->numBitsLeft() < 56) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     unsigned section_syntax_indictor = br->getBits(1);
     ALOGV("  section_syntax_indictor = %u", section_syntax_indictor);
 
@@ -2009,9 +2108,17 @@
     MY_LOGV("  section_number = %u", br->getBits(8));
     MY_LOGV("  last_section_number = %u", br->getBits(8));
 
+    // check for unsigned integer overflow before assigning it to numProgramBytes
+    if (section_length < 9) {
+        return;
+    }
     size_t numProgramBytes = (section_length - 5 /* header */ - 4 /* crc */);
 
     for (size_t i = 0; i < numProgramBytes / 4; ++i) {
+        if (br->numBitsLeft() < 32) {
+            ALOGE("Not enough data left in bitreader!");
+            return;
+        }
         unsigned program_number = br->getBits(16);
         ALOGV("    program_number = %u", program_number);
 
@@ -2049,6 +2156,10 @@
         }
     }
 
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     MY_LOGV("  CRC = 0x%08x", br->getBits(32));
 }
 
@@ -2070,9 +2181,16 @@
                 section->clear();
             }
 
+            if (br->numBitsLeft() < 8) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             unsigned skip = br->getBits(8);
             section->setSkipBytes(skip + 1);  // skip filler bytes + pointer field itself
-            br->skipBits(skip * 8);
+            if (!br->skipBits(skip * 8)) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
         }
 
         if (br->numBitsLeft() % 8 != 0) {
@@ -2157,6 +2275,10 @@
 status_t ATSParser::parseAdaptationField(
         ABitReader *br, unsigned PID, unsigned *random_access_indicator) {
     *random_access_indicator = 0;
+    if (br->numBitsLeft() < 8) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned adaptation_field_length = br->getBits(8);
 
     if (adaptation_field_length > 0) {
@@ -2227,6 +2349,10 @@
 status_t ATSParser::parseTS(ABitReader *br, SyncEvent *event) {
     ALOGV("---");
 
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned sync_byte = br->getBits(8);
     if (sync_byte != 0x47u) {
         ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
diff --git a/media/module/mpeg2ts/TEST_MAPPING b/media/module/mpeg2ts/TEST_MAPPING
index 9f4bbdf..536450f 100644
--- a/media/module/mpeg2ts/TEST_MAPPING
+++ b/media/module/mpeg2ts/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/mpeg2ts
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "Mpeg2tsUnitTest" }
   ]
 }
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockHandle.h b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
index 111485c..aa632a4 100644
--- a/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
+++ b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
@@ -45,18 +45,18 @@
                   pkt.size());
 
             // packet is bigger than what the caller can handle,
-            if (pkt.size() > len) {
+            if (pkt.size() - mPacketOffset > len) {
                 memcpy(data, pkt.data() + mPacketOffset, len);
 
                 mPacketOffset += len;
                 readAmt = len;
                 // packet is equal or smaller than the caller buffer
             } else {
-                memcpy(data, pkt.data() + mPacketOffset, pkt.size());
+                memcpy(data, pkt.data() + mPacketOffset, pkt.size() - mPacketOffset);
 
                 mPacketNumber++;
                 mPacketOffset = 0;
-                readAmt = pkt.size();
+                readAmt = pkt.size() - mPacketOffset;
             }
 
             return readAmt;
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8b9dde3..9ec7700 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -208,19 +208,21 @@
 }
 
 cc_test {
-    name: "AImageReaderWindowHandleTest",
+    name: "AImageReaderWindowTest",
     test_suites: ["device-tests"],
-    srcs: ["tests/AImageReaderWindowHandleTest.cpp"],
+    srcs: ["tests/AImageReaderWindowTest.cpp"],
     shared_libs: [
         "libbinder",
         "libmediandk",
         "libmediautils",
         "libnativewindow",
         "libgui",
+        "libhidlbase",
         "libutils",
         "libui",
         "libcutils",
         "android.hardware.graphics.bufferqueue@1.0",
+        "android.hidl.token@1.0",
     ],
 
     header_libs: [
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 161b5e3..a26681e 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -428,6 +428,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_HDR10_PLUS_INFO = "hdr10-plus-info";
 EXPORT const char* AMEDIAFORMAT_KEY_HEIGHT = "height";
 EXPORT const char* AMEDIAFORMAT_KEY_ICC_PROFILE = "icc-profile";
+EXPORT const char* AMEDIAFORMAT_KEY_IMPORTANCE = "importance";
 EXPORT const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD = "intra-refresh-period";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_ADTS = "is-adts";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT = "is-autoselect";
@@ -449,6 +450,10 @@
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER = "max-fps-to-encoder";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
+EXPORT const char* AMEDIAFORMAT_KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE =
+        "buffer-batch-max-output-size";
+EXPORT const char* AMEDIAFORMAT_KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE =
+        "buffer-batch-threshold-output-size";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER = "max-pts-gap-to-encoder";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_WIDTH = "max-width";
 EXPORT const char* AMEDIAFORMAT_KEY_MIME = "mime";
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
index e420812..1a15728 100644
--- a/media/ndk/TEST_MAPPING
+++ b/media/ndk/TEST_MAPPING
@@ -1,7 +1,7 @@
 // mappings for frameworks/av/media/ndk
 {
   "presubmit": [
-    { "name": "AImageReaderWindowHandleTest" },
+    { "name": "AImageReaderWindowTest" },
     { "name": "libmediandk_test" }
   ]
 }
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 304879d..e6c1338 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -18,6 +18,8 @@
 #include <media/NdkMediaCrypto.h>
 #include <functional>
 
+#include <functional>
+
 constexpr size_t kMaxString = 256;
 constexpr size_t kMinBytes = 0;
 constexpr size_t kMaxBytes = 1000;
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index b722b74..4fc9918 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -534,7 +534,8 @@
  * Get the native_handle_t corresponding to the ANativeWindow owned by the
  * AImageReader provided.
  *
- * This is deprecated in API level 35 and will return AMEDIA_ERROR_UNKNOWN.
+ * This is deprecated on devices with vendor API level greater than 34 and
+ * will return AMEDIA_ERROR_UNKNOWN on those devices.
  * The native_handle_t is no longer used with AIDL interfaces and
  * ANativeWindow is used directly instead.
  * Use AImageRead_getWindow to get the ANativeWindow and use that object.
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index b2cdf8d..cc1dd9f 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -168,6 +168,7 @@
 extern const char* AMEDIAFORMAT_KEY_GRID_ROWS __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_HEIGHT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_IMPORTANCE __INTRODUCED_IN(35);
 extern const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_IS_ADTS __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT __INTRODUCED_IN(21);
@@ -186,6 +187,8 @@
 extern const char* AMEDIAFORMAT_KEY_MAX_B_FRAMES __INTRODUCED_IN(34);
 extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE __INTRODUCED_IN(35);
+extern const char* AMEDIAFORMAT_KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE __INTRODUCED_IN(35);
 extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_MIME __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_MPEG_USER_DATA __INTRODUCED_IN(28);
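
[Editor's note] The new NDK keys follow the usual AMediaFormat setter pattern; a hypothetical configuration of the large-audio-frame batching limits might look like the sketch below. The numeric values are made up for illustration, and the keys require API level 35 as annotated above:

    #include <media/NdkMediaFormat.h>

    void configureBatching(AMediaFormat *fmt) {
        // Made-up sizes, in bytes, purely for illustration.
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 64 * 1024);
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE, 16 * 1024);
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_IMPORTANCE, 0);  // codec importance hint
    }
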
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4f045fd..262c169 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -185,11 +185,11 @@
     AMediaCodecCryptoInfo_setPattern; # introduced=24
     AMediaCodec_configure;
     AMediaCodec_createCodecByName;
-    AMediaCodec_createCodecByNameForClient; # systemapi # introduced=31
+    AMediaCodec_createCodecByNameForClient; # systemapi introduced=31
     AMediaCodec_createDecoderByType;
-    AMediaCodec_createDecoderByTypeForClient; # systemapi # introduced=31
+    AMediaCodec_createDecoderByTypeForClient; # systemapi introduced=31
     AMediaCodec_createEncoderByType;
-    AMediaCodec_createEncoderByTypeForClient; # systemapi # introduced=31
+    AMediaCodec_createEncoderByTypeForClient; # systemapi introduced=31
     AMediaCodec_delete;
     AMediaCodec_dequeueInputBuffer;
     AMediaCodec_dequeueOutputBuffer;
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
deleted file mode 100644
index 27864c2..0000000
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-#include <media/NdkImageReader.h>
-#include <media/NdkImage.h>
-#include <mediautils/AImageReaderUtils.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
-#include <NdkImagePriv.h>
-#include <NdkImageReaderPriv.h>
-#include <vndk/hardware_buffer.h>
-#include <memory>
-
-namespace android {
-
-using HGraphicBufferProducer = hardware::graphics::bufferqueue::V1_0::
-        IGraphicBufferProducer;
-using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
-using aimg::AImageReader_getHGBPFromHandle;
-
-typedef IGraphicBufferProducer::QueueBufferInput QueueBufferInput;
-typedef IGraphicBufferProducer::QueueBufferOutput QueueBufferOutput;
-
-static constexpr uint64_t kImageBufferUsage =
-    AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
-static constexpr int kImageWidth = 640;
-static constexpr int kImageHeight = 480;
-static constexpr int kImageFormat = AIMAGE_FORMAT_RGBA_8888;
-static constexpr int kMaxImages = 1;
-
-static constexpr int64_t kQueueBufferInputTimeStamp = 1384888611;
-static constexpr bool kQueueBufferInputIsAutoTimeStamp = false;
-static constexpr android_dataspace kQueueBufferInputDataspace = HAL_DATASPACE_UNKNOWN;
-static const Rect kQueueBufferInputRect = Rect(kImageWidth, kImageHeight);
-static constexpr int kQueueBufferInputScalingMode = 0;
-static constexpr int kQueueBufferInputTransform = 0;
-static const sp<Fence> kQueueBufferInputFence = Fence::NO_FENCE;
-
-static constexpr int kOnImageAvailableWaitUs = 100 * 1000;
-
-class AImageReaderWindowHandleTest : public ::testing::Test {
-   public:
-    void SetUp() override {
-        AImageReader_newWithUsage(kImageWidth, kImageHeight, kImageFormat,
-                                  kImageBufferUsage , kMaxImages, &imageReader_);
-        media_status_t ret = AMEDIA_ERROR_UNKNOWN;
-        ASSERT_NE(imageReader_, nullptr);
-        ret = AImageReader_setImageListener(imageReader_,
-                                            &imageReaderAvailableCb_);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        ret = AImageReader_setBufferRemovedListener(imageReader_,
-                                                    &imageReaderDetachedCb_);
-        ASSERT_EQ(ret, AMEDIA_OK);
-    }
-    void TearDown() override {
-        if (imageReader_) {
-            AImageReader_delete(imageReader_);
-        }
-    }
-
-    void HandleImageAvailable() {
-        AImage *outImage = nullptr;
-        media_status_t ret = AMEDIA_OK;
-        auto imageDeleter = [](AImage *img) { AImage_delete(img); };
-        std::unique_ptr<AImage, decltype(imageDeleter)> img(nullptr, imageDeleter);
-
-        // Test that the image can be acquired.
-        ret = AImageReader_acquireNextImage(imageReader_, &outImage);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        img.reset(outImage);
-        ASSERT_NE(img, nullptr);
-
-        // Test that we can get a handle to the image's hardware buffer and a
-        // native handle to it.
-        AHardwareBuffer *hardwareBuffer = nullptr;
-        ret = AImage_getHardwareBuffer(img.get(), &hardwareBuffer);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        ASSERT_NE(hardwareBuffer, nullptr);
-        const native_handle_t *nh = AHardwareBuffer_getNativeHandle(hardwareBuffer);
-        ASSERT_NE(nh, nullptr);
-        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
-        imageAvailable_ = true;
-        imageCondVar_.notify_one();
-    }
-
-    static void onImageAvailable(void *context, AImageReader *reader) {
-        (void)reader;
-        AImageReaderWindowHandleTest *thisContext =
-            reinterpret_cast<AImageReaderWindowHandleTest *>(context);
-        thisContext->HandleImageAvailable();
-    }
-
-    static void onBufferRemoved(void *, AImageReader *, AHardwareBuffer *) {
-    }
-
-    AImageReader *imageReader_ = nullptr;
-    AImageReader_ImageListener imageReaderAvailableCb_{this, onImageAvailable};
-    AImageReader_BufferRemovedListener imageReaderDetachedCb_{this, onBufferRemoved};
-    std::mutex imageAvailableMutex_;
-    std::condition_variable imageCondVar_;
-    bool imageAvailable_ = false;
-};
-
-static void fillRGBA8Buffer(uint8_t* buf, int w, int h, int stride) {
-    const size_t PIXEL_SIZE = 4;
-    for (int x = 0; x < w; x++) {
-        for (int y = 0; y < h; y++) {
-            off_t offset = (y * stride + x) * PIXEL_SIZE;
-            for (int c = 0; c < 4; c++) {
-                int parityX = (x / (1 << (c+2))) & 1;
-                int parityY = (y / (1 << (c+2))) & 1;
-                buf[offset + c] = (parityX ^ parityY) ? 231 : 35;
-            }
-        }
-    }
-}
-
-TEST_F(AImageReaderWindowHandleTest, CreateWindowNativeHandle) {
-    // Check that we can create a native_handle_t corresponding to the
-    // AImageReader.
-    native_handle_t *nh = nullptr;
-    AImageReader_getWindowNativeHandle(imageReader_, &nh);
-    ASSERT_NE(nh, nullptr);
-
-    // Check that there are only ints in the handle.
-    ASSERT_EQ(nh->numFds, 0);
-    ASSERT_NE(nh->numInts, 0);
-
-    // Check that the HGBP can be retrieved from the handle.
-    sp<HGraphicBufferProducer> hgbp =  AImageReader_getHGBPFromHandle(nh);
-    ASSERT_NE(hgbp, nullptr);
-    sp<IGraphicBufferProducer> igbp = new H2BGraphicBufferProducer(hgbp);
-    int dequeuedSlot = -1;
-    sp<Fence> dequeuedFence;
-    IGraphicBufferProducer::QueueBufferOutput output;
-    ASSERT_EQ(OK, igbp->connect(nullptr, NATIVE_WINDOW_API_CPU, false, &output));
-
-    // Test that we can dequeue a buffer.
-    ASSERT_EQ(OK,
-              ~IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION &
-                      (igbp->dequeueBuffer(&dequeuedSlot, &dequeuedFence,
-                                           kImageWidth, kImageHeight,
-                                           kImageFormat, kImageBufferUsage,
-                                           nullptr, nullptr)));
-    EXPECT_LE(0, dequeuedSlot);
-    EXPECT_GT(BufferQueue::NUM_BUFFER_SLOTS, dequeuedSlot);
-
-    sp<GraphicBuffer> dequeuedBuffer;
-    igbp->requestBuffer(dequeuedSlot, &dequeuedBuffer);
-    uint8_t* img = nullptr;
-    ASSERT_EQ(NO_ERROR, dequeuedBuffer->lock(kImageBufferUsage, (void**)(&img)));
-
-    // Write in some dummy image data.
-    fillRGBA8Buffer(img, dequeuedBuffer->getWidth(), dequeuedBuffer->getHeight(),
-                    dequeuedBuffer->getStride());
-    ASSERT_EQ(NO_ERROR, dequeuedBuffer->unlock());
-    QueueBufferInput queueBufferInput(kQueueBufferInputTimeStamp,
-                                      kQueueBufferInputIsAutoTimeStamp,
-                                      kQueueBufferInputDataspace,
-                                      kQueueBufferInputRect,
-                                      kQueueBufferInputScalingMode,
-                                      kQueueBufferInputTransform,
-                                      kQueueBufferInputFence);
-    QueueBufferOutput queueBufferOutput;
-    ASSERT_EQ(OK, igbp->queueBuffer(dequeuedSlot, queueBufferInput,
-                                    &queueBufferOutput));
-    // wait until the onImageAvailable callback is called, or timeout completes.
-    std::unique_lock<std::mutex> lock(imageAvailableMutex_);
-    imageCondVar_.wait_for(lock, std::chrono::microseconds(kOnImageAvailableWaitUs),
-                           [this]{ return this->imageAvailable_;});
-    EXPECT_TRUE(imageAvailable_) <<  "Timed out waiting for image data to be handled!\n";
-}
-
-class AImageReaderPrivateFormatTest : public ::testing::Test {
-  public:
-    void SetUp() override {
-        auto status = AImageReader_new(kImageWidth, kImageHeight, AIMAGE_FORMAT_RAW_DEPTH,
-                                       kMaxImages, &imgReader);
-        EXPECT_TRUE(status == AMEDIA_OK);
-    }
-
-    void TearDown() override {
-        if (imgReader) {
-            AImageReader_delete(imgReader);
-        }
-    }
-    AImageReader *imgReader = nullptr;
-};
-
-TEST_F(AImageReaderPrivateFormatTest, CreateTest) {
-    EXPECT_TRUE(imgReader != nullptr);
-}
-
-
-}  // namespace android
diff --git a/media/ndk/tests/AImageReaderWindowTest.cpp b/media/ndk/tests/AImageReaderWindowTest.cpp
new file mode 100644
index 0000000..650b990
--- /dev/null
+++ b/media/ndk/tests/AImageReaderWindowTest.cpp
@@ -0,0 +1,240 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hidl/token/1.0/ITokenManager.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <gtest/gtest.h>
+#include <hidl/ServiceManagement.h>
+#include <media/NdkImageReader.h>
+#include <media/NdkImage.h>
+#include <mediautils/AImageReaderUtils.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <NdkImagePriv.h>
+#include <NdkImageReaderPriv.h>
+#include <vndk/hardware_buffer.h>
+#include <memory>
+
+namespace android {
+
+using HGraphicBufferProducer = hardware::graphics::bufferqueue::V1_0::
+        IGraphicBufferProducer;
+using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+using hidl::manager::V1_2::IServiceManager;
+using hidl::token::V1_0::ITokenManager;
+using aimg::AImageReader_getHGBPFromHandle;
+
+typedef IGraphicBufferProducer::QueueBufferInput QueueBufferInput;
+typedef IGraphicBufferProducer::QueueBufferOutput QueueBufferOutput;
+
+static constexpr uint64_t kImageBufferUsage =
+    AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
+static constexpr int kImageWidth = 640;
+static constexpr int kImageHeight = 480;
+static constexpr int kImageFormat = AIMAGE_FORMAT_RGBA_8888;
+static constexpr int kMaxImages = 1;
+
+static constexpr int64_t kQueueBufferInputTimeStamp = 1384888611;
+static constexpr bool kQueueBufferInputIsAutoTimeStamp = false;
+static constexpr android_dataspace kQueueBufferInputDataspace = HAL_DATASPACE_UNKNOWN;
+static const Rect kQueueBufferInputRect = Rect(kImageWidth, kImageHeight);
+static constexpr int kQueueBufferInputScalingMode = 0;
+static constexpr int kQueueBufferInputTransform = 0;
+static const sp<Fence> kQueueBufferInputFence = Fence::NO_FENCE;
+
+static constexpr int kOnImageAvailableWaitUs = 100 * 1000;
+
+class AImageReaderWindowTest : public ::testing::Test {
+   public:
+    void SetUp() override {
+        AImageReader_newWithUsage(kImageWidth, kImageHeight, kImageFormat,
+                                  kImageBufferUsage , kMaxImages, &imageReader_);
+        media_status_t ret = AMEDIA_ERROR_UNKNOWN;
+        ASSERT_NE(imageReader_, nullptr);
+        ret = AImageReader_setImageListener(imageReader_,
+                                            &imageReaderAvailableCb_);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        ret = AImageReader_setBufferRemovedListener(imageReader_,
+                                                    &imageReaderDetachedCb_);
+        ASSERT_EQ(ret, AMEDIA_OK);
+    }
+    void TearDown() override {
+        if (imageReader_) {
+            AImageReader_delete(imageReader_);
+        }
+    }
+
+    void HandleImageAvailable() {
+        AImage *outImage = nullptr;
+        media_status_t ret = AMEDIA_OK;
+        auto imageDeleter = [](AImage *img) { AImage_delete(img); };
+        std::unique_ptr<AImage, decltype(imageDeleter)> img(nullptr, imageDeleter);
+
+        // Test that the image can be acquired.
+        ret = AImageReader_acquireNextImage(imageReader_, &outImage);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        img.reset(outImage);
+        ASSERT_NE(img, nullptr);
+
+        // Test that we can get a handle to the image's hardware buffer and a
+        // native handle to it.
+        AHardwareBuffer *hardwareBuffer = nullptr;
+        ret = AImage_getHardwareBuffer(img.get(), &hardwareBuffer);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        ASSERT_NE(hardwareBuffer, nullptr);
+        const native_handle_t *nh = AHardwareBuffer_getNativeHandle(hardwareBuffer);
+        ASSERT_NE(nh, nullptr);
+        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
+        imageAvailable_ = true;
+        imageCondVar_.notify_one();
+    }
+
+    static void onImageAvailable(void *context, AImageReader *reader) {
+        (void)reader;
+        AImageReaderWindowTest *thisContext =
+            reinterpret_cast<AImageReaderWindowTest *>(context);
+        thisContext->HandleImageAvailable();
+    }
+
+    static void onBufferRemoved(void *, AImageReader *, AHardwareBuffer *) {
+    }
+
+    static void fillRGBA8Buffer(uint8_t* buf, int w, int h, int stride) {
+        const size_t PIXEL_SIZE = 4;
+        for (int x = 0; x < w; x++) {
+            for (int y = 0; y < h; y++) {
+                off_t offset = (y * stride + x) * PIXEL_SIZE;
+                for (int c = 0; c < 4; c++) {
+                    int parityX = (x / (1 << (c+2))) & 1;
+                    int parityY = (y / (1 << (c+2))) & 1;
+                    buf[offset + c] = (parityX ^ parityY) ? 231 : 35;
+                }
+            }
+        }
+    }
+
+    void validateIGBP(sp<IGraphicBufferProducer>& igbp) {
+        int dequeuedSlot = -1;
+        sp<Fence> dequeuedFence;
+        IGraphicBufferProducer::QueueBufferOutput output;
+        ASSERT_EQ(OK, igbp->connect(nullptr, NATIVE_WINDOW_API_CPU, false, &output));
+
+        // Test that we can dequeue a buffer.
+        ASSERT_EQ(OK,
+                  ~IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION &
+                          (igbp->dequeueBuffer(&dequeuedSlot, &dequeuedFence,
+                                               kImageWidth, kImageHeight,
+                                               kImageFormat, kImageBufferUsage,
+                                               nullptr, nullptr)));
+        EXPECT_LE(0, dequeuedSlot);
+        EXPECT_GT(BufferQueue::NUM_BUFFER_SLOTS, dequeuedSlot);
+
+        sp<GraphicBuffer> dequeuedBuffer;
+        igbp->requestBuffer(dequeuedSlot, &dequeuedBuffer);
+        uint8_t* img = nullptr;
+        ASSERT_EQ(NO_ERROR, dequeuedBuffer->lock(kImageBufferUsage, (void**)(&img)));
+
+        // Write in some placeholder image data.
+        fillRGBA8Buffer(img, dequeuedBuffer->getWidth(), dequeuedBuffer->getHeight(),
+                        dequeuedBuffer->getStride());
+        ASSERT_EQ(NO_ERROR, dequeuedBuffer->unlock());
+        QueueBufferInput queueBufferInput(kQueueBufferInputTimeStamp,
+                                          kQueueBufferInputIsAutoTimeStamp,
+                                          kQueueBufferInputDataspace,
+                                          kQueueBufferInputRect,
+                                          kQueueBufferInputScalingMode,
+                                          kQueueBufferInputTransform,
+                                          kQueueBufferInputFence);
+        QueueBufferOutput queueBufferOutput;
+        ASSERT_EQ(OK, igbp->queueBuffer(dequeuedSlot, queueBufferInput,
+                                        &queueBufferOutput));
+        // Wait until the onImageAvailable callback is called or the timeout expires.
+        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
+        imageCondVar_.wait_for(lock, std::chrono::microseconds(kOnImageAvailableWaitUs),
+                               [this]{ return this->imageAvailable_;});
+        EXPECT_TRUE(imageAvailable_) <<  "Timed out waiting for image data to be handled!\n";
+    }
+
+    AImageReader *imageReader_ = nullptr;
+    AImageReader_ImageListener imageReaderAvailableCb_{this, onImageAvailable};
+    AImageReader_BufferRemovedListener imageReaderDetachedCb_{this, onBufferRemoved};
+    std::mutex imageAvailableMutex_;
+    std::condition_variable imageCondVar_;
+    bool imageAvailable_ = false;
+};
+
+
+TEST_F(AImageReaderWindowTest, CreateWindowNativeHandle) {
+    // Check that we can create a native_handle_t corresponding to the
+    // AImageReader.
+    native_handle_t *nh = nullptr;
+    media_status_t status = AImageReader_getWindowNativeHandle(imageReader_, &nh);
+
+    // On newer devices without the HIDL TokenManager service, this API is
+    // deprecated and returns an error.
+    if (IServiceManager::Transport::EMPTY ==
+        hardware::defaultServiceManager1_2()->getTransport(ITokenManager::descriptor, "default")) {
+      EXPECT_EQ(status, AMEDIA_ERROR_UNKNOWN);
+      return;
+    }
+    ASSERT_NE(nh, nullptr);
+
+    // Check that there are only ints in the handle.
+    ASSERT_EQ(nh->numFds, 0);
+    ASSERT_NE(nh->numInts, 0);
+
+    // Check that the HGBP can be retrieved from the handle.
+    sp<HGraphicBufferProducer> hgbp =  AImageReader_getHGBPFromHandle(nh);
+    ASSERT_NE(hgbp, nullptr);
+    sp<IGraphicBufferProducer> igbp = new H2BGraphicBufferProducer(hgbp);
+
+    validateIGBP(igbp);
+}
+
+TEST_F(AImageReaderWindowTest, CreateWindow) {
+    ANativeWindow* window = nullptr;
+    media_status_t status = AImageReader_getWindow(imageReader_, &window);
+
+    ASSERT_NE(window, nullptr);
+
+    sp<IGraphicBufferProducer> igbp = Surface::getIGraphicBufferProducer(window);
+
+    validateIGBP(igbp);
+}
+
+class AImageReaderPrivateFormatTest : public ::testing::Test {
+  public:
+    void SetUp() override {
+        auto status = AImageReader_new(kImageWidth, kImageHeight, AIMAGE_FORMAT_RAW_DEPTH,
+                                       kMaxImages, &imgReader);
+        EXPECT_TRUE(status == AMEDIA_OK);
+    }
+
+    void TearDown() override {
+        if (imgReader) {
+            AImageReader_delete(imgReader);
+        }
+    }
+    AImageReader *imgReader = nullptr;
+};
+
+TEST_F(AImageReaderPrivateFormatTest, CreateTest) {
+    EXPECT_TRUE(imgReader != nullptr);
+}
+
+
+}  // namespace android
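The tests above synchronize with the onImageAvailable callback through imageAvailableMutex_, imageCondVar_ and a timed wait. A minimal sketch of that wait-with-timeout pattern in isolation, using illustrative names (Listener, notify, waitReady) rather than the NDK test types:

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    class Listener {
      public:
        // Runs on the callback thread once the event has happened.
        void notify() {
            std::lock_guard<std::mutex> lock(mMutex);
            mReady = true;
            mCondVar.notify_one();
        }
        // Runs on the waiting thread; returns false if the timeout expires first.
        bool waitReady(std::chrono::microseconds timeout) {
            std::unique_lock<std::mutex> lock(mMutex);
            // The predicate guards against spurious wakeups and against the
            // callback firing before the wait starts.
            return mCondVar.wait_for(lock, timeout, [this] { return mReady; });
        }
      private:
        std::mutex mMutex;
        std::condition_variable mCondVar;
        bool mReady = false;
    };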
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
index 086757b..80f0fc4 100644
--- a/media/utils/MethodStatistics.cpp
+++ b/media/utils/MethodStatistics.cpp
@@ -20,6 +20,8 @@
 
 // Repository for MethodStatistics Objects
 
+// It is important that HAL class names are defined with the suffix "Hidl" or "Aidl" because
+// TimerThread::isRequestFromHal uses this suffix to match binder calls to/from the HAL.
 std::shared_ptr<std::vector<std::string>>
 getStatisticsClassesForModule(std::string_view moduleName) {
     static const std::map<std::string, std::shared_ptr<std::vector<std::string>>,
@@ -34,6 +36,15 @@
                 "StreamOutHalHidl",
               })
         },
+        {
+            METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL,
+            std::shared_ptr<std::vector<std::string>>(
+                new std::vector<std::string>{
+                "DeviceHalAidl",
+                "EffectHalAidl",
+                "StreamHalAidl",
+              })
+        },
     };
     auto it = m.find(moduleName);
     if (it == m.end()) return {};
@@ -61,6 +72,9 @@
             addClassesToMap(
                     getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
                     m);
+            addClassesToMap(
+                    getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL),
+                    m);
             return m;
         }();
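With the AIDL module registered above, callers look it up through the same getStatisticsClassesForModule entry point as the HIDL module. A short sketch, assuming only the mediautils header touched in this diff; the dump helper itself is illustrative:

    #include <cstdio>
    #include <mediautils/MethodStatistics.h>

    namespace android {

    void dumpAidlStatisticsClasses() {
        const auto classes = mediautils::getStatisticsClassesForModule(
                METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL);
        if (classes == nullptr) return;  // module name not registered
        for (const auto& className : *classes) {
            // Expected entries: DeviceHalAidl, EffectHalAidl, StreamHalAidl.
            printf("%s\n", className.c_str());
        }
    }

    }  // namespace android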
 
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 2946398..c4f2808 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -27,7 +27,6 @@
 #include <media/AidlConversionUtil.h>
 #include <android/content/AttributionSourceState.h>
 
-#include <com_android_media_audio.h>
 #include <iterator>
 #include <algorithm>
 #include <pwd.h>
@@ -388,10 +387,6 @@
  */
 bool mustAnonymizeBluetoothAddress(
         const AttributionSourceState& attributionSource, const String16& caller) {
-    if (!com::android::media::audio::bluetooth_mac_address_anonymization()) {
-        return false;
-    }
-
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     if (isAudioServerOrSystemServerUid(uid)) {
         return false;
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index ad27af3..1c94691 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -129,11 +129,16 @@
 //
 /* static */
 bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
-    const size_t hidlPos = request->tag.asStringView().find("Hidl");
-    if (hidlPos == std::string::npos) return false;
-    // should be a separator afterwards Hidl which indicates the string was in the class.
-    const size_t separatorPos = request->tag.asStringView().find("::", hidlPos);
-    return separatorPos != std::string::npos;
+    for (const auto& s : {"Hidl", "Aidl"}) {
+        const auto& tagSV = request->tag.asStringView();
+        const size_t halStrPos = tagSV.find(s);
+        // A "::" separator should follow Hidl/Aidl, showing the match came from a class name.
+        if (halStrPos != std::string::npos && tagSV.find("::", halStrPos) != std::string::npos) {
+            return true;
+        }
+    }
+
+    return false;
 }
 
 struct TimerThread::SnapshotAnalysis TimerThread::getSnapshotAnalysis(size_t retiredCount) const {
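The rewritten check above accepts a request as HAL-bound when its tag contains "Hidl" or "Aidl" followed later by a "::" scope separator. The same logic in standalone form, with example tags that are assumptions about the tag format rather than strings taken from this code:

    #include <cassert>
    #include <string_view>

    bool looksLikeHalRequest(std::string_view tag) {
        for (const std::string_view s : {"Hidl", "Aidl"}) {
            const size_t halStrPos = tag.find(s);
            // Require a "::" after the suffix so the match came from a class
            // name such as StreamHalAidl::write, not an arbitrary substring.
            if (halStrPos != std::string_view::npos &&
                    tag.find("::", halStrPos) != std::string_view::npos) {
                return true;
            }
        }
        return false;
    }

    int main() {
        assert(looksLikeHalRequest("StreamOutHalHidl::write"));
        assert(looksLikeHalRequest("DeviceHalAidl::openInputStream"));
        assert(!looksLikeHalRequest("AudioFlinger::openInput"));  // no Hidl/Aidl
        assert(!looksLikeHalRequest("someAidlHelper"));           // no "::" after it
        return 0;
    }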
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index bd9a462..0a047c1 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
index c8b36d8..2543dfa 100644
--- a/media/utils/include/mediautils/MethodStatistics.h
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -124,6 +124,7 @@
 // Managed Statistics support.
 // Supported Modules
 #define METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL "AudioHidl"
+#define METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL "AudioAidl"
 
 // Returns a vector of class names for the module, or a nullptr if module not found.
 std::shared_ptr<std::vector<std::string>>
diff --git a/media/utils/include/mediautils/StaticStringView.h b/media/utils/include/mediautils/StaticStringView.h
index 14be240..e9a5deb 100644
--- a/media/utils/include/mediautils/StaticStringView.h
+++ b/media/utils/include/mediautils/StaticStringView.h
@@ -21,15 +21,15 @@
 
 #pragma push_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
 #undef EXPLICIT_CONVERSION_GENERATE_OPERATOR
-#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)               \
-    friend constexpr bool operator op(T lhs, T rhs) {                 \
-        return operator op(static_cast<U>(lhs), static_cast<U>(rhs)); \
-    }                                                                 \
-    friend constexpr bool operator op(T lhs, U rhs) {                 \
-        return operator op(static_cast<U>(lhs), rhs);                 \
-    }                                                                 \
-    friend constexpr bool operator op(U lhs, T rhs) {                 \
-        return operator op(lhs, static_cast<U>(rhs));                 \
+#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)    \
+    friend constexpr bool operator op(T lhs, T rhs) {      \
+        return static_cast<U>(lhs) op static_cast<U>(rhs); \
+    }                                                      \
+    friend constexpr bool operator op(T lhs, U rhs) {      \
+        return static_cast<U>(lhs) op rhs;                 \
+    }                                                      \
+    friend constexpr bool operator op(U lhs, T rhs) {      \
+        return lhs op static_cast<U>(rhs);                 \
     }
 
 #pragma push_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
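For reference, a sketch of what the rewritten macro expands to for == on a hypothetical wrapper type View over std::string_view; View is an illustrative stand-in, not the StaticStringView class itself:

    #include <string_view>

    struct View {
        std::string_view sv;
        constexpr explicit operator std::string_view() const { return sv; }

        // Expansion of EXPLICIT_CONVERSION_GENERATE_OPERATOR(View, std::string_view, ==)
        friend constexpr bool operator==(View lhs, View rhs) {
            return static_cast<std::string_view>(lhs) == static_cast<std::string_view>(rhs);
        }
        friend constexpr bool operator==(View lhs, std::string_view rhs) {
            return static_cast<std::string_view>(lhs) == rhs;
        }
        friend constexpr bool operator==(std::string_view lhs, View rhs) {
            return lhs == static_cast<std::string_view>(rhs);
        }
    };

    static_assert(View{"abc"} == std::string_view{"abc"});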
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 3fdc6eb..a68569a 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/Android.mk b/services/Android.mk
deleted file mode 100644
index c86a226..0000000
--- a/services/Android.mk
+++ /dev/null
@@ -1 +0,0 @@
-$(eval $(call declare-1p-copy-files,frameworks/av/services/audiopolicy,))
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index afd28e5..129541f 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -145,6 +145,7 @@
         "audioflinger-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
+        "com.android.media.audio-aconfig-cc",
         "effect-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libactivitymanager_aidl",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 9420bf1..725e5a6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -286,9 +286,6 @@
     BatteryNotifier::getInstance().noteResetAudio();
 
     mMediaLogNotifier->run("MediaLogNotifier");
-    std::vector<pid_t> halPids;
-    mDevicesFactoryHal->getHalPids(&halPids);
-    mediautils::TimeCheck::setAudioHalPids(halPids);
 
     // Notify that we have started (also called when audioserver service restarts)
     mediametrics::LogItem(mMetricsId)
@@ -314,7 +311,8 @@
     }
 
     mPatchPanel = IAfPatchPanel::create(sp<IAfPatchPanelCallback>::fromExisting(this));
-    mMelReporter = sp<MelReporter>::make(sp<IAfMelReporterCallback>::fromExisting(this));
+    mMelReporter = sp<MelReporter>::make(sp<IAfMelReporterCallback>::fromExisting(this),
+                                         mPatchPanel);
 }
 
 status_t AudioFlinger::setAudioHalPids(const std::vector<pid_t>& pids) {
@@ -875,12 +873,15 @@
             dprintf(fd, "\nIEffect binder call profile:\n");
             write(fd, timeCheckStats.c_str(), timeCheckStats.size());
 
-            // Automatically fetch HIDL statistics.
-            std::shared_ptr<std::vector<std::string>> hidlClassNames =
-                    mediautils::getStatisticsClassesForModule(
-                            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
-            if (hidlClassNames) {
-                for (const auto& className : *hidlClassNames) {
+            // Automatically fetch HIDL or AIDL statistics.
+            const std::string_view halType = (mDevicesFactoryHal->getHalVersion().getType() ==
+                                      AudioHalVersionInfo::Type::HIDL)
+                                             ? METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL
+                                             : METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL;
+            const std::shared_ptr<std::vector<std::string>> halClassNames =
+                    mediautils::getStatisticsClassesForModule(halType);
+            if (halClassNames) {
+                for (const auto& className : *halClassNames) {
                     auto stats = mediautils::getStatisticsForClass(className);
                     if (stats) {
                         timeCheckStats = stats->dump();
@@ -1956,7 +1957,7 @@
     mHardwareStatus = AUDIO_HW_IDLE;
 
     // Change parameters of the configuration each iteration until we find a
-    // configuration that the device will support.
+    // configuration that the device will support, or until the HAL suggests one it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     for (auto testChannelMask : channelMasks) {
         config.channel_mask = testChannelMask;
@@ -1966,11 +1967,16 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
+                audio_config_t loopConfig = config;
                 status_t result = dev->getInputBufferSize(&config, &bytes);
+                if (result == BAD_VALUE) {
+                    // Retry with the config suggested by the HAL.
+                    result = dev->getInputBufferSize(&config, &bytes);
+                }
                 if (result != OK || bytes == 0) {
+                    config = loopConfig;
                     continue;
                 }
-
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2170,30 +2176,24 @@
     sp<IAfThreadBase> thread;
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        if (mPlaybackThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mPlaybackThreads.valueAt(i);
+        thread = mPlaybackThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
-        if (mRecordThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mRecordThreads.valueAt(i);
+        thread = mRecordThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
-        if (mMmapThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mMmapThreads.valueAt(i);
+        thread = mMmapThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    return thread;
+    return nullptr;
 }
 
 // ----------------------------------------------------------------------------
@@ -2546,6 +2546,7 @@
         bool mm;
         if (OK == dev->getMasterMute(&mm)) {
             mMasterMute = mm;
+            ALOGI_IF(mMasterMute, "%s: applying mute from HAL %s", __func__, name);
         }
     }
 
@@ -3222,41 +3223,19 @@
         return 0;
     }
 
-    audio_config_t halconfig = *config;
-    sp<DeviceHalInterface> inHwHal = inHwDev->hwDevice();
-    sp<StreamInHalInterface> inStream;
-    status_t status = inHwHal->openInputStream(
-            *input, devices, &halconfig, flags, address, source,
-            outputDevice, outputDeviceAddress, &inStream);
-    ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
-           ", Format %#x, Channels %#x, flags %#x, status %d addr %s",
-            inStream.get(),
+    AudioStreamIn *inputStream = nullptr;
+    status_t status = inHwDev->openInputStream(
+            &inputStream,
+            *input,
             devices,
-            halconfig.sample_rate,
-            halconfig.format,
-            halconfig.channel_mask,
             flags,
-            status, address);
+            config,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress.c_str());
 
-    // If the input could not be opened with the requested parameters and we can handle the
-    // conversion internally, try to open again with the proposed parameters.
-    if (status == BAD_VALUE &&
-        audio_is_linear_pcm(config->format) &&
-        audio_is_linear_pcm(halconfig.format) &&
-        (halconfig.sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * config->sample_rate) &&
-        (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_LIMIT) &&
-        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_LIMIT)) {
-        // FIXME describe the change proposed by HAL (save old values so we can log them here)
-        ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
-        inStream.clear();
-        status = inHwHal->openInputStream(
-                *input, devices, &halconfig, flags, address, source,
-                outputDevice, outputDeviceAddress, &inStream);
-        // FIXME log this new status; HAL should not propose any further changes
-    }
-
-    if (status == NO_ERROR && inStream != 0) {
-        AudioStreamIn *inputStream = new AudioStreamIn(inHwDev, inStream, flags);
+    if (status == NO_ERROR) {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             const sp<IAfMmapCaptureThread> thread =
                     IAfMmapCaptureThread::create(this, *input, inHwDev, inputStream, mSystemReady);
@@ -4166,7 +4145,7 @@
         }
 
         // Only audio policy service can create a spatializer effect
-        if ((memcmp(&descOut.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0) &&
+        if (IAfEffectModule::isSpatializer(&descOut.type) &&
             (callingUid != AID_AUDIOSERVER || currentPid != getpid())) {
             ALOGW("%s: attempt to create a spatializer effect from uid/pid %d/%d",
                     __func__, callingUid, currentPid);
@@ -4408,11 +4387,12 @@
 
     sp<IAfThreadBase> thread = getEffectThread_l(sessionId, effectId);
     if (thread == nullptr) {
-      return;
+        return;
     }
     audio_utils::lock_guard _sl(thread->mutex());
-    sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
-    thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    if (const auto& effect = thread->getEffect_l(sessionId, effectId)) {
+        thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    }
 }
 
 
@@ -4504,7 +4484,7 @@
             if (effect->state() == IAfEffectModule::ACTIVE ||
                     effect->state() == IAfEffectModule::STOPPING) {
                 ++started;
-                effect->start();
+                effect->start_l();
             }
         }
         dstChain->mutex().unlock();
@@ -4607,7 +4587,7 @@
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
         if (effect->state() == IAfEffectModule::ACTIVE ||
             effect->state() == IAfEffectModule::STOPPING) {
-            effect->start();
+            effect->start_l();
         }
     }
 
@@ -4805,7 +4785,7 @@
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
             ALOGW("%s: transaction %d received from PID %d",
-                  __func__, code, IPCThreadState::self()->getCallingPid());
+                  __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
             switch (code) {
                 case TransactionCode::SET_RECORD_SILENCED:
@@ -4838,7 +4818,8 @@
         case TransactionCode::SUPPORTS_BLUETOOTH_VARIABLE_LATENCY: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
-                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      __func__, static_cast<int>(code),
+                      IPCThreadState::self()->getCallingPid(),
                       IPCThreadState::self()->getCallingUid());
                 // return status only for non-void methods
                 switch (code) {
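The getInputBufferSize change earlier in this file relies on the HAL overwriting the config it rejects with a suggested configuration when it returns BAD_VALUE, retrying once with that suggestion and letting the loop keep its own parameters on failure. A minimal sketch of that pattern; probe, Config and Status are illustrative stand-ins, not the DeviceHalInterface API:

    #include <cstddef>

    enum class Status { Ok, BadValue, Error };
    struct Config { int sampleRate; int channelCount; };

    // Mirrors a HAL query that may overwrite *config with its own suggestion
    // when it returns Status::BadValue.
    using ProbeFn = Status (*)(Config* config, size_t* bytes);

    Status probeWithRetry(ProbeFn probe, const Config& requested, size_t* bytes) {
        Config config = requested;   // what is actually handed to the HAL
        Status result = probe(&config, bytes);
        if (result == Status::BadValue) {
            // The HAL left its suggested parameters in config; retry once with them.
            result = probe(&config, bytes);
        }
        if (result != Status::Ok || *bytes == 0) {
            return Status::Error;    // caller continues iterating with its own parameters
        }
        return Status::Ok;
    }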
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 201d147..feae97e 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -143,7 +143,7 @@
         if (lStatus == NO_ERROR) {
             lStatus = effect->addHandle(handle.get());
             if (lStatus == NO_ERROR) {
-                lStatus = effect->init(patches);
+                lStatus = effect->init_l(patches);
                 if (lStatus == NAME_NOT_FOUND) {
                     lStatus = NO_ERROR;
                 }
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 7045c8b..287d838 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -139,7 +139,7 @@
     // check if effects should be suspended or restored when a given effect is enable or disabled
     void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                           bool enabled __unused, bool threadLocked __unused) final {}
-    void resetVolume() final {}
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex) {}
     product_strategy_t strategy() const final { return static_cast<product_strategy_t>(0); }
     int32_t activeTrackCnt() const final { return 0; }
     void onEffectEnable(const sp<IAfEffectBase>& effect __unused) final {}
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 73a89e5..b270813 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -570,10 +570,10 @@
     : EffectBase(callback, desc, id, sessionId, pinned),
       // clear mConfig to ensure consistent initial value of buffer framecount
       // in case buffers are associated by setInBuffer() or setOutBuffer()
-      // prior to configure().
+      // prior to configure_l().
       mConfig{{}, {}},
       mStatus(NO_INIT),
-      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
+      mMaxDisableWaitCnt(1), // set by configure_l(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
       mOffloaded(false),
       mIsOutput(false)
@@ -588,13 +588,13 @@
     if (mStatus != NO_ERROR) {
         return;
     }
-    lStatus = init();
+    lStatus = init_l();
     if (lStatus < 0) {
         mStatus = lStatus;
         goto Error;
     }
 
-    setOffloaded(callback->isOffload(), callback->io());
+    setOffloaded_l(callback->isOffload(), callback->io());
     ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
 
     return;
@@ -616,7 +616,7 @@
 
 }
 
-bool EffectModule::updateState() {
+bool EffectModule::updateState_l() {
     audio_utils::lock_guard _l(mutex());
 
     bool started = false;
@@ -632,7 +632,7 @@
                    0,
                    mConfig.inputCfg.buffer.frameCount*sizeof(int32_t));
         }
-        if (start_l() == NO_ERROR) {
+        if (start_ll() == NO_ERROR) {
             mState = ACTIVE;
             started = true;
         } else {
@@ -641,8 +641,8 @@
         break;
     case STOPPING:
         // volume control for offload and direct threads must take effect immediately.
-        if (stop_l() == NO_ERROR
-            && !(isVolumeControl() && isOffloadedOrDirect())) {
+        if (stop_ll() == NO_ERROR
+            && !(isVolumeControl() && isOffloadedOrDirect_l())) {
             mDisableWaitCnt = mMaxDisableWaitCnt;
         } else {
             mDisableWaitCnt = 1; // will cause immediate transition to IDLE
@@ -836,9 +836,9 @@
     mEffectInterface->command(EFFECT_CMD_RESET, 0, NULL, &replySize, &reply);
 }
 
-status_t EffectModule::configure()
+status_t EffectModule::configure_l()
 {
-    ALOGVV("configure() started");
+    ALOGVV("%s started", __func__);
     status_t status;
     uint32_t size;
     audio_channel_mask_t channelMask;
@@ -879,7 +879,7 @@
     mConfig.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
-    if (callback->isOffloadOrDirect() && !isOffloaded()) {
+    if (callback->isOffloadOrDirect() && !isOffloaded_l()) {
         mConfig.inputCfg.samplingRate = DEFAULT_OUTPUT_SAMPLE_RATE;
         ALOGV("Overriding effect input as 48kHz");
     } else {
@@ -909,9 +909,9 @@
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
     mIsOutput = callback->isOutput();
 
-    ALOGV("configure() %p chain %p buffer %p framecount %zu",
-          this, callback->chain().promote().get(),
-          mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+    ALOGV("%s %p chain %p buffer %p framecount %zu", __func__, this,
+          callback->chain().promote().get(), mConfig.inputCfg.buffer.raw,
+          mConfig.inputCfg.buffer.frameCount);
 
     status_t cmdStatus;
     size = sizeof(int);
@@ -1012,11 +1012,11 @@
 exit:
     // TODO: consider clearing mConfig on error.
     mStatus = status;
-    ALOGVV("configure ended");
+    ALOGVV("%s ended", __func__);
     return status;
 }
 
-status_t EffectModule::init()
+status_t EffectModule::init_l()
 {
     audio_utils::lock_guard _l(mutex());
     if (mEffectInterface == 0) {
@@ -1048,21 +1048,21 @@
     }
 }
 
-// start() must be called with PlaybackThread::mutex() or EffectChain::mutex() held
-status_t EffectModule::start()
+// start_l() must be called with EffectChain::mutex() held
+status_t EffectModule::start_l()
 {
     status_t status;
     {
         audio_utils::lock_guard _l(mutex());
-        status = start_l();
+        status = start_ll();
     }
     if (status == NO_ERROR) {
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
     }
     return status;
 }
 
-status_t EffectModule::start_l()
+status_t EffectModule::start_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1086,13 +1086,13 @@
     return status;
 }
 
-status_t EffectModule::stop()
+status_t EffectModule::stop_l()
 {
     audio_utils::lock_guard _l(mutex());
-    return stop_l();
+    return stop_ll();
 }
 
-status_t EffectModule::stop_l()
+status_t EffectModule::stop_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1103,11 +1103,11 @@
     status_t cmdStatus = NO_ERROR;
     uint32_t size = sizeof(status_t);
 
-    if (isVolumeControl() && isOffloadedOrDirect()) {
+    if (isVolumeControl() && isOffloadedOrDirect_l()) {
         // We have the EffectChain and EffectModule lock, permit a reentrant call to setVolume:
         // resetVolume_l --> setVolume_l --> EffectModule::setVolume
         mSetVolumeReentrantTid = gettid();
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
         mSetVolumeReentrantTid = INVALID_PID;
     }
 
@@ -1162,7 +1162,7 @@
                      std::vector<uint8_t>* reply)
 {
     audio_utils::lock_guard _l(mutex());
-    ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
+    ALOGVV("%s, cmdCode: %d, mEffectInterface: %p", __func__, cmdCode, mEffectInterface.get());
 
     if (mState == DESTROYED || mEffectInterface == 0) {
         return NO_INIT;
@@ -1258,20 +1258,20 @@
     }
 }
 
-bool EffectModule::isOffloadedOrDirect() const
+bool EffectModule::isOffloadedOrDirect_l() const
 {
     return getCallback()->isOffloadOrDirect();
 }
 
-bool EffectModule::isVolumeControlEnabled() const
+bool EffectModule::isVolumeControlEnabled_l() const
 {
-    return (isVolumeControl() && (isOffloadedOrDirect() ? isEnabled() : isProcessEnabled()));
+    return (isVolumeControl() && (isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()));
 }
 
 void EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setInBuffer %p",(&buffer));
 
-    // mConfig.inputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.inputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
@@ -1317,7 +1317,7 @@
 void EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setOutBuffer %p",(&buffer));
 
-    // mConfig.outputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.outputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
@@ -1356,8 +1356,7 @@
     }
 }
 
-status_t EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
-{
+status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller) {
     AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1480,7 +1479,7 @@
     return status;
 }
 
-status_t EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+status_t EffectModule::setOffloaded_l(bool offloaded, audio_io_handle_t io)
 {
     audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
@@ -1509,11 +1508,11 @@
         }
         mOffloaded = false;
     }
-    ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status);
+    ALOGV("%s offloaded %d io %d status %d", __func__, offloaded, io, status);
     return status;
 }
 
-bool EffectModule::isOffloaded() const
+bool EffectModule::isOffloaded_l() const
 {
     audio_utils::lock_guard _l(mutex());
     return mOffloaded;
@@ -1528,8 +1527,16 @@
     return IAfEffectModule::isHapticGenerator(&mDescriptor.type);
 }
 
-status_t EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
-{
+/*static*/
+bool IAfEffectModule::isSpatializer(const effect_uuid_t *type) {
+    return memcmp(type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+}
+
+bool EffectModule::isSpatializer() const {
+    return IAfEffectModule::isSpatializer(&mDescriptor.type);
+}
+
+status_t EffectModule::setHapticScale_l(int id, os::HapticScale hapticScale) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1544,7 +1551,7 @@
     param->vsize = sizeof(int32_t) * 2;
     *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
     *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
+    *((int32_t*)param->data + 2) = static_cast<int32_t>(hapticScale.getLevel());
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1554,8 +1561,7 @@
     return status;
 }
 
-status_t EffectModule::setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo)
-{
+status_t EffectModule::setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1584,8 +1590,8 @@
     return status;
 }
 
-status_t EffectModule::getConfigs(
-        audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
+status_t EffectModule::getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                    bool* isOutput) const {
     audio_utils::lock_guard _l(mutex());
     if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
         return NO_INIT;
@@ -1600,6 +1606,35 @@
     return NO_ERROR;
 }
 
+status_t EffectModule::sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) {
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    // TODO b/307368176: send all metadata to effects if requested by the implementation.
+    // For now only send channel mask to Spatializer.
+    if (!isSpatializer()) {
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(
+            sizeof(effect_param_t) + sizeof(int32_t) + metadata.size() * sizeof(uint32_t));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = metadata.size() * sizeof(uint32_t);
+    *(int32_t*)param->data = SPATIALIZER_PARAM_INPUT_CHANNEL_MASK;
+    uint32_t* channelMasks = reinterpret_cast<uint32_t*>(param->data + sizeof(int32_t));
+    for (auto m : metadata) {
+        *channelMasks++ = m.channel_mask;
+    }
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != sizeof(status_t));
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -1910,7 +1945,7 @@
     audio_config_base_t inputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     audio_config_base_t outputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     bool isOutput;
-    status_t status = effectModule->getConfigs(&inputCfg, &outputCfg, &isOutput);
+    status_t status = effectModule->getConfigs_l(&inputCfg, &outputCfg, &isOutput);
     if (status == NO_ERROR) {
         constexpr bool isInput = false; // effects always use 'OUT' channel masks.
         _config->inputCfg = VALUE_OR_RETURN_STATUS_AS_OUT(
@@ -2176,7 +2211,7 @@
     return 0;
 }
 
-std::vector<int> EffectChain::getEffectIds() const
+std::vector<int> EffectChain::getEffectIds_l() const
 {
     std::vector<int> ids;
     audio_utils::lock_guard _l(mutex());
@@ -2206,8 +2241,7 @@
 }
 
 // Must be called with EffectChain::mutex() locked
-void EffectChain::process_l()
-{
+void EffectChain::process_l() {
     // never process effects when:
     // - on an OFFLOAD thread
     // - no more tracks are on the session and the effect tail has been rendered
@@ -2250,7 +2284,7 @@
     }
     bool doResetVolume = false;
     for (size_t i = 0; i < size; i++) {
-        doResetVolume = mEffects[i]->updateState() || doResetVolume;
+        doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
     }
     if (doResetVolume) {
         resetVolume_l();
@@ -2304,14 +2338,14 @@
                 numSamples * sizeof(float), &halBuffer);
         if (result != OK) return result;
 
-        effect->configure();
+        effect->configure_l();
 
         effect->setInBuffer(halBuffer);
         // auxiliary effects output samples to chain input buffer for further processing
         // by insert effects
         effect->setOutBuffer(mInBuffer);
     } else {
-        ssize_t idx_insert = getInsertIndex(desc);
+        ssize_t idx_insert = getInsertIndex_ll(desc);
         if (idx_insert < 0) {
             return INVALID_OPERATION;
         }
@@ -2319,7 +2353,7 @@
         size_t previousSize = mEffects.size();
         mEffects.insertAt(effect, idx_insert);
 
-        effect->configure();
+        effect->configure_l();
 
         // - By default:
         //   All effects read samples from chain input buffer.
@@ -2334,9 +2368,9 @@
             effect->setOutBuffer(mOutBuffer);
             if (idx_insert == 0) {
                 if (previousSize != 0) {
-                    mEffects[1]->configure();
+                    mEffects[1]->configure_l();
                     mEffects[1]->setInBuffer(mOutBuffer);
-                    mEffects[1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setInBuffer(mInBuffer);
             } else {
@@ -2346,9 +2380,9 @@
             effect->setInBuffer(mInBuffer);
             if (idx_insert == static_cast<ssize_t>(previousSize)) {
                 if (idx_insert != 0) {
-                    mEffects[idx_insert-1]->configure();
+                    mEffects[idx_insert-1]->configure_l();
                     mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
-                    mEffects[idx_insert - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[idx_insert - 1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setOutBuffer(mOutBuffer);
             } else {
@@ -2358,21 +2392,21 @@
         ALOGV("%s effect %p, added in chain %p at rank %zu",
                 __func__, effect.get(), this, idx_insert);
     }
-    effect->configure();
+    effect->configure_l();
 
     return NO_ERROR;
 }
 
 std::optional<size_t> EffectChain::findVolumeControl_l(size_t from, size_t to) const {
     for (size_t i = std::min(to, mEffects.size()); i > from; i--) {
-        if (mEffects[i - 1]->isVolumeControlEnabled()) {
+        if (mEffects[i - 1]->isVolumeControlEnabled_l()) {
             return i - 1;
         }
     }
     return std::nullopt;
 }
 
-ssize_t EffectChain::getInsertIndex(const effect_descriptor_t& desc) {
+ssize_t EffectChain::getInsertIndex_ll(const effect_descriptor_t& desc) {
     // Insert effects are inserted at the end of mEffects vector as they are processed
     //  after track and auxiliary effects.
     // Insert effect order as a function of indicated preference:
@@ -2387,7 +2421,7 @@
     // already present
     // Spatializer or Downmixer effects are inserted in first position because
     // they adapt the channel count for all other effects in the chain
-    if ((memcmp(&desc.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0)
+    if (IAfEffectModule::isSpatializer(&desc.type)
             || (memcmp(&desc.type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0)) {
         return 0;
     }
@@ -2460,7 +2494,7 @@
             // the middle of a read from audio HAL
             if (mEffects[i]->state() == EffectModule::ACTIVE ||
                     mEffects[i]->state() == EffectModule::STOPPING) {
-                mEffects[i]->stop();
+                mEffects[i]->stop_l();
             }
             if (release) {
                 mEffects[i]->release_l();
@@ -2468,9 +2502,9 @@
 
             if (type != EFFECT_FLAG_TYPE_AUXILIARY) {
                 if (i == size - 1 && i != 0) {
-                    mEffects[i - 1]->configure();
+                    mEffects[i - 1]->configure_l();
                     mEffects[i - 1]->setOutBuffer(mOutBuffer);
-                    mEffects[i - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[i - 1]->updateAccessMode_l();      // reconfig if needed.
                 }
             }
             mEffects.removeAt(i);
@@ -2479,9 +2513,9 @@
             // is updated if needed (can switch from HAL channel mask to mixer channel mask)
             if (type != EFFECT_FLAG_TYPE_AUXILIARY // TODO(b/284522658) breaks for aux FX, why?
                     && i == 0 && size > 1) {
-                mEffects[0]->configure();
+                mEffects[0]->configure_l();
                 mEffects[0]->setInBuffer(mInBuffer);
-                mEffects[0]->updateAccessMode();      // reconfig if neeeded.
+                mEffects[0]->updateAccessMode_l();      // reconfig if needed.
             }
 
             ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %zu", effect.get(),
@@ -2531,14 +2565,19 @@
 
 bool EffectChain::hasVolumeControlEnabled_l() const {
     for (const auto &effect : mEffects) {
-        if (effect->isVolumeControlEnabled()) return true;
+        if (effect->isVolumeControlEnabled_l()) return true;
     }
     return false;
 }
 
-// setVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
-bool EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
-{
+// setVolume() must be called without EffectChain::mutex() held
+bool EffectChain::setVolume(uint32_t* left, uint32_t* right, bool force) {
+    audio_utils::lock_guard _l(mutex());
+    return setVolume_l(left, right, force);
+}
+
+// setVolume_l() must be called with EffectChain::mutex() held
+bool EffectChain::setVolume_l(uint32_t* left, uint32_t* right, bool force) {
     uint32_t newLeft = *left;
     uint32_t newRight = *right;
     const size_t size = mEffects.size();
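The new setVolume() above follows the wrapper-plus-_l convention applied throughout this change: the unsuffixed method acquires the mutex and delegates, while the _l variant assumes the caller already holds it. A generic sketch of the pattern; Chain and its members are illustrative names, not the audioflinger classes:

    #include <cstdint>
    #include <mutex>

    class Chain {
      public:
        // Public entry point: must be called without mMutex held.
        bool setVolume(uint32_t* left, uint32_t* right) {
            std::lock_guard<std::mutex> lock(mMutex);
            return setVolume_l(left, right);
        }

      private:
        // _l suffix: the caller already holds mMutex (here via setVolume()).
        bool setVolume_l(uint32_t* left, uint32_t* right) {
            mLeft = *left;
            mRight = *right;
            return true;
        }

        std::mutex mMutex;
        uint32_t mLeft = 0;
        uint32_t mRight = 0;
    };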
@@ -2613,7 +2652,7 @@
     return volumeControlIndex.has_value();
 }
 
-// resetVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
+// resetVolume_l() must be called with EffectChain::mutex() held
 void EffectChain::resetVolume_l()
 {
     if ((mLeftVolume != UINT_MAX) && (mRightVolume != UINT_MAX)) {
@@ -2635,15 +2674,15 @@
     return false;
 }
 
-void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
+void EffectChain::setHapticScale_l(int id, os::HapticScale hapticScale)
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); ++i) {
-        mEffects[i]->setHapticIntensity(id, intensity);
+        mEffects[i]->setHapticScale_l(id, hapticScale);
     }
 }
 
-void EffectChain::syncHalEffectsState()
+void EffectChain::syncHalEffectsState_l()
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2712,7 +2751,7 @@
         }
 
         if (desc->mRefCount++ == 0) {
-            sp<IAfEffectModule> effect = getEffectIfEnabled(type);
+            sp<IAfEffectModule> effect = getEffectIfEnabled_l(type);
             if (effect != 0) {
                 desc->mEffect = effect;
                 effect->setSuspended(true);
@@ -2765,7 +2804,7 @@
         }
         if (desc->mRefCount++ == 0) {
             Vector< sp<IAfEffectModule> > effects;
-            getSuspendEligibleEffects(effects);
+            getSuspendEligibleEffects_l(effects);
             for (size_t i = 0; i < effects.size(); i++) {
                 setEffectSuspended_l(&effects[i]->desc().type, true);
             }
@@ -2806,8 +2845,7 @@
 #endif //OPENSL_ES_H_
 
 /* static */
-bool EffectChain::isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type)
-{
+bool EffectChain::isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type) {
     // Only NS and AEC are suspended when BtNRec is off
     if ((memcmp(type, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) ||
         (memcmp(type, FX_IID_NS, sizeof(effect_uuid_t)) == 0)) {
@@ -2816,7 +2854,7 @@
     return false;
 }
 
-bool EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc)
+bool EffectChain::isEffectEligibleForSuspend_l(const effect_descriptor_t& desc)
 {
     // auxiliary effects and visualizer are never suspended on output mix
     if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) &&
@@ -2829,26 +2867,24 @@
     return true;
 }
 
-void EffectChain::getSuspendEligibleEffects(
+void EffectChain::getSuspendEligibleEffects_l(
         Vector< sp<IAfEffectModule> > &effects)
 {
     effects.clear();
     for (size_t i = 0; i < mEffects.size(); i++) {
-        if (isEffectEligibleForSuspend(mEffects[i]->desc())) {
+        if (isEffectEligibleForSuspend_l(mEffects[i]->desc())) {
             effects.add(mEffects[i]);
         }
     }
 }
 
-sp<IAfEffectModule> EffectChain::getEffectIfEnabled(const effect_uuid_t *type)
+sp<IAfEffectModule> EffectChain::getEffectIfEnabled_l(const effect_uuid_t *type)
 {
     sp<IAfEffectModule> effect = getEffectFromType_l(type);
     return effect != 0 && effect->isEnabled() ? effect : 0;
 }
 
-void EffectChain::checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect,
-                                                            bool enabled)
-{
+void EffectChain::checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) {
     ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
     if (enabled) {
         if (index < 0) {
@@ -2857,18 +2893,17 @@
             if (index < 0) {
                 return;
             }
-            if (!isEffectEligibleForSuspend(effect->desc())) {
+            if (!isEffectEligibleForSuspend_l(effect->desc())) {
                 return;
             }
             setEffectSuspended_l(&effect->desc().type, enabled);
             index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
             if (index < 0) {
-                ALOGW("checkSuspendOnEffectEnabled() Fx should be suspended here!");
+                ALOGW("%s Fx should be suspended here!", __func__);
                 return;
             }
         }
-        ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s enable suspending fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         // if effect is requested to be suspended but was not yet enabled, suspend it now.
         if (desc->mEffect == 0) {
@@ -2880,8 +2915,7 @@
         if (index < 0) {
             return;
         }
-        ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s disable restoring fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         desc->mEffect.clear();
         effect->setSuspended(false);
@@ -2983,6 +3017,20 @@
     return true;
 }
 
+// sendMetadata_l() must be called with thread->mutex() held
+void EffectChain::sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata) {
+    audio_utils::lock_guard _l(mutex());
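+    // Spatializer effects receive only the spatialized tracks' metadata (when provided);
+    // every other effect in the chain receives the full metadata list.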
+    for (const auto& effect : mEffects) {
+        if (spatializedMetadata.has_value()
+                && IAfEffectModule::isSpatializer(&effect->desc().type)) {
+            effect->sendMetadata_ll(spatializedMetadata.value());
+        } else {
+            effect->sendMetadata_ll(allMetadata);
+        }
+    }
+}
+
 // EffectCallbackInterface implementation
 status_t EffectChain::EffectCallback::createEffectHal(
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
@@ -3196,8 +3244,9 @@
     t->setVolumeForOutput_l(left, right);
 }
 
-void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
-        const sp<IAfEffectBase>& effect, bool enabled, bool threadLocked) {
+void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
+                                                              bool enabled, bool threadLocked)
+        NO_THREAD_SAFETY_ANALYSIS {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
@@ -3209,7 +3258,7 @@
         return;
     }
     // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
-    c->checkSuspendOnEffectEnabled(effect->asEffectModule(), enabled);
+    c->checkSuspendOnEffectEnabled_l(effect->asEffectModule(), enabled);
 }
 
 void EffectChain::EffectCallback::onEffectEnable(const sp<IAfEffectBase>& effect) {
@@ -3241,7 +3290,7 @@
     return true;
 }
 
-void EffectChain::EffectCallback::resetVolume() {
+void EffectChain::EffectCallback::resetVolume_l() {
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
@@ -3302,7 +3351,7 @@
     return status;
 }
 
-status_t DeviceEffectProxy::init(
+status_t DeviceEffectProxy::init_l(
         const std::map <audio_patch_handle_t, IAfPatchPanel::Patch>& patches) {
 //For all audio patches
 //If src or sink device match
@@ -3406,7 +3455,7 @@
             } else {
                 mHalEffect->setDevices({mDevice});
             }
-            mHalEffect->configure();
+            mHalEffect->configure_l();
         }
         *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
                                    mNotifyFramesProcessed);
@@ -3695,7 +3744,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->start();
+    effect->start_l();
 }
 
 void DeviceEffectProxy::ProxyCallback::onEffectDisable(
@@ -3704,7 +3753,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->stop();
+    effect->stop_l();
 }
 
 } // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 9208c88..46c44a6 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -78,11 +78,11 @@
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_MONITOR; }
 
-    status_t setEnabled(bool enabled, bool fromHandle) override;
-    status_t setEnabled_l(bool enabled) final;
+    status_t setEnabled(bool enabled, bool fromHandle) override EXCLUDES_EffectBase_Mutex;
+    status_t setEnabled_l(bool enabled) final REQUIRES(audio_utils::EffectBase_Mutex);
     bool isEnabled() const final;
-    void setSuspended(bool suspended) final;
-    bool suspended() const final;
+    void setSuspended(bool suspended) final EXCLUDES_EffectBase_Mutex;
+    bool suspended() const final EXCLUDES_EffectBase_Mutex;
 
     status_t command(int32_t __unused,
                              const std::vector<uint8_t>& __unused,
@@ -99,36 +99,40 @@
         return mCallback.load();
     }
 
-    status_t addHandle(IAfEffectHandle *handle) final;
-    ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) final;
-    ssize_t removeHandle(IAfEffectHandle *handle) final;
-    ssize_t removeHandle_l(IAfEffectHandle *handle) final;
-    IAfEffectHandle* controlHandle_l() final;
-    bool purgeHandles() final;
+    status_t addHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t disconnectHandle(IAfEffectHandle* handle,
+                             bool unpinIfLast) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle_l(IAfEffectHandle* handle) final REQUIRES(audio_utils::EffectBase_Mutex);
+    IAfEffectHandle* controlHandle_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    bool purgeHandles() final EXCLUDES_EffectBase_Mutex;
 
-    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
+    void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
 
-    bool             isPinned() const final { return mPinned; }
-    void             unPin() final { mPinned = false; }
+    bool isPinned() const final { return mPinned; }
+    void unPin() final { mPinned = false; }
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) {
+        return mMutex;
+    }
 
-    status_t         updatePolicyState() final;
+    status_t updatePolicyState() final EXCLUDES_EffectBase_Mutex;
 
     sp<IAfEffectModule> asEffectModule() override { return nullptr; }
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() override { return nullptr; }
 
-    void             dump(int fd, const Vector<String16>& args) const override;
+    void dump(int fd, const Vector<String16>& args) const override;
 
 protected:
-    bool             isInternal_l() const {
-                         for (auto handle : mHandles) {
-                            if (handle->client() != nullptr) {
-                                return false;
-                            }
-                         }
-                         return true;
-                     }
+    bool isInternal_l() const REQUIRES(audio_utils::EffectBase_Mutex) {
+        for (auto handle : mHandles) {
+            if (handle->client() != nullptr) {
+                return false;
+            }
+        }
+        return true;
+    }
 
     bool             mPinned = false;
 
@@ -150,7 +154,10 @@
     // Audio policy effect state management
     // Mutex protecting transactions with audio policy manager as mutex() cannot
     // be held to avoid cross deadlocks with audio policy mutex
-    audio_utils::mutex& policyMutex() const { return mPolicyMutex; }
+    audio_utils::mutex& policyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_PolicyMutex) {
+        return mPolicyMutex;
+    }
     mutable audio_utils::mutex mPolicyMutex{audio_utils::MutexOrder::kEffectBase_PolicyMutex};
     // Effect is registered in APM or not
     bool                      mPolicyRegistered = false;
@@ -175,25 +182,23 @@
                     int id,
                     audio_session_t sessionId,
                     bool pinned,
-                    audio_port_handle_t deviceId);
-    ~EffectModule() override;
+                    audio_port_handle_t deviceId) REQUIRES(audio_utils::EffectChain_Mutex);
+    ~EffectModule() override REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void process() final;
-    bool updateState() final;
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    void process() final EXCLUDES_EffectBase_Mutex;
+    bool updateState_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_EffectBase_Mutex;
 
-    void reset_l() final;
-    status_t configure() final;
-    status_t init() final;
+    void reset_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    status_t configure_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    status_t init_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
     uint32_t status() const final {
         return mStatus;
     }
     bool isProcessEnabled() const final;
-    bool isOffloadedOrDirect() const final;
-    bool isVolumeControlEnabled() const final;
+    bool isOffloadedOrDirect_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
+    bool isVolumeControlEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final;
     int16_t *inBuffer() const final {
         return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
@@ -203,34 +208,42 @@
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    void updateAccessMode() final {
-                    if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
-                        configure();
-                    }
-                }
-    status_t setDevices(const AudioDeviceTypeAddrVector &devices) final;
-    status_t setInputDevice(const AudioDeviceTypeAddr &device) final;
+    void updateAccessMode_l() final REQUIRES(audio_utils::EffectChain_Mutex) {
+        if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
+            configure_l();
+        }
+    }
+    status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
+    status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
     status_t setVolume(uint32_t *left, uint32_t *right, bool controller) final;
-    status_t setMode(audio_mode_t mode) final;
-    status_t setAudioSource(audio_source_t source) final;
-    status_t start() final;
-    status_t stop() final;
+    status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
+    status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
+    status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t stop_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t setOffloaded(bool offloaded, audio_io_handle_t io) final;
-    bool isOffloaded() const final;
-    void addEffectToHal_l() final;
-    void release_l() final;
+    status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    bool isOffloaded_l() const final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void release_l() final REQUIRES(audio_utils::EffectChain_Mutex);
 
     sp<IAfEffectModule> asEffectModule() final { return this; }
 
     bool isHapticGenerator() const final;
+    bool isSpatializer() const final;
 
-    status_t setHapticIntensity(int id, os::HapticScale intensity) final;
-    status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) final;
+    status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) final
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t getConfigs(audio_config_base_t* inputCfg,
-                                audio_config_base_t* outputCfg,
-                                bool* isOutput) const final;
+    status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                          bool* isOutput) const final
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex;
 
     void dump(int fd, const Vector<String16>& args) const final;
 
@@ -241,9 +254,9 @@
 
     DISALLOW_COPY_AND_ASSIGN(EffectModule);
 
-    status_t start_l();
-    status_t stop_l();
-    status_t removeEffectFromHal_l();
+    status_t start_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t stop_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t removeEffectFromHal_l() REQUIRES(audio_utils::EffectChain_Mutex);
     status_t sendSetAudioDevicesCommand(const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode);
     effect_buffer_access_e requiredEffectBufferAccessMode() const {
         return mConfig.inputCfg.buffer.raw == mConfig.outputCfg.buffer.raw
@@ -366,7 +379,9 @@
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    audio_utils::mutex& mutex() const { return mMutex; }
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
+        return mMutex;
+    }
     // protects IEffect method calls
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
@@ -399,34 +414,43 @@
 public:
     EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId);
 
-    void process_l() final;
+    void process_l() final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) {
+        return mMutex;
+    }
 
-    status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) final;
-    status_t addEffect_l(const sp<IAfEffectModule>& handle) final;
-    status_t addEffect_ll(const sp<IAfEffectModule>& handle) final;
-    size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final;
+    status_t createEffect_l(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                            audio_session_t sessionId, bool pinned) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    status_t addEffect_l(const sp<IAfEffectModule>& handle) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    status_t addEffect_ll(const sp<IAfEffectModule>& handle) final
+            REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
+    size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     audio_session_t sessionId() const final { return mSessionId; }
     void setSessionId(audio_session_t sessionId) final { mSessionId = sessionId; }
 
-    sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const final;
-    sp<IAfEffectModule> getEffectFromId_l(int id) const final;
-    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const final;
-    std::vector<int> getEffectIds() const final;
+    sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t* descriptor) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
+    sp<IAfEffectModule> getEffectFromId_l(int id) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
+    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
+    std::vector<int> getEffectIds_l() const final REQUIRES(audio_utils::ThreadBase_Mutex);
     // FIXME use float to improve the dynamic range
 
-    bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) final;
-    void resetVolume_l() final;
-    void setDevices_l(const AudioDeviceTypeAddrVector &devices) final;
-    void setInputDevice_l(const AudioDeviceTypeAddr &device) final;
-    void setMode_l(audio_mode_t mode) final;
-    void setAudioSource_l(audio_source_t source) final;
+    bool setVolume(uint32_t* left, uint32_t* right,
+                   bool force = false) final EXCLUDES_EffectChain_Mutex;
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void setDevices_l(const AudioDeviceTypeAddrVector& devices) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
+    void setInputDevice_l(const AudioDeviceTypeAddr& device) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
+    void setMode_l(audio_mode_t mode) final REQUIRES(audio_utils::ThreadBase_Mutex);
+    void setAudioSource_l(audio_source_t source) final REQUIRES(audio_utils::ThreadBase_Mutex);
 
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final {
         mInBuffer = buffer;
@@ -457,21 +481,22 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    void setEffectSuspended_l(const effect_uuid_t *type,
-                              bool suspend) final;
+    void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // suspend all eligible effects
-    void setEffectSuspendedAll_l(bool suspend) final;
+    void setEffectSuspendedAll_l(bool suspend) final REQUIRES(audio_utils::ThreadBase_Mutex);
     // check if effects should be suspended or restored when a given effect is enabled or disabled
-    void checkSuspendOnEffectEnabled(
-            const sp<IAfEffectModule>& effect, bool enabled) final;
+    void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer() final;
+    void clearInputBuffer() final EXCLUDES_EffectChain_Mutex;
 
     // At least one non offloadable effect in the chain is enabled
-    bool isNonOffloadableEnabled() const final;
-    bool isNonOffloadableEnabled_l() const final;
+    bool isNonOffloadableEnabled() const final EXCLUDES_EffectChain_Mutex;
+    bool isNonOffloadableEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void syncHalEffectsState() final;
+    void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex final;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     void checkOutputFlagCompatibility(audio_output_flags_t *flags) const final;
@@ -490,12 +515,13 @@
 
     // isCompatibleWithThread_l() must be called with thread->mutex() held
     bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
-            REQUIRES(audio_utils::ThreadBase_Mutex);
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
     bool containsHapticGeneratingEffect_l() final;
 
-    void setHapticIntensity_l(int id, os::HapticScale intensity) final;
+    void setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     sp<EffectCallbackInterface> effectCallback() const final { return mEffectCallback; }
 
@@ -513,9 +539,15 @@
         return mEffects[index];
     }
 
-    void setThread(const sp<IAfThreadBase>& thread) final;
+    void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata)
+            final REQUIRES(audio_utils::ThreadBase_Mutex);
 
-private:
+    void setThread(const sp<IAfThreadBase>& thread) final EXCLUDES_EffectChain_Mutex;
+
+  private:
+    bool setVolume_l(uint32_t* left, uint32_t* right, bool force = false)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // For transaction consistency, please consider holding the EffectChain lock before
     // calling the EffectChain::EffectCallback methods, excepting
@@ -562,9 +594,10 @@
         void setVolumeForOutput(float left, float right) const override;
 
         // check if effects should be suspended/restored when a given effect is enable/disabled
-        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
-                              bool enabled, bool threadLocked) override;
-        void resetVolume() override;
+        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect, bool enabled,
+                                         bool threadLocked) override;
+        void resetVolume_l() override
+                REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
         product_strategy_t strategy() const override;
         int32_t activeTrackCnt() const override;
         void onEffectEnable(const sp<IAfEffectBase>& effect) override;
@@ -604,27 +637,34 @@
 
     // get a list of effect modules to suspend when an effect of the type
     // passed is enabled.
-    void  getSuspendEligibleEffects(Vector<sp<IAfEffectModule>> &effects);
+    void getSuspendEligibleEffects_l(Vector<sp<IAfEffectModule>>& effects)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
     // get an effect module if it is currently enable
-    sp<IAfEffectModule> getEffectIfEnabled(const effect_uuid_t *type);
+    sp<IAfEffectModule> getEffectIfEnabled_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // true if the effect whose descriptor is passed can be suspended
     // OEMs can modify the rules implemented in this method to exclude specific effect
     // types or implementations from the suspend/restore mechanism.
-    bool isEffectEligibleForSuspend(const effect_descriptor_t& desc);
+    bool isEffectEligibleForSuspend_l(const effect_descriptor_t& desc)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    static bool isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type);
+    static bool isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer_l();
+    void clearInputBuffer_l() REQUIRES(audio_utils::EffectChain_Mutex);
 
     // true if any effect module within the chain has volume control
-    bool hasVolumeControlEnabled_l() const;
+    bool hasVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void setVolumeForOutput_l(uint32_t left, uint32_t right);
+    void setVolumeForOutput_l(uint32_t left, uint32_t right)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
-    ssize_t getInsertIndex(const effect_descriptor_t& desc);
+    ssize_t getInsertIndex_ll(const effect_descriptor_t& desc)
+            REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
 
-    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const;
+    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // mutex protecting effect list
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectChain_Mutex};
@@ -668,11 +708,11 @@
     status_t setEnabled(bool enabled, bool fromHandle) final;
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() final { return this; }
 
-    status_t init(const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) final;
+    status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches) final
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex;
 
     status_t onCreatePatch(audio_patch_handle_t patchHandle,
-            const IAfPatchPanel::Patch& patch) final;
+                           const IAfPatchPanel::Patch& patch) final;
 
     status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
            const IAfPatchPanel::Patch& patch) final;
@@ -690,10 +730,8 @@
     audio_channel_mask_t channelMask() const final;
     uint32_t channelCount() const final;
 
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_DeviceEffectProxy_ProxyMutex;
 
     void dump2(int fd, int spaces) const final;
 
@@ -739,7 +777,7 @@
 
         void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
-        void resetVolume() override {}
+        void resetVolume_l() override REQUIRES(audio_utils::EffectChain_Mutex) {}
         product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
         int32_t activeTrackCnt() const override { return 0; }
         void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override;
@@ -759,13 +797,16 @@
     };
 
     status_t checkPort(const IAfPatchPanel::Patch& patch,
-            const struct audio_port_config *port, sp<IAfEffectHandle> *handle);
+            const struct audio_port_config* port, sp<IAfEffectHandle>* handle);
 
     const AudioDeviceTypeAddr mDevice;
     const sp<DeviceEffectManagerCallback> mManagerCallback;
     const sp<ProxyCallback> mMyCallback;
 
-    audio_utils::mutex& proxyMutex() const { return mProxyMutex; }
+    audio_utils::mutex& proxyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectProxy_ProxyMutex) {
+        return mProxyMutex;
+    }
     mutable audio_utils::mutex mProxyMutex{
             audio_utils::MutexOrder::kDeviceEffectProxy_ProxyMutex};
     std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyMutex
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 8c5bc4b..fd4dd62 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -80,7 +80,7 @@
     // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
     virtual product_strategy_t strategy() const = 0;
     virtual int32_t activeTrackCnt() const = 0;
-    virtual void resetVolume() = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual wp<IAfEffectChain> chain() const = 0;
     virtual bool isAudioPolicyReady() const = 0;
 };
@@ -106,43 +106,45 @@
     virtual bool isOffloadable() const = 0;
     virtual bool isImplementationSoftware() const = 0;
     virtual bool isProcessImplemented() const = 0;
-    virtual bool isVolumeControl() const = 0;
+    virtual bool isVolumeControl() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual bool isVolumeMonitor() const = 0;
     virtual bool isEnabled() const = 0;
     virtual bool isPinned() const = 0;
     virtual void unPin() = 0;
-    virtual status_t updatePolicyState() = 0;
-    virtual bool purgeHandles() = 0;
+    virtual status_t updatePolicyState() EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool purgeHandles() EXCLUDES_EffectBase_Mutex = 0;
     virtual void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) = 0;
 
     // mCallback is atomic so this can be lock-free.
     virtual void setCallback(const sp<EffectCallbackInterface>& callback) = 0;
     virtual sp<EffectCallbackInterface> getCallback() const = 0;
 
-    virtual status_t addHandle(IAfEffectHandle *handle) = 0;
-    virtual ssize_t removeHandle(IAfEffectHandle *handle) = 0;
+    virtual status_t addHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
 
     virtual sp<IAfEffectModule> asEffectModule() = 0;
     virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
 
-    virtual status_t command(int32_t cmdCode,
-            const std::vector<uint8_t>& cmdData,
-            int32_t maxReplySize,
-            std::vector<uint8_t>* reply) = 0;
+    virtual status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData,
+                             int32_t maxReplySize, std::vector<uint8_t>* reply)
+            EXCLUDES(audio_utils::EffectBase_Mutex) = 0;
 
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 
 private:
-    virtual status_t setEnabled(bool enabled, bool fromHandle) = 0;
-    virtual status_t setEnabled_l(bool enabled) = 0;
-    virtual void setSuspended(bool suspended) = 0;
-    virtual bool suspended() const = 0;
+    virtual status_t setEnabled(bool enabled, bool fromHandle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setEnabled_l(bool enabled) REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual void setSuspended(bool suspended) EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool suspended() const EXCLUDES_EffectBase_Mutex = 0;
 
-    virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
-    virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
-    virtual IAfEffectHandle* controlHandle_l() = 0;
+    virtual ssize_t disconnectHandle(IAfEffectHandle* handle,
+                                     bool unpinIfLast) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle_l(IAfEffectHandle* handle)
+            REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual IAfEffectHandle* controlHandle_l() REQUIRES(audio_utils::EffectBase_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) = 0;
 };
 
 class IAfEffectModule : public virtual IAfEffectBase {
@@ -162,41 +164,51 @@
     virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
     virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
     virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller) = 0;
-    virtual status_t setOffloaded(bool offloaded, audio_io_handle_t io) = 0;
-    virtual bool isOffloaded() const = 0;
+    virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
+    virtual bool isOffloaded_l() const = 0;
 
     virtual status_t setAudioSource(audio_source_t source) = 0;
     virtual status_t setMode(audio_mode_t mode) = 0;
 
-    virtual status_t start() = 0;
-    virtual status_t getConfigs(audio_config_base_t* inputCfg,
-            audio_config_base_t* outputCfg,
-            bool* isOutput) const = 0;
+    virtual status_t start_l() = 0;
+    virtual status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                  bool* isOutput) const
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
     static bool isHapticGenerator(const effect_uuid_t* type);
     virtual bool isHapticGenerator() const = 0;
-    virtual status_t setHapticIntensity(int id, os::HapticScale intensity) = 0;
-    virtual status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) = 0;
+    static bool isSpatializer(const effect_uuid_t* type);
+    virtual bool isSpatializer() const = 0;
+
+    virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
 private:
     virtual void process() = 0;
-    virtual bool updateState() = 0;
-    virtual void reset_l() = 0;
-    virtual status_t configure() = 0;
-    virtual status_t init() = 0;
+    virtual bool updateState_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t init_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual uint32_t status() const = 0;
     virtual bool isProcessEnabled() const = 0;
-    virtual bool isOffloadedOrDirect() const = 0;
-    virtual bool isVolumeControlEnabled() const = 0;
+    virtual bool isOffloadedOrDirect_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual bool isVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual int16_t *outBuffer() const = 0;
 
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    virtual void updateAccessMode() = 0;
+    virtual void updateAccessMode_l() = 0;
 
-    virtual status_t stop() = 0;
+    virtual status_t stop_l() = 0;
     virtual void addEffectToHal_l() = 0;
     virtual void release_l() = 0;
 };
@@ -216,33 +228,41 @@
     // a session is stopped or removed to allow effect tail to be rendered
     static constexpr int kProcessTailDurationMs = 1000;
 
-    virtual void process_l() = 0;
+    virtual void process_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) = 0;
+    virtual status_t createEffect_l(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                                    audio_session_t sessionId, bool pinned)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle) = 0;
-    virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle) = 0;
-    virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) = 0;
+    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle)
+            REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex) = 0;
+    virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle,
+                                  bool release = false) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual audio_session_t sessionId() const = 0;
     virtual void setSessionId(audio_session_t sessionId) = 0;
 
-    virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const = 0;
-    virtual std::vector<int> getEffectIds() const = 0;
-    virtual bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) = 0;
-    virtual void resetVolume_l() = 0;
-    virtual void setDevices_l(const AudioDeviceTypeAddrVector &devices) = 0;
-    virtual void setInputDevice_l(const AudioDeviceTypeAddr &device) = 0;
-    virtual void setMode_l(audio_mode_t mode) = 0;
-    virtual void setAudioSource_l(audio_source_t source) = 0;
+    virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t* descriptor) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual std::vector<int> getEffectIds_l() const = 0;
+    virtual bool setVolume(uint32_t* left, uint32_t* right,
+                           bool force = false) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual void setDevices_l(const AudioDeviceTypeAddrVector& devices)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual void setInputDevice_l(const AudioDeviceTypeAddr& device)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual void setMode_l(audio_mode_t mode) REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+    virtual void setAudioSource_l(audio_source_t source)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual float *inBuffer() const = 0;
@@ -262,20 +282,21 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    virtual void setEffectSuspended_l(
-            const effect_uuid_t *type, bool suspend) = 0;
+    virtual void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) = 0;
     // suspend all eligible effects
     virtual void setEffectSuspendedAll_l(bool suspend) = 0;
     // check if effects should be suspended or restored when a given effect is enabled or disabled
-    virtual void checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect, bool enabled) = 0;
+    virtual void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void clearInputBuffer() = 0;
+    virtual void clearInputBuffer() EXCLUDES_EffectChain_Mutex = 0;
 
     // At least one non offloadable effect in the chain is enabled
-    virtual bool isNonOffloadableEnabled() const = 0;
-    virtual bool isNonOffloadableEnabled_l() const = 0;
+    virtual bool isNonOffloadableEnabled() const EXCLUDES_EffectChain_Mutex = 0;
+    virtual bool isNonOffloadableEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual void syncHalEffectsState() = 0;
+    virtual void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     virtual void checkOutputFlagCompatibility(audio_output_flags_t *flags) const = 0;
@@ -293,22 +314,28 @@
     virtual bool isBitPerfectCompatible() const = 0;
 
     // isCompatibleWithThread_l() must be called with thread->mLock held
-    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const = 0;
+    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual bool containsHapticGeneratingEffect_l() = 0;
 
-    virtual void setHapticIntensity_l(int id, os::HapticScale intensity) = 0;
+    virtual void setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual sp<EffectCallbackInterface> effectCallback() const = 0;
 
     virtual wp<IAfThreadBase> thread() const = 0;
-    virtual void setThread(const sp<IAfThreadBase>& thread) = 0;
+    virtual void setThread(const sp<IAfThreadBase>& thread) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual bool isFirstEffect(int id) const = 0;
 
     virtual size_t numberOfEffects() const = 0;
     virtual sp<IAfEffectModule> getEffectModule(size_t index) const = 0;
 
+    // sendMetadata_l() must be called with thread->mutex() held
+    virtual void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata)
+            = 0;
+
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 };
 
@@ -352,9 +379,8 @@
                 const sp<DeviceEffectManagerCallback>& callback,
                 effect_descriptor_t *desc, int id, bool notifyFramesProcessed);
 
-    virtual status_t init(
-            const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) = 0;
+    virtual status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches)
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual const AudioDeviceTypeAddr& device() const = 0;
 
     virtual status_t onCreatePatch(
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 7084be9..d701288 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -279,7 +279,7 @@
     // integrity of the chains during the process.
     // Also sets the parameter 'effectChains' to current value of mEffectChains.
     virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
-            REQUIRES(mutex()) = 0;
+            REQUIRES(mutex()) EXCLUDES_EffectChain_Mutex = 0;
     // unlock effect chains after process
     virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
             EXCLUDES_ThreadBase_Mutex = 0;
@@ -386,6 +386,12 @@
             const effect_uuid_t* type, bool suspend, audio_session_t sessionId)
             REQUIRES(mutex()) = 0;
 
+    // Wait while the Thread is busy.  This is done to ensure that
+    // the Thread is not busy releasing the Tracks, during which the Thread mutex
+    // may be temporarily unlocked.  Some Track methods will use this method to
+    // avoid races.
+    virtual void waitWhileThreadBusy_l(audio_utils::unique_lock& ul)
+            REQUIRES(mutex()) = 0;
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
     virtual sp<IAfDuplicatingThread> asIAfDuplicatingThread() { return nullptr; }
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 2302e13..8ed44c6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -18,6 +18,7 @@
 
 #include <android/media/BnAudioRecord.h>
 #include <android/media/BnAudioTrack.h>
+#include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
 #include <fastpath/FastMixerDumpState.h>
@@ -338,12 +339,12 @@
     /** Set haptic playback of the track is enabled or not, should be
      * set after query or get callback from vibrator service */
     virtual void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) = 0;
-    /** Return at what intensity to play haptics, used in mixer. */
-    virtual os::HapticScale getHapticIntensity() const = 0;
+    /** Return the haptics scale, used in mixer. */
+    virtual os::HapticScale getHapticScale() const = 0;
     /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     virtual float getHapticMaxAmplitude() const = 0;
-    /** Set intensity of haptic playback, should be set after querying vibrator service. */
-    virtual void setHapticIntensity(os::HapticScale hapticIntensity) = 0;
+    /** Set scale for haptic playback, should be set after querying vibrator service. */
+    virtual void setHapticScale(os::HapticScale hapticScale) = 0;
     /** Set maximum amplitude allowed for haptic data, should be set after querying
      *  vibrator service.
      */
@@ -351,7 +352,8 @@
     virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
 
     // This function should be called with holding thread lock.
-    virtual void updateTeePatches_l() = 0;
+    virtual void updateTeePatches_l() REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex = 0;
 
     // Argument teePatchesToUpdate is by value, use std::move to optimize.
     virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 41c5096..1d38306 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -307,6 +307,22 @@
 
 }
 
+void MelReporter::applyAllAudioPatches() {
+    ALOGV("%s", __func__);
+
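+    // Snapshot the current patches while holding the callback (AudioFlinger) mutex, then
+    // replay onCreateAudioPatch() for each copied patch after the mutex has been released.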
+    std::vector<IAfPatchPanel::Patch> patchesCopy;
+    {
+        audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+        for (const auto& patch : mAfPatchPanel->patches_l()) {
+            patchesCopy.emplace_back(patch.second);
+        }
+    }
+
+    for (const auto& patch : patchesCopy) {
+        onCreateAudioPatch(patch.mHalHandle, patch);
+    }
+}
+
 std::optional<audio_patch_handle_t> MelReporter::activePatchStreamHandle_l(
         audio_io_handle_t streamHandle) {
     for(const auto& patchIt : mActiveMelPatches) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 235dd11..0aeb225 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -27,8 +27,6 @@
 
 namespace android {
 
-constexpr static int kMaxTimestampDeltaInSec = 120;
-
 class IAfMelReporterCallback : public virtual RefBase {
 public:
     virtual audio_utils::mutex& mutex() const
@@ -45,8 +43,10 @@
 class MelReporter : public PatchCommandThread::PatchCommandListener,
                     public IMelReporterCallback {
 public:
-    explicit MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback)
-        : mAfMelReporterCallback(afMelReporterCallback) {}
+    MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback,
+                const sp<IAfPatchPanel>& afPatchPanel)
+        : mAfMelReporterCallback(afMelReporterCallback),
+          mAfPatchPanel(afPatchPanel) {}
 
     void onFirstRef() override;
 
@@ -80,9 +80,10 @@
 
     // IMelReporterCallback methods
     void stopMelComputationForDeviceId(audio_port_handle_t deviceId) final
-            EXCLUDES_MelReporter_Mutex;
+            EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
     void startMelComputationForDeviceId(audio_port_handle_t deviceId) final
-            EXCLUDES_MelReporter_Mutex;
+            EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
+    void applyAllAudioPatches() final EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
 
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
@@ -131,6 +132,7 @@
     bool useHalSoundDoseInterface_l() REQUIRES(mutex());
 
     const sp<IAfMelReporterCallback> mAfMelReporterCallback;
+    const sp<IAfPatchPanel> mAfPatchPanel;
 
     /* const */ sp<SoundDoseManager> mSoundDoseManager;  // set onFirstRef
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 4a1948c..6c22e21 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -23,6 +23,7 @@
 #include <audio_utils/mutex.h>
 #include <audio_utils/LinearMap.h>
 #include <binder/AppOpsManager.h>
+#include <utils/RWLock.h>
 
 namespace android {
 
@@ -173,15 +174,15 @@
     void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) final {
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
-            /** Return at what intensity to play haptics, used in mixer. */
-    os::HapticScale getHapticIntensity() const final { return mHapticIntensity; }
+            /** Return the haptics scale, used in mixer. */
+    os::HapticScale getHapticScale() const final { return mHapticScale; }
             /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     float getHapticMaxAmplitude() const final { return mHapticMaxAmplitude; }
             /** Set scale for haptic playback, should be set after querying vibrator service. */
-    void setHapticIntensity(os::HapticScale hapticIntensity) final {
-                if (os::isValidHapticScale(hapticIntensity)) {
-                    mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
+    void setHapticScale(os::HapticScale hapticScale) final {
+                if (os::isValidHapticScale(hapticScale)) {
+                    mHapticScale = hapticScale;
+                    setHapticPlaybackEnabled(!mHapticScale.isScaleMute());
                 }
             }
             /** Set maximum amplitude allowed for haptic data, should be set after querying
@@ -193,7 +194,8 @@
     sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
 
             // This function should be called with holding thread lock.
-    void updateTeePatches_l() final;
+    void updateTeePatches_l() final REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex;
     void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
 
     void tallyUnderrunFrames(size_t frames) final {
@@ -327,8 +329,8 @@
     sp<OpPlayAudioMonitor>  mOpPlayAudioMonitor;
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
-    // intensity to play haptic data
-    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
+    // scale to play haptic data
+    os::HapticScale mHapticScale = os::HapticScale::mute();
     // max amplitude allowed for haptic data
     float mHapticMaxAmplitude = NAN;
     class AudioVibrationController : public os::BnExternalVibrationController {
@@ -352,6 +354,7 @@
     // Must hold thread lock to access tee patches
     template <class F>
     void                forEachTeePatchTrack_l(F f) {
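+        // Take the tee patches read lock so the iteration over mTeePatches cannot race with
+        // a concurrent update of the patch list.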
+        RWLock::AutoRLock readLock(mTeePatchesRWLock);
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
@@ -387,6 +390,7 @@
     audio_output_flags_t mFlags;
     TeePatches mTeePatches;
     std::optional<TeePatches> mTeePatchesToUpdate;
+    RWLock              mTeePatchesRWLock;
     const float         mSpeed;
     const bool          mIsSpatialized;
     const bool          mIsBitPerfect;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 18bb173..d1a09a4 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -47,6 +47,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/PersistableBundle.h>
+#include <com_android_media_audio.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <fastpath/AutoPark.h>
@@ -185,7 +186,7 @@
 // Minimum amount of time between checking to see if the timestamp is advancing
 // for underrun detection. If we check too frequently, we may not detect a
 // timestamp update and will falsely detect underrun.
-static const nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1000;
+static constexpr nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1'000'000;
 
 // The universal constant for ubiquitous 20ms value. The value of 20ms seems to provide a good
 // balance between power consumption and latency, and allows threads to be scheduled reliably
@@ -222,6 +223,8 @@
 static const int kPriorityAudioApp = 2;
 static const int kPriorityFastMixer = 3;
 static const int kPriorityFastCapture = 3;
+// Request real-time priority for PlaybackThread in ARC
+static const int kPriorityPlaybackThreadArc = 1;
 
 // IAudioFlinger::createTrack() has an in/out parameter 'pFrameCount' for the total size of the
 // track buffer in shared memory.  Zero on input means to use a default value.  For fast tracks,
@@ -721,8 +724,9 @@
     {
         audio_utils::unique_lock _l(event->mutex());
         while (event->mWaitStatus) {
-            if (event->mCondition.wait_for(_l, std::chrono::nanoseconds(kConfigEventTimeoutNs))
-                        == std::cv_status::timeout) {
+            if (event->mCondition.wait_for(
+                    _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+                            == std::cv_status::timeout) {
                 event->mStatus = TIMED_OUT;
                 event->mWaitStatus = false;
             }
@@ -1483,7 +1487,7 @@
         return BAD_VALUE;
     }
 
-    if (memcmp(&desc->type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0
+    if (IAfEffectModule::isSpatializer(&desc->type)
             && mType != SPATIALIZER) {
         ALOGW("%s: attempt to create a spatializer effect on a thread of type %d",
                 __func__, mType);
@@ -1571,7 +1575,7 @@
             return BAD_VALUE;
         } else if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
             // only post processing , downmixer or spatializer effects on output stage session
-            if (memcmp(&desc->type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0
+            if (IAfEffectModule::isSpatializer(&desc->type)
                     || memcmp(&desc->type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) {
                 break;
             }
@@ -1690,7 +1694,7 @@
                     std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
             if (defaultVibratorInfo) {
                 // Only set the vibrator info when it is a valid one.
-                effect->setVibratorInfo(*defaultVibratorInfo);
+                effect->setVibratorInfo_l(*defaultVibratorInfo);
             }
         }
         // create effect handle and connect it to effect module
@@ -1792,7 +1796,7 @@
 std::vector<int> ThreadBase::getEffectIds_l(audio_session_t sessionId) const
 {
     sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
-    return chain != nullptr ? chain->getEffectIds() : std::vector<int>{};
+    return chain != nullptr ? chain->getEffectIds_l() : std::vector<int>{};
 }
 
 // PlaybackThread::addEffect_ll() must be called with AudioFlinger::mutex() and
@@ -1824,7 +1828,7 @@
         return BAD_VALUE;
     }
 
-    effect->setOffloaded(mType == OFFLOAD, mId);
+    effect->setOffloaded_l(mType == OFFLOAD, mId);
 
     status_t status = chain->addEffect_l(effect);
     if (status != NO_ERROR) {
@@ -1861,22 +1865,20 @@
     }
 }
 
-void ThreadBase::lockEffectChains_l(
-        Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
+void ThreadBase::lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
-    for (size_t i = 0; i < mEffectChains.size(); i++) {
-        mEffectChains[i]->mutex().lock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().lock();
     }
 }
 
-void ThreadBase::unlockEffectChains(
-        const Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
+void ThreadBase::unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
-    for (size_t i = 0; i < effectChains.size(); i++) {
-        effectChains[i]->mutex().unlock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().unlock();
     }
 }
 
@@ -2849,6 +2851,8 @@
         // effectively get the latency it requested.
         if (track->isExternalTrack()) {
             IAfTrackBase::track_state state = track->state();
+            // Because the track is not yet on the ActiveTracks list,
+            // at this point only the TrackHandle will be adding the track.
             mutex().unlock();
             status = AudioSystem::startOutput(track->portId());
             mutex().lock();
@@ -2897,7 +2901,7 @@
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mutex().unlock();
-            const os::HapticScale intensity = afutils::onExternalVibrationStart(
+            const os::HapticScale hapticScale = afutils::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
@@ -2907,7 +2911,7 @@
                 vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
             }
             mutex().lock();
-            track->setHapticIntensity(intensity);
+            track->setHapticScale(hapticScale);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
             }
@@ -2923,13 +2927,19 @@
 
             // Set haptic intensity for effect
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), intensity);
+                // TODO(b/324559333): Add adaptive haptics scaling support for the HapticGenerator.
+                chain->setHapticScale_l(track->id(), hapticScale);
             }
         }
 
         track->setResetDone(false);
         track->resetPresentationComplete();
+
+        // Do not release the ThreadBase mutex after the track is added to mActiveTracks until
+        // all key changes are complete.  It is possible that the threadLoop will begin
+        // processing the added track immediately after the ThreadBase mutex is released.
         mActiveTracks.add(track);
+
         if (chain != 0) {
             ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
                     track->sessionId());
@@ -3048,7 +3058,7 @@
 }
 
 void PlaybackThread::onCodecFormatChanged(
-        const std::basic_string<uint8_t>& metadataBs)
+        const std::vector<uint8_t>& metadataBs)
 {
     const auto weakPointerThis = wp<PlaybackThread>::fromExisting(this);
     std::thread([this, metadataBs, weakPointerThis]() {
@@ -3311,10 +3321,48 @@
         return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
-    auto backInserter = std::back_inserter(metadata.tracks);
-    for (const sp<IAfTrack>& track : mActiveTracks) {
-        // No track is invalid as this is called after prepareTrack_l in the same critical section
-        track->copyMetadataTo(backInserter);
+    static const bool stereo_spatialization_property =
+            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+    const bool stereo_spatialization_enabled =
+            stereo_spatialization_property && com_android_media_audio_stereo_spatialization();
+    if (stereo_spatialization_enabled) {
+        std::map<audio_session_t, std::vector<playback_track_metadata_v7_t>> allSessionsMetadata;
+        for (const sp<IAfTrack>& track : mActiveTracks) {
+            std::vector<playback_track_metadata_v7_t>& sessionMetadata =
+                    allSessionsMetadata[track->sessionId()];
+            auto backInserter = std::back_inserter(sessionMetadata);
+            // No track is invalid as this is called after prepareTrack_l in the same
+            // critical section
+            track->copyMetadataTo(backInserter);
+        }
+        std::vector<playback_track_metadata_v7_t> spatializedTracksMetaData;
+        for (const auto& [session, sessionTrackMetadata] : allSessionsMetadata) {
+            metadata.tracks.insert(metadata.tracks.end(),
+                    sessionTrackMetadata.begin(), sessionTrackMetadata.end());
+            if (auto chain = getEffectChain_l(session) ; chain != nullptr) {
+                chain->sendMetadata_l(sessionTrackMetadata, {});
+            }
+            if ((hasAudioSession_l(session) & IAfThreadBase::SPATIALIZED_SESSION) != 0) {
+                spatializedTracksMetaData.insert(spatializedTracksMetaData.end(),
+                        sessionTrackMetadata.begin(), sessionTrackMetadata.end());
+            }
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, {});
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, spatializedTracksMetaData);
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_DEVICE); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, {});
+        }
+    } else {
+        auto backInserter = std::back_inserter(metadata.tracks);
+        for (const sp<IAfTrack>& track : mActiveTracks) {
+            // No track is invalid as this is called after prepareTrack_l in the same
+            // critical section
+            track->copyMetadataTo(backInserter);
+        }
     }
     sendMetadataToBackend_l(metadata);
     MetadataUpdate change;
@@ -3909,6 +3957,27 @@
                 stream()->setHalThreadPriority(priorityBoost);
             }
         }
+    } else if (property_get_bool("ro.boot.container", false /* default_value */)) {
+        // In ARC experiments (b/73091832), the latency achieved with the CFS scheduler at any
+        // priority is not sufficient for PlaybackThread to process audio data in time. We request
+        // the lowest real-time priority, SCHED_FIFO=1, for PlaybackThread in ARC.
+        // ro.boot.container is true only on ARC.
+        const pid_t tid = getTid();
+        if (tid == -1) {
+            ALOGW("%s: Cannot update PlaybackThread priority for ARC, no tid", __func__);
+        } else {
+            const status_t status = requestPriority(getpid(),
+                                                    tid,
+                                                    kPriorityPlaybackThreadArc,
+                                                    false /* isForApp */,
+                                                    true /* asynchronous */);
+            if (status != OK) {
+                ALOGW("%s: Cannot update PlaybackThread priority for ARC, status %d", __func__,
+                        status);
+            } else {
+                stream()->setHalThreadPriority(kPriorityPlaybackThreadArc);
+            }
+        }
     }
 
     Vector<sp<IAfTrack>> tracksToRemove;
@@ -4120,6 +4189,30 @@
 
             metadataUpdate = updateMetadata_l();
 
+            // Acquire a local copy of active tracks with lock (release w/o lock).
+            //
+            // Control methods on the track acquire the ThreadBase lock (e.g. start()
+            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
+            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
+            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
+
+            setHalLatencyMode_l();
+
+            // updateTeePatches_l will acquire the ThreadBase_Mutex of other threads,
+            // so this is done before we lock our effect chains.
+            for (const auto& track : mActiveTracks) {
+                track->updateTeePatches_l();
+            }
+
+            // signal actual start of output stream when the render position reported by
+            // the kernel starts moving.
+            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+                    && (mKernelPositionOnStandby
+                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+                mHalStarted = true;
+                mWaitHalStartCV.notify_all();
+            }
+
             // prevent any changes in effect chain list and in each effect chain
             // during mixing and effect process as the audio buffers could be deleted
             // or modified if an effect is created or deleted
@@ -4147,28 +4240,6 @@
                     }
                 }
             }
-
-            // Acquire a local copy of active tracks with lock (release w/o lock).
-            //
-            // Control methods on the track acquire the ThreadBase lock (e.g. start()
-            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
-            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
-            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
-
-            setHalLatencyMode_l();
-
-            for (const auto &track : mActiveTracks ) {
-                track->updateTeePatches_l();
-            }
-
-            // signal actual start of output stream when the render position reported by the kernel
-            // starts moving.
-            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
-                    && (mKernelPositionOnStandby
-                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
-                mHalStarted = true;
-                mWaitHalStartCV.notify_all();
-            }
         } // mutex() scope ends
 
         if (mBytesRemaining == 0) {
@@ -4704,8 +4775,12 @@
 void PlaybackThread::removeTracks_l(const Vector<sp<IAfTrack>>& tracksToRemove)
 NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
+    if (tracksToRemove.empty()) return;
+
+    // Block all incoming TrackHandle requests until we are finished with the release.
+    setThreadBusy_l(true);
+
     for (const auto& track : tracksToRemove) {
-        mActiveTracks.remove(track);
         ALOGV("%s(%d): removing track on session %d", __func__, track->id(), track->sessionId());
         sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
@@ -4713,17 +4788,16 @@
                     __func__, track->id(), chain.get(), track->sessionId());
             chain->decActiveTrackCnt();
         }
+
         // If an external client track, inform APM we're no longer active, and remove if needed.
-        // We do this under lock so that the state is consistent if the Track is destroyed.
+        // Since the track is active, we do it here instead of TrackBase::destroy().
         if (track->isExternalTrack()) {
+            mutex().unlock();
             AudioSystem::stopOutput(track->portId());
             if (track->isTerminated()) {
                 AudioSystem::releaseOutput(track->portId());
             }
-        }
-        if (track->isTerminated()) {
-            // remove from our tracks vector
-            removeTrack_l(track);
+            mutex().lock();
         }
         if (mHapticChannelCount > 0 &&
                 ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
@@ -4737,10 +4811,27 @@
             // When the track is stop, set the haptic intensity as MUTE
             // for the HapticGenerator effect.
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
+                chain->setHapticScale_l(track->id(), os::HapticScale::mute());
             }
         }
+
+        // Under lock, the track is removed from the active tracks list.
+        //
+        // Once the track is no longer active, the TrackHandle may directly
+        // modify it as the threadLoop() is no longer responsible for its maintenance.
+        // Do not modify the track from threadLoop after the mutex is unlocked
+        // if it is not active.
+        mActiveTracks.remove(track);
+
+        if (track->isTerminated()) {
+            // remove from our tracks vector
+            removeTrack_l(track);
+        }
     }
+
+    // Allow incoming TrackHandle requests.  We still hold the mutex,
+    // so pending TrackHandle requests will occur after we unlock it.
+    setThreadBusy_l(false);
 }
 
 status_t PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp)
@@ -5080,7 +5171,7 @@
                                                     // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = os::HapticScale::NONE;
+        fastTrack->mHapticScale = {/*level=*/os::HapticLevel::NONE };
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
@@ -5439,7 +5530,7 @@
     sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
     if (chain != 0) {
         uint32_t v = (uint32_t)(masterVolume * (1 << 24));
-        chain->setVolume_l(&v, &v);
+        chain->setVolume(&v, &v);
         masterVolume = (float)((v + (1 << 23)) >> 24);
         chain.clear();
     }
@@ -5638,7 +5729,7 @@
                     fastTrack->mChannelMask = track->channelMask();
                     fastTrack->mFormat = track->format();
                     fastTrack->mHapticPlaybackEnabled = track->getHapticPlaybackEnabled();
-                    fastTrack->mHapticIntensity = track->getHapticIntensity();
+                    fastTrack->mHapticScale = track->getHapticScale();
                     fastTrack->mHapticMaxAmplitude = track->getHapticMaxAmplitude();
                     fastTrack->mGeneration++;
                     state->mTrackMask |= 1 << j;
@@ -5774,7 +5865,7 @@
 
             mixedTracks++;
 
-            // track->mainBuffer() != mSinkBuffer or mMixerBuffer means
+            // track->mainBuffer() != mSinkBuffer and mMixerBuffer means
             // there is an effect chain connected to the track
             chain.clear();
             if (track->mainBuffer() != mSinkBuffer &&
@@ -5878,7 +5969,7 @@
             track->setFinalVolume(vrf, vlf);
 
             // Delegate volume control to effect in track effect chain if needed
-            if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
+            if (chain != 0 && chain->setVolume(&vl, &vr)) {
                 // Do not ramp volume if volume is controlled by effect
                 param = AudioMixer::VOLUME;
                 // Update remaining floating point volume levels
@@ -5999,10 +6090,11 @@
                 trackId,
                 AudioMixer::TRACK,
                 AudioMixer::HAPTIC_ENABLED, (void *)(uintptr_t)track->getHapticPlaybackEnabled());
+            const os::HapticScale hapticScale = track->getHapticScale();
             mAudioMixer->setParameter(
-                trackId,
-                AudioMixer::TRACK,
-                AudioMixer::HAPTIC_INTENSITY, (void *)(uintptr_t)track->getHapticIntensity());
+                    trackId,
+                    AudioMixer::TRACK,
+                    AudioMixer::HAPTIC_SCALE, (void *)&hapticScale);
             const float hapticMaxAmplitude = track->getHapticMaxAmplitude();
             mAudioMixer->setParameter(
                 trackId,
@@ -6618,8 +6710,8 @@
                 // Convert volumes from float to 8.24
                 uint32_t vl = (uint32_t)(left * (1 << 24));
                 uint32_t vr = (uint32_t)(right * (1 << 24));
-                // Direct/Offload effect chains set output volume in setVolume_l().
-                (void)mEffectChains[0]->setVolume_l(&vl, &vr);
+                // Direct/Offload effect chains set output volume in setVolume().
+                (void)mEffectChains[0]->setVolume(&vl, &vr);
             } else {
                 // otherwise we directly set the volume.
                 setVolumeForOutput_l(left, right);
@@ -7813,16 +7905,12 @@
         //   (mRequestedLatencyMode = AUDIO_LATENCY_MODE_LOW)
         //      AND
         // - At least one active track is spatialized
-        bool hasSpatializedActiveTrack = false;
         for (const auto& track : mActiveTracks) {
             if (track->isSpatialized()) {
-                hasSpatializedActiveTrack = true;
+                latencyMode = mRequestedLatencyMode;
                 break;
             }
         }
-        if (hasSpatializedActiveTrack && mRequestedLatencyMode == AUDIO_LATENCY_MODE_LOW) {
-            latencyMode = AUDIO_LATENCY_MODE_LOW;
-        }
     }
 
     if (latencyMode != mSetLatencyMode) {
@@ -7836,7 +7924,7 @@
 }
 
 status_t SpatializerThread::setRequestedLatencyMode(audio_latency_mode_t mode) {
-    if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
+    if (mode < 0 || mode >= AUDIO_LATENCY_MODE_CNT) {
         return BAD_VALUE;
     }
     audio_utils::lock_guard _l(mutex());
@@ -9579,10 +9667,24 @@
 
 void RecordThread::readInputParameters_l()
 {
-    status_t result = mInput->stream->getAudioProperties(&mSampleRate, &mChannelMask, &mHALFormat);
-    LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving audio properties from HAL: %d", result);
-    mFormat = mHALFormat;
+    const audio_config_base_t audioConfig = mInput->getAudioProperties();
+    mSampleRate = audioConfig.sample_rate;
+    mChannelMask = audioConfig.channel_mask;
+    if (!audio_is_input_channel(mChannelMask)) {
+        LOG_ALWAYS_FATAL("Channel mask %#x not valid for input", mChannelMask);
+    }
+
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
+
+    // Get actual HAL format.
+    status_t result = mInput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
+    LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving input stream format: %d", result);
+    // Get format from the shim, which will be different than the HAL format
+    // if recording compressed audio from IEC61937 wrapped sources.
+    mFormat = audioConfig.format;
+    if (!audio_is_valid_format(mFormat)) {
+        LOG_ALWAYS_FATAL("Format %#x not valid for input", mFormat);
+    }
     if (audio_is_linear_pcm(mFormat)) {
         LOG_ALWAYS_FATAL_IF(mChannelCount > FCC_LIMIT, "HAL channel count %d > %d",
                 mChannelCount, FCC_LIMIT);
@@ -9590,8 +9692,7 @@
         // Can have more that FCC_LIMIT channels in encoded streams.
         ALOGI("HAL format %#x is not linear pcm", mFormat);
     }
-    result = mInput->stream->getFrameSize(&mFrameSize);
-    LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+    mFrameSize = mInput->getFrameSize();
     LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
             mFrameSize);
     result = mInput->stream->getBufferSize(&mBufferSize);
@@ -9673,7 +9774,7 @@
 
     // make sure enabled pre processing effects state is communicated to the HAL as we
     // just moved them to a new input stream.
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
 
@@ -10564,6 +10665,16 @@
         }
     }
 
+    // For mmap streams, once the routing has changed, they will be disconnected. It should be
+    // okay to notify the client before the new patch is created.
+    if (mDeviceId != deviceId) {
+        if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+            // The aaudioservice handles the routing changed event asynchronously, so it is
+            // safe to hold the lock here.
+            callback->onRoutingChanged(deviceId);
+        }
+    }
+
     if (mAudioHwDev->supportsAudioPatches()) {
         status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
                                               patch->sinks, handle);
@@ -10589,12 +10700,6 @@
             sendIoConfigEvent_l(AUDIO_INPUT_CONFIG_CHANGED);
             mInDeviceTypeAddr = sourceDeviceTypeAddr;
         }
-        sp<MmapStreamCallback> callback = mCallback.promote();
-        if (mDeviceId != deviceId && callback != 0) {
-            mutex().unlock();
-            callback->onRoutingChanged(deviceId);
-            mutex().lock();
-        }
         mPatch = *patch;
         mDeviceId = deviceId;
     }
@@ -10658,7 +10763,7 @@
     chain->setThread(this);
     chain->setInBuffer(nullptr);
     chain->setOutBuffer(nullptr);
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
     checkSuspendOnAddEffectChain_l(chain);
@@ -10746,22 +10851,19 @@
 
 void MmapThread::checkInvalidTracks_l()
 {
-    sp<MmapStreamCallback> callback;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
         if (track->isInvalid()) {
-            callback = mCallback.promote();
-            if (callback == nullptr &&  mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+            if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+                // The aaudioservice handles the routing changed event asynchronously, so it is
+                // safe to hold the lock here.
+                callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
                 ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
             break;
         }
     }
-    if (callback != 0) {
-        mutex().unlock();
-        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
-        mutex().lock();
-    }
 }
 
 void MmapThread::dumpInternals_l(int fd, const Vector<String16>& /* args */)
@@ -10951,7 +11053,7 @@
         // only one effect chain can be present on DirectOutputThread, so if
         // there is one, the track is connected to it
         if (!mEffectChains.isEmpty()) {
-            mEffectChains[0]->setVolume_l(&vol, &vol);
+            mEffectChains[0]->setVolume(&vol, &vol);
             volume = (float)vol / (1 << 24);
         }
         // Try to use HW volume control and fall back to SW control if not implemented
@@ -11020,7 +11122,7 @@
             char *endptr;
             unsigned long ul = strtoul(value, &endptr, 0);
             if (*endptr == '\0' && ul != 0) {
-                ALOGD("Silence is golden");
+                ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
                 // The setprop command will not allow a property to be changed after
                 // the first time it is set, so we don't have to worry about un-muting.
                 setMasterMute_l(true);
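
A note on the updateMetadata_l() change above: when stereo spatialization is enabled, active-track
metadata is first grouped by audio session and then forwarded to the per-session, OUTPUT_MIX,
OUTPUT_STAGE and DEVICE effect chains. The sketch below shows only that grouping step, with a
placeholder TrackMeta type standing in for playback_track_metadata_v7_t and audio_session_t; it is
an illustration, not the AOSP implementation.

#include <map>
#include <vector>

// Placeholder for playback_track_metadata_v7_t; only the session id matters for grouping.
struct TrackMeta { int sessionId; float gain; };

// Group active-track metadata by audio session. Each per-session vector can then be sent to
// that session's effect chain, and all vectors concatenated for the OUTPUT_MIX / OUTPUT_STAGE
// chains, as the patch does above.
std::map<int, std::vector<TrackMeta>> groupBySession(const std::vector<TrackMeta>& active) {
    std::map<int, std::vector<TrackMeta>> bySession;
    for (const auto& meta : active) {
        bySession[meta.sessionId].push_back(meta);
    }
    return bySession;
}
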
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index ea994a5..86e1894 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -436,9 +436,11 @@
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final REQUIRES(mutex());
+    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) ACQUIRE(audio_utils::EffectChain_Mutex);
                 // unlock effect chains after process
-    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final;
+    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final
+            RELEASE(audio_utils::EffectChain_Mutex);
                 // get a copy of mEffectChains vector
     Vector<sp<IAfEffectChain>> getEffectChains_l() const final REQUIRES(mutex()) {
         return mEffectChains;
@@ -599,6 +601,35 @@
                 // check if some effects must be suspended when an effect chain is added
     void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
 
+    /**
+     * waitWhileThreadBusy_l() serves as a mutex gate, which does not allow
+     * progress beyond the method while the PlaybackThread is busy (see setThreadBusy_l()).
+     * During the wait, the ThreadBase_Mutex is temporarily unlocked.
+     *
+     * This implementation uses a condition variable.  Alternative methods to gate
+     * the thread may use a second mutex (i.e. entry based on scoped_lock(mutex, gating_mutex)),
+     * but those have less flexibility and more lock order issues.
+     *
+     * Current callers Track::destroy(), Track::start(), Track::stop(), Track::pause(),
+     * and Track::flush() block this way, and the primary caller is through TrackHandle
+     * with no other mutexes held.
+     *
+     * Special tracks like PatchTrack and OutputTrack may also hold another thread's
+     * ThreadBase_Mutex during this time.  No other mutex is held.
+     */
+
+    void waitWhileThreadBusy_l(audio_utils::unique_lock& ul) final REQUIRES(mutex()) {
+        // the wait returns immediately if the predicate is satisfied.
+        mThreadBusyCv.wait(ul, [&]{ return mThreadBusy == false;});
+    }
+
+    void setThreadBusy_l(bool busy) REQUIRES(mutex()) {
+        if (busy == mThreadBusy) return;
+        mThreadBusy = busy;
+        if (busy == true) return;  // no need to wake threads if we become busy.
+        mThreadBusyCv.notify_all();
+    }
+
                 // sends the metadata of the active tracks to the HAL
                 struct MetadataUpdate {
                     std::vector<playback_track_metadata_v7_t> playbackMetadataUpdate;
@@ -641,6 +672,13 @@
                 ThreadMetrics           mThreadMetrics;
                 const bool              mIsOut;
 
+    // mThreadBusy is checked under the ThreadBase_Mutex to ensure that
+    // TrackHandle operations do not proceed while the ThreadBase is busy
+    // with the track.  mThreadBusy is only true if the track is active.
+    //
+    bool mThreadBusy = false; // GUARDED_BY(ThreadBase_Mutex) but read in lambda.
+    audio_utils::condition_variable mThreadBusyCv;
+
                 // updated by PlaybackThread::readOutputParameters_l() or
                 // RecordThread::readInputParameters_l()
                 uint32_t                mSampleRate;
@@ -839,7 +877,7 @@
 
                 SimpleLog mLocalLog;  // locked internally
 
-private:
+    private:
     void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
     void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
 };
@@ -926,7 +964,7 @@
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
-            const std::basic_string<uint8_t>& metadataBs) final;
+            const std::vector<uint8_t>& metadataBs) final;
 
     // ThreadBase virtuals
     void preExit() final EXCLUDES_ThreadBase_Mutex;
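
The waitWhileThreadBusy_l()/setThreadBusy_l() pair added above gates TrackHandle calls on a
condition variable while removeTracks_l() releases tracks outside the thread lock. A minimal
standalone sketch of the same gate, assuming plain std::mutex/std::condition_variable instead of
the audio_utils wrappers; BusyGate is a hypothetical name, not an AOSP class.

#include <condition_variable>
#include <mutex>

class BusyGate {
public:
    // Called by control paths (e.g. a TrackHandle binder call) while holding 'lock' on the
    // mutex that guards mBusy; blocks until the owning thread is no longer busy.
    void waitWhileBusy(std::unique_lock<std::mutex>& lock) {
        mCv.wait(lock, [this] { return !mBusy; });  // unlocks while waiting, relocks on return
    }

    // Called by the owning thread while holding the same mutex.
    void setBusy(bool busy) {
        if (busy == mBusy) return;
        mBusy = busy;
        if (!busy) mCv.notify_all();  // wake gated callers only once the work is finished
    }

private:
    bool mBusy = false;
    std::condition_variable mCv;
};
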
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index fe582eb..4d07821 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -890,12 +890,17 @@
         bool wasActive = false;
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != 0) {
-            audio_utils::lock_guard _l(thread->mutex());
+            audio_utils::unique_lock ul(thread->mutex());
+            thread->waitWhileThreadBusy_l(ul);
+
             auto* const playbackThread = thread->asIAfPlaybackThread().get();
             wasActive = playbackThread->destroyTrack_l(this);
             forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
         }
         if (isExternalTrack() && !wasActive) {
+            // If the track is not active, the TrackHandle is responsible for
+            // releasing the port id, not the ThreadBase::threadLoop().
+            // At this point, there is no concurrency issue as the track is going away.
             AudioSystem::releaseOutput(mPortId);
         }
     }
@@ -1079,7 +1084,13 @@
         // Additionally PatchProxyBufferProvider::obtainBuffer (called by PathTrack::getNextBuffer)
         // does not allow 0 frame size request contrary to getNextBuffer
     }
-    for (auto& teePatch : mTeePatches) {
+    TeePatches teePatches;
+    if (mTeePatchesRWLock.tryReadLock() == NO_ERROR) {
+        // Cache a copy of tee patches in case it is updated while using.
+        teePatches = mTeePatches;
+        mTeePatchesRWLock.unlock();
+    }
+    for (auto& teePatch : teePatches) {
         IAfPatchRecord* patchRecord = teePatch.patchRecord.get();
         const size_t framesWritten = patchRecord->writeFrames(
                 sourceBuffer.i8, frameCount, mFrameSize);
@@ -1092,7 +1103,7 @@
     using namespace std::chrono_literals;
     // Average is ~20us per track, this should virtually never be logged (Logging takes >200us)
     ALOGD_IF(spent > 500us, "%s: took %lldus to intercept %zu tracks", __func__,
-             spent.count(), mTeePatches.size());
+             spent.count(), teePatches.size());
 }
 
 // ExtendedAudioBufferProvider interface
@@ -1181,7 +1192,9 @@
                 return PERMISSION_DENIED;
             }
         }
-        audio_utils::lock_guard _lth(thread->mutex());
+        audio_utils::unique_lock ul(thread->mutex());
+        thread->waitWhileThreadBusy_l(ul);
+
         track_state state = mState;
         // here the track could be either new, or restarted
         // in both cases "unstop" the track
@@ -1220,7 +1233,7 @@
                 && (state == IDLE || state == STOPPED || state == FLUSHED)) {
             mFrameMap.reset();
 
-            if (!isFastTrack() && (isDirect() || isOffloaded())) {
+            if (!isFastTrack()) {
                 // Start point of track -> sink frame map. If the HAL returns a
                 // frame position smaller than the first written frame in
                 // updateTrackFrameInfo, the timestamp can be interpolated
@@ -1306,7 +1319,9 @@
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        audio_utils::lock_guard _l(thread->mutex());
+        audio_utils::unique_lock ul(thread->mutex());
+        thread->waitWhileThreadBusy_l(ul);
+
         track_state state = mState;
         if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) {
             // If the track is not active (PAUSED and buffers full), flush buffers
@@ -1341,7 +1356,9 @@
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        audio_utils::lock_guard _l(thread->mutex());
+        audio_utils::unique_lock ul(thread->mutex());
+        thread->waitWhileThreadBusy_l(ul);
+
         auto* const playbackThread = thread->asIAfPlaybackThread().get();
         switch (mState) {
         case STOPPING_1:
@@ -1378,7 +1395,9 @@
     ALOGV("%s(%d)", __func__, mId);
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        audio_utils::lock_guard _l(thread->mutex());
+        audio_utils::unique_lock ul(thread->mutex());
+        thread->waitWhileThreadBusy_l(ul);
+
         auto* const playbackThread = thread->asIAfPlaybackThread().get();
 
         // Flush the ring buffer now if the track is not active in the PlaybackThread.
@@ -1616,7 +1635,10 @@
 void Track::updateTeePatches_l() {
     if (mTeePatchesToUpdate.has_value()) {
         forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
-        mTeePatches = mTeePatchesToUpdate.value();
+        {
+            RWLock::AutoWLock writeLock(mTeePatchesRWLock);
+            mTeePatches = std::move(mTeePatchesToUpdate.value());
+        }
         if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
                 mState == TrackBase::STOPPING_1) {
             forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->start(); });
@@ -1652,7 +1674,7 @@
 
     if (result == OK) {
         ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
-              int(muteState), int(mMuteState));
+                static_cast<int>(mMuteState), static_cast<int>(muteState));
         mMuteState = muteState;
     } else {
         ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -3532,6 +3554,8 @@
     }
 
     if (result == OK) {
+        ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+                static_cast<int>(mMuteState), static_cast<int>(muteState));
         mMuteState = muteState;
     } else {
         ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
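
The Tracks.cpp change above copies mTeePatches under a non-blocking read lock and iterates over
the copy, so updateTeePatches_l() can swap the list under a write lock without racing the audio
path. A rough equivalent of that copy-then-iterate pattern, assuming std::shared_mutex in place of
the utils RWLock and a placeholder TeePatch type:

#include <mutex>
#include <shared_mutex>
#include <vector>

struct TeePatch { int id = 0; };  // placeholder for the real tee patch entry

class TeePatchHolder {
public:
    // Writer side (updateTeePatches_l in the patch): replace the list atomically.
    void update(std::vector<TeePatch> patches) {
        std::unique_lock writeLock(mLock);
        mTeePatches = std::move(patches);
    }

    // Reader side (interceptBuffer in the patch): snapshot under a non-blocking read lock,
    // then write frames to each patch outside the lock.
    void interceptBuffer() {
        std::vector<TeePatch> snapshot;
        {
            std::shared_lock readLock(mLock, std::try_to_lock);  // like RWLock::tryReadLock()
            if (readLock.owns_lock()) snapshot = mTeePatches;
        }
        for (const auto& patch : snapshot) {
            (void)patch.id;  // per-patch work happens here, without holding the lock
        }
    }

private:
    std::shared_mutex mLock;
    std::vector<TeePatch> mTeePatches;
};
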
diff --git a/services/audioflinger/afutils/Vibrator.cpp b/services/audioflinger/afutils/Vibrator.cpp
index 25fcc6a..7c99ca9 100644
--- a/services/audioflinger/afutils/Vibrator.cpp
+++ b/services/audioflinger/afutils/Vibrator.cpp
@@ -20,6 +20,7 @@
 
 #include "Vibrator.h"
 
+#include <android/os/ExternalVibrationScale.h>
 #include <android/os/IExternalVibratorService.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -44,12 +45,17 @@
 }
 
 os::HapticScale onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+    if (externalVibration->getAudioAttributes().flags & AUDIO_FLAG_MUTE_HAPTIC) {
+        ALOGD("%s, mute haptic according to audio attributes flag", __func__);
+        return os::HapticScale::mute();
+    }
     const sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
-        int32_t ret;
+
+        os::ExternalVibrationScale ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
-            ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
+            ALOGD("%s, start external vibration with intensity as %d", __func__, ret.scaleLevel);
             return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
         }
     }
@@ -57,7 +63,7 @@
             __func__,
             evs == nullptr ? "external vibration service not found"
                            : "error when querying intensity");
-    return os::HapticScale::MUTE;
+    return os::HapticScale::mute();
 }
 
 void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
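
onExternalVibrationStart() above now mutes haptics up front when the track's audio attributes
carry AUDIO_FLAG_MUTE_HAPTIC, and otherwise uses the scale returned by the external vibrator
service. A small sketch of that decision, with a placeholder HapticLevel enum and flag constant
standing in for the framework types:

#include <cstdint>

enum class HapticLevel { MUTE, NONE, LOW, HIGH };   // stand-in for os::HapticScale levels
constexpr uint32_t kFlagMuteHaptic = 1u << 0;       // stand-in for AUDIO_FLAG_MUTE_HAPTIC

HapticLevel chooseHapticLevel(uint32_t audioAttributeFlags, HapticLevel serviceLevel) {
    if (audioAttributeFlags & kFlagMuteHaptic) {
        return HapticLevel::MUTE;   // the attribute flag always wins: haptics stay muted
    }
    return serviceLevel;            // otherwise use the scale chosen by the vibrator service
}
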
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
index ee98aef..4235f14 100644
--- a/services/audioflinger/datapath/Android.bp
+++ b/services/audioflinger/datapath/Android.bp
@@ -43,11 +43,14 @@
 
     srcs: [
         "AudioHwDevice.cpp",
+        "AudioStreamIn.cpp",
         "AudioStreamOut.cpp",
+        "SpdifStreamIn.cpp",
         "SpdifStreamOut.cpp",
     ],
 
     header_libs: [
+        "libaudioclient_headers",
         "libaudiohal_headers",
         "liberror_headers",
     ],
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 67e9991..95e9ecc 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -1,19 +1,19 @@
 /*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2007, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioHwDevice"
 //#define LOG_NDEBUG 0
@@ -21,10 +21,13 @@
 #include <system/audio.h>
 #include <utils/Log.h>
 
+#include <audio_utils/spdif/SPDIFDecoder.h>
 #include <audio_utils/spdif/SPDIFEncoder.h>
+#include <media/AudioResamplerPublic.h>
 
 #include "AudioHwDevice.h"
 #include "AudioStreamOut.h"
+#include "SpdifStreamIn.h"
 #include "SpdifStreamOut.h"
 
 namespace android {
@@ -47,12 +50,8 @@
     auto outputStream = new AudioStreamOut(this, flags);
 
     // Try to open the HAL first using the current format.
-    ALOGV("openOutputStream(), try "
-            " sampleRate %d, Format %#x, "
-            "channelMask %#x",
-            config->sample_rate,
-            config->format,
-            config->channel_mask);
+    ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
+            config->format, config->channel_mask);
     status_t status = outputStream->open(handle, deviceType, config, address);
 
     if (status != NO_ERROR) {
@@ -62,13 +61,8 @@
         // FIXME Look at any modification to the config.
         // The HAL might modify the config to suggest a wrapped format.
         // Log this so we can see what the HALs are doing.
-        ALOGI("openOutputStream(), HAL returned"
-            " sampleRate %d, Format %#x, "
-            "channelMask %#x, status %d",
-            config->sample_rate,
-            config->format,
-            config->channel_mask,
-            status);
+        ALOGI("openOutputStream(), HAL returned sampleRate %d, format %#x, channelMask %#x,"
+                " status %d", config->sample_rate, config->format, config->channel_mask, status);
 
         // If the data is encoded then try again using wrapped PCM.
         const bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
@@ -96,6 +90,79 @@
     return status;
 }
 
+status_t AudioHwDevice::openInputStream(
+        AudioStreamIn **ppStreamIn,
+        audio_io_handle_t handle,
+        audio_devices_t deviceType,
+        audio_input_flags_t flags,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char *outputDeviceAddress) {
+
+    struct audio_config originalConfig = *config;
+    auto inputStream = new AudioStreamIn(this, flags);
+
+    // Try to open the HAL first using the current format.
+    ALOGV("openInputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
+            config->format, config->channel_mask);
+    status_t status = inputStream->open(handle, deviceType, config, address, source, outputDevice,
+                                        outputDeviceAddress);
+
+    // If the input could not be opened with the requested parameters and we can handle the
+    // conversion internally, try to open again with the proposed parameters.
+    if (status == BAD_VALUE &&
+        audio_is_linear_pcm(originalConfig.format) &&
+        audio_is_linear_pcm(config->format) &&
+        (config->sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * config->sample_rate) &&
+        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_LIMIT) &&
+        (audio_channel_count_from_in_mask(originalConfig.channel_mask) <= FCC_LIMIT)) {
+        // FIXME describe the change proposed by HAL (save old values so we can log them here)
+        ALOGV("openInputStream() reopening with proposed sampling rate and channel mask");
+        status = inputStream->open(handle, deviceType, config, address, source,
+                outputDevice, outputDeviceAddress);
+        // FIXME log this new status; HAL should not propose any further changes
+        if (status != NO_ERROR) {
+            delete inputStream;
+            inputStream = nullptr;
+        }
+    } else if (status != NO_ERROR) {
+        delete inputStream;
+        inputStream = nullptr;
+
+        // FIXME Look at any modification to the config.
+        // The HAL might modify the config to suggest a wrapped format.
+        // Log this so we can see what the HALs are doing.
+        ALOGI("openInputStream(), HAL returned sampleRate %d, format %#x, channelMask %#x,"
+                " status %d", config->sample_rate, config->format, config->channel_mask, status);
+
+        // If the data is encoded then try again using wrapped PCM.
+        const bool unwrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
+                && ((flags & AUDIO_INPUT_FLAG_DIRECT) != 0);
+
+        if (unwrapperNeeded) {
+            if (SPDIFDecoder::isFormatSupported(originalConfig.format)) {
+                inputStream = new SpdifStreamIn(this, flags, originalConfig.format);
+                status = inputStream->open(handle, deviceType, &originalConfig, address, source,
+                        outputDevice, outputDeviceAddress);
+                if (status != NO_ERROR) {
+                    ALOGE("ERROR - openInputStream(), SPDIF open returned %d",
+                        status);
+                    delete inputStream;
+                    inputStream = nullptr;
+                }
+            } else {
+                ALOGE("ERROR - openInputStream(), SPDIFDecoder does not support format 0x%08x",
+                    originalConfig.format);
+            }
+        }
+    }
+
+    *ppStreamIn = inputStream;
+    return status;
+}
+
 bool AudioHwDevice::supportsAudioPatches() const {
     bool result;
     return mHwDevice->supportsAudioPatches(&result) == OK ? result : false;
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index cfb6fbd..80c1473 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -1,22 +1,21 @@
 /*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2007, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_AUDIO_HW_DEVICE_H
-#define ANDROID_AUDIO_HW_DEVICE_H
+#pragma once
 
 #include <stdint.h>
 #include <stdlib.h>
@@ -30,6 +29,7 @@
 
 namespace android {
 
+class AudioStreamIn;
 class AudioStreamOut;
 
 class AudioHwDevice {
@@ -89,6 +89,17 @@
             struct audio_config *config,
             const char *address);
 
+    status_t openInputStream(
+            AudioStreamIn **ppStreamIn,
+            audio_io_handle_t handle,
+            audio_devices_t deviceType,
+            audio_input_flags_t flags,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char *outputDeviceAddress);
+
     [[nodiscard]] bool supportsAudioPatches() const;
 
     [[nodiscard]] status_t getAudioPort(struct audio_port_v7 *port) const;
@@ -112,5 +123,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_HW_DEVICE_H
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
new file mode 100644
index 0000000..76618f4
--- /dev/null
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -0,0 +1,137 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include "AudioStreamIn.h"
+
+#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "AudioHwDevice.h"
+
+namespace android {
+
+// ----------------------------------------------------------------------------
+AudioStreamIn::AudioStreamIn(AudioHwDevice *dev, audio_input_flags_t flags)
+        : audioHwDev(dev)
+        , flags(flags)
+{
+}
+
+// This must be defined here together with the HAL includes above and
+// not solely in the header.
+AudioStreamIn::~AudioStreamIn() = default;
+
+sp<DeviceHalInterface> AudioStreamIn::hwDev() const
+{
+    return audioHwDev->hwDevice();
+}
+
+status_t AudioStreamIn::getCapturePosition(int64_t* frames, int64_t* time)
+{
+    if (stream == nullptr) {
+        return NO_INIT;
+    }
+
+    int64_t halPosition = 0;
+    const status_t status = stream->getCapturePosition(&halPosition, time);
+    if (status != NO_ERROR) {
+        return status;
+    }
+
+    if (mHalFormatHasProportionalFrames &&
+            (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
+        // For DirectRecord reset timestamp to 0 on standby.
+        const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
+                0 : (halPosition - mFramesReadAtStandby);
+        // Scale from HAL sample rate to application rate.
+        *frames = adjustedPosition / mRateMultiplier;
+    } else {
+        // For compressed formats and linear PCM.
+        *frames = halPosition;
+    }
+
+    return status;
+}
+
+status_t AudioStreamIn::open(
+        audio_io_handle_t handle,
+        audio_devices_t deviceType,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char *outputDeviceAddress)
+{
+    sp<StreamInHalInterface> inStream;
+
+    int status = hwDev()->openInputStream(
+            handle,
+            deviceType,
+            config,
+            flags,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress,
+            &inStream);
+    ALOGV("AudioStreamIn::open(), HAL returned stream %p, sampleRate %d, format %#x,"
+            " channelMask %#x, status %d", inStream.get(), config->sample_rate, config->format,
+            config->channel_mask, status);
+
+    if (status == NO_ERROR) {
+        stream = inStream;
+        mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
+        status = stream->getFrameSize(&mHalFrameSize);
+        LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
+        LOG_ALWAYS_FATAL_IF(mHalFrameSize == 0, "Error frame size was %zu but must be greater than"
+                " zero", mHalFrameSize);
+    }
+
+    return status;
+}
+
+audio_config_base_t AudioStreamIn::getAudioProperties() const
+{
+    audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (stream->getAudioProperties(&result) != OK) {
+        result.sample_rate = 0;
+        result.channel_mask = AUDIO_CHANNEL_INVALID;
+        result.format = AUDIO_FORMAT_INVALID;
+    }
+    return result;
+}
+
+status_t AudioStreamIn::standby()
+{
+    mFramesReadAtStandby = mFramesRead;
+    return stream->standby();
+}
+
+status_t AudioStreamIn::read(void* buffer, size_t bytes, size_t* read)
+{
+    const status_t result = stream->read(buffer, bytes, read);
+    if (result == OK && *read > 0 && mHalFrameSize > 0) {
+        mFramesRead += *read / mHalFrameSize;
+    }
+    return result;
+}
+
+} // namespace android
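
AudioStreamIn::getCapturePosition() above rebases the HAL frame counter at standby and scales it
from the HAL rate down to the application rate, but only for direct streams carrying proportional
(PCM) frames. The same arithmetic isolated as a pure function, with hypothetical names:

#include <cstdint>

int64_t adjustCapturePosition(int64_t halPosition,
                              int64_t framesReadAtStandby,
                              int rateMultiplier,
                              bool directProportionalPcm) {
    if (!directProportionalPcm) {
        return halPosition;   // compressed or non-direct capture: pass the HAL position through
    }
    // Rebase to zero at standby, never going negative, then scale HAL frames to app frames.
    const int64_t adjusted =
            (halPosition <= framesReadAtStandby) ? 0 : halPosition - framesReadAtStandby;
    return adjusted / rateMultiplier;
}
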
diff --git a/services/audioflinger/datapath/AudioStreamIn.h b/services/audioflinger/datapath/AudioStreamIn.h
index 604a4e4..6d1c6a7 100644
--- a/services/audioflinger/datapath/AudioStreamIn.h
+++ b/services/audioflinger/datapath/AudioStreamIn.h
@@ -31,30 +31,57 @@
     virtual status_t standby() = 0;
 };
 
-// AudioStreamIn is immutable, so its fields are const.
-// The methods must not be const to match StreamHalInterface signature.
-
-struct AudioStreamIn : public Source {
+/**
+ * Managed access to a HAL input stream.
+ */
+class AudioStreamIn : public Source {
+public:
     const AudioHwDevice* const audioHwDev;
-    const sp<StreamInHalInterface> stream;
+    sp<StreamInHalInterface> stream;
     const audio_input_flags_t flags;
 
-    AudioStreamIn(
-            const AudioHwDevice* dev, const sp<StreamInHalInterface>& in,
-            audio_input_flags_t flags)
-        : audioHwDev(dev), stream(in), flags(flags) {}
+    [[nodiscard]] sp<DeviceHalInterface> hwDev() const;
 
-    status_t read(void* buffer, size_t bytes, size_t* read) final {
-        return stream->read(buffer, bytes, read);
-    }
+    AudioStreamIn(AudioHwDevice *dev, audio_input_flags_t flags);
 
-    status_t getCapturePosition(int64_t* frames, int64_t* time) final {
-        return stream->getCapturePosition(frames, time);
-    }
+    virtual status_t open(
+            audio_io_handle_t handle,
+            audio_devices_t deviceType,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char *outputDeviceAddress);
 
-    status_t standby() final { return stream->standby(); }
+    ~AudioStreamIn() override;
 
-    sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
+    status_t getCapturePosition(int64_t* frames, int64_t* time) override;
+
+    status_t read(void* buffer, size_t bytes, size_t* read) override;
+
+    /**
+     * @return frame size from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual size_t getFrameSize() const { return mHalFrameSize; }
+
+    /**
+     * @return audio stream configuration: channel mask, format, sample rate:
+     *   - channel mask from the perspective of the application and the AudioFlinger;
+     *     the HAL is in stereo mode when capturing multi-channel compressed audio over HDMI;
+     *   - format from the perspective of the application and the AudioFlinger;
+     *   - sample rate from the perspective of the application and the AudioFlinger;
+     *     the HAL may be running at a higher sample rate if, for example, capturing wrapped EAC3.
+     */
+    [[nodiscard]] virtual audio_config_base_t getAudioProperties() const;
+
+    status_t standby() override;
+
+protected:
+    uint64_t mFramesRead = 0;
+    int64_t mFramesReadAtStandby = 0;
+    int mRateMultiplier = 1;
+    bool mHalFormatHasProportionalFrames = false;
+    size_t mHalFrameSize = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 6fa82e5..9851f3a 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -1,30 +1,31 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
 
+#include "AudioStreamOut.h"
+
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <system/audio.h>
 #include <utils/Log.h>
 
 #include "AudioHwDevice.h"
-#include "AudioStreamOut.h"
 
 namespace android {
 
@@ -98,15 +99,15 @@
         return status;
     }
 
-    // Adjust for standby using HAL rate frames.
-    // Only apply this correction if the HAL is getting PCM frames.
-    if (mHalFormatHasProportionalFrames) {
+    if (mHalFormatHasProportionalFrames &&
+            (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
+        // For DirectTrack reset timestamp to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
         *frames = adjustedPosition / mRateMultiplier;
     } else {
-        // For offloaded MP3 and other compressed formats.
+        // For offloaded MP3 and other compressed formats, and linear PCM.
         *frames = halPosition;
     }
 
@@ -132,14 +133,9 @@
             config,
             address,
             &outStream);
-    ALOGV("AudioStreamOut::open(), HAL returned "
-            " stream %p, sampleRate %d, Format %#x, "
-            "channelMask %#x, status %d",
-            outStream.get(),
-            config->sample_rate,
-            config->format,
-            config->channel_mask,
-            status);
+    ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
+            " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
+            config->channel_mask, status);
 
     // Some HALs may not recognize AUDIO_FORMAT_IEC61937. But if we declare
     // it as PCM then it will probably work.
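
The fallback described by the comment above is not visible in this hunk; it amounts to retrying the HAL open with the stream declared as 16-bit PCM. A minimal sketch of that behaviour, with openHal standing in for the real HAL call (it is not part of this patch):

    #include <functional>
    #include <system/audio.h>

    // Sketch only: if the HAL rejects AUDIO_FORMAT_IEC61937, retry with the stream
    // declared as 16-bit PCM; IEC61937 bursts are carried in ordinary PCM frames,
    // so the same sample rate and channel mask still apply.
    static int openWithIec61937Fallback(struct audio_config* config,
                                        const std::function<int(struct audio_config*)>& openHal) {
        int status = openHal(config);
        if (status != 0 && config->format == AUDIO_FORMAT_IEC61937) {
            struct audio_config pcmConfig = *config;
            pcmConfig.format = AUDIO_FORMAT_PCM_16_BIT;
            status = openHal(&pcmConfig);
        }
        return status;
    }
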
@@ -162,7 +158,7 @@
         mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
         status = stream->getFrameSize(&mHalFrameSize);
         LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
-        LOG_ALWAYS_FATAL_IF(mHalFrameSize <= 0, "Error frame size was %zu but must be greater than"
+        LOG_ALWAYS_FATAL_IF(mHalFrameSize == 0, "Error frame size was %zu but must be greater than"
                 " zero", mHalFrameSize);
 
     }
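
For reference, the getRenderPosition() change earlier in this file reduces to the arithmetic below; the helper name is illustrative, and the multiplier of 4 corresponds to wrapped E-AC3.

    #include <cstdint>

    // Illustrative only: mirrors the position math in AudioStreamOut::getRenderPosition().
    // halPosition counts frames at the HAL rate; the result is in application frames.
    static uint64_t adjustedRenderPosition(uint64_t halPosition, uint64_t framesWrittenAtStandby,
                                           int rateMultiplier, bool proportional, bool direct) {
        if (proportional && direct) {
            // Direct tracks restart at 0 after standby: drop the frames written before the
            // last standby, then scale from the HAL rate down to the application rate.
            const uint64_t adjusted = (halPosition <= framesWrittenAtStandby)
                    ? 0 : halPosition - framesWrittenAtStandby;
            return adjusted / rateMultiplier;
        }
        // Offloaded/compressed streams and ordinary linear PCM report the HAL position as-is.
        return halPosition;
    }

For example, a direct E-AC3 stream (rateMultiplier 4) that had written 192000 HAL frames before standby reports a HAL position of 384000 as (384000 - 192000) / 4 = 48000 application frames.
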
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ce00f8c..ea41bba 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -1,27 +1,28 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_AUDIO_STREAM_OUT_H
-#define ANDROID_AUDIO_STREAM_OUT_H
+#pragma once
 
 #include <stdint.h>
 #include <sys/types.h>
 
 #include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
 
 namespace android {
 
@@ -34,9 +35,6 @@
  */
 class AudioStreamOut {
 public:
-// AudioStreamOut is immutable, so its fields are const.
-// For emphasis, we could also make all pointers to them be "const *",
-// but that would clutter the code unnecessarily.
     AudioHwDevice * const audioHwDev;
     sp<StreamOutHalInterface> stream;
     const audio_output_flags_t flags;
@@ -101,15 +99,13 @@
     virtual void presentationComplete() { mExpectRetrograde = true; }
 
 protected:
-    uint64_t             mFramesWritten = 0; // reset by flush
-    uint64_t             mFramesWrittenAtStandby = 0;
-    uint64_t             mRenderPosition = 0; // reset by flush, standby, or presentation complete
-    int                  mRateMultiplier = 1;
-    bool                 mHalFormatHasProportionalFrames = false;
-    size_t               mHalFrameSize = 0;
-    bool                 mExpectRetrograde = false; // see presentationComplete
+    uint64_t mFramesWritten = 0; // reset by flush
+    uint64_t mFramesWrittenAtStandby = 0;
+    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
+    int mRateMultiplier = 1;
+    bool mHalFormatHasProportionalFrames = false;
+    size_t mHalFrameSize = 0;
+    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_STREAM_OUT_H
diff --git a/services/audioflinger/datapath/SpdifStreamIn.cpp b/services/audioflinger/datapath/SpdifStreamIn.cpp
new file mode 100644
index 0000000..98ce712
--- /dev/null
+++ b/services/audioflinger/datapath/SpdifStreamIn.cpp
@@ -0,0 +1,128 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include "Configuration.h"
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include <audio_utils/spdif/SPDIFDecoder.h>
+
+#include "AudioHwDevice.h"
+#include "SpdifStreamIn.h"
+
+namespace android {
+
+/**
+ * If the HAL is generating IEC61937 data and AudioFlinger expects an elementary stream, then we need to
+ * extract the data using an SPDIF decoder.
+ */
+SpdifStreamIn::SpdifStreamIn(AudioHwDevice *dev,
+            audio_input_flags_t flags,
+            audio_format_t format)
+        : AudioStreamIn(dev, flags)
+        , mSpdifDecoder(this, format)
+{
+}
+
+status_t SpdifStreamIn::open(
+        audio_io_handle_t handle,
+        audio_devices_t devices,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char* outputDeviceAddress)
+{
+    struct audio_config customConfig = *config;
+
+    mApplicationConfig.format = config->format;
+    mApplicationConfig.sample_rate = config->sample_rate;
+    mApplicationConfig.channel_mask = config->channel_mask;
+
+    mRateMultiplier = spdif_rate_multiplier(config->format);
+    if (mRateMultiplier <= 0) {
+        ALOGE("ERROR SpdifStreamIn::open() unrecognized format 0x%08X\n", config->format);
+        return BAD_VALUE;
+    }
+    customConfig.sample_rate = config->sample_rate * mRateMultiplier;
+    customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    customConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
+
+    // Always print this because otherwise it could be very confusing if the
+    // HAL and AudioFlinger are using different formats.
+    // Print before open() because HAL may modify customConfig.
+    ALOGI("SpdifStreamIn::open() AudioFlinger requested sampleRate %d, format %#x, channelMask %#x",
+            config->sample_rate, config->format, config->channel_mask);
+    ALOGI("SpdifStreamIn::open() HAL configured for sampleRate %d, format %#x, channelMask %#x",
+            customConfig.sample_rate, customConfig.format, customConfig.channel_mask);
+
+    const status_t status = AudioStreamIn::open(
+            handle,
+            devices,
+            &customConfig,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress);
+
+    ALOGI("SpdifStreamIn::open() status = %d", status);
+
+#ifdef TEE_SINK
+    if (status == OK) {
+        // Don't use PCM 16-bit format to avoid WAV encoding IEC61937 data.
+        mTee.set(customConfig.sample_rate,
+                audio_channel_count_from_in_mask(customConfig.channel_mask),
+                AUDIO_FORMAT_IEC61937, NBAIO_Tee::TEE_FLAG_INPUT_THREAD);
+        mTee.setId(std::string("_") + std::to_string(handle) + "_C");
+    }
+#endif
+
+    return status;
+}
+
+status_t SpdifStreamIn::standby()
+{
+    mSpdifDecoder.reset();
+    return AudioStreamIn::standby();
+}
+
+status_t SpdifStreamIn::readDataBurst(void* buffer, size_t bytes, size_t* read)
+{
+    status_t status = AudioStreamIn::read(buffer, bytes, read);
+
+#ifdef TEE_SINK
+    if (*read > 0) {
+        mTee.write(reinterpret_cast<const char *>(buffer), *read / AudioStreamIn::getFrameSize());
+    }
+#endif
+    return status;
+}
+
+status_t SpdifStreamIn::read(void* buffer, size_t numBytes, size_t* read)
+{
+    // Read from SPDIF extractor. It will call back to readDataBurst().
+    const auto bytesRead = mSpdifDecoder.read(buffer, numBytes);
+    if (bytesRead >= 0) {
+        *read = bytesRead;
+        return OK;
+    }
+    return NOT_ENOUGH_DATA;
+}
+
+} // namespace android
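
The read path in this file is a pull model: read() asks the SPDIF decoder for elementary-stream bytes, and the decoder calls back into readDataBurst() to fetch raw IEC61937 bursts from the HAL. A self-contained sketch of that shape follows; the class names are toys, not the audio_utils API.

    #include <cstddef>
    #include <sys/types.h>

    // Toy pull-style decoder: extract() produces output bytes by pulling raw input
    // through the readInput() hook, mirroring SPDIFDecoder::read()/readInput().
    class PullDecoder {
    public:
        virtual ~PullDecoder() = default;
        ssize_t extract(void* out, size_t bytes) {
            // A real decoder would strip the IEC61937 burst preamble here; this toy
            // version simply forwards whatever the source provides.
            return readInput(out, bytes);
        }
    protected:
        virtual ssize_t readInput(void* buffer, size_t bytes) = 0;
    };

    // The stream owns the decoder and supplies raw bursts on demand, which is the
    // role MySPDIFDecoder::readInput() plays for SpdifStreamIn.
    class ToyStream {
    public:
        ssize_t read(void* buffer, size_t bytes) { return mDecoder.extract(buffer, bytes); }
    private:
        ssize_t readDataBurst(void* /*buffer*/, size_t bytes) {
            return static_cast<ssize_t>(bytes);  // stand-in for the HAL read
        }
        struct BurstSource : PullDecoder {
            explicit BurstSource(ToyStream* stream) : mStream(stream) {}
            ssize_t readInput(void* buffer, size_t bytes) override {
                return mStream->readDataBurst(buffer, bytes);
            }
            ToyStream* const mStream;
        };
        BurstSource mDecoder{this};
    };
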
diff --git a/services/audioflinger/datapath/SpdifStreamIn.h b/services/audioflinger/datapath/SpdifStreamIn.h
new file mode 100644
index 0000000..78832ee
--- /dev/null
+++ b/services/audioflinger/datapath/SpdifStreamIn.h
@@ -0,0 +1,134 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <system/audio.h>
+
+#include "AudioStreamIn.h"
+
+#include <audio_utils/spdif/SPDIFDecoder.h>
+#include <afutils/NBAIO_Tee.h>
+
+namespace android {
+
+/**
+ * Stream that is a PCM data burst in the HAL but looks like an encoded stream
+ * to the AudioFlinger. Extracts the encoded data from the IEC 61937-3 SPDIF wrapper.
+ */
+class SpdifStreamIn : public AudioStreamIn {
+public:
+
+    SpdifStreamIn(AudioHwDevice *dev, audio_input_flags_t flags,
+            audio_format_t format);
+
+    status_t open(
+            audio_io_handle_t handle,
+            audio_devices_t devices,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char* outputDeviceAddress) override;
+
+    /**
+     * Read an audio buffer from the driver. If at least one frame was read successfully
+     * before an error occurred, the driver should return that successful (short) byte
+     * count and then report the error on the subsequent call.
+     */
+    status_t read(void* buffer, size_t bytes, size_t* read) override;
+
+    /**
+     * @return frame size from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] size_t getFrameSize() const override { return sizeof(int8_t); }
+
+    /**
+     * @return audio_config_base_t from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] audio_config_base_t getAudioProperties() const override {
+        return mApplicationConfig;
+    }
+
+    /**
+     * @return format from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual audio_format_t getFormat() const { return mApplicationConfig.format; }
+
+    /**
+     * The HAL may be running at a higher sample rate if, for example, reading wrapped EAC3.
+     * @return sample rate from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual uint32_t getSampleRate() const { return mApplicationConfig.sample_rate; }
+
+    /**
+     * The HAL is in stereo mode when reading multi-channel compressed audio.
+     * @return channel mask from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual audio_channel_mask_t getChannelMask() const {
+        return mApplicationConfig.channel_mask;
+    }
+
+    status_t standby() override;
+
+private:
+
+    class MySPDIFDecoder : public SPDIFDecoder
+    {
+    public:
+        MySPDIFDecoder(SpdifStreamIn *spdifStreamIn, audio_format_t format)
+          :  SPDIFDecoder(format)
+          , mSpdifStreamIn(spdifStreamIn)
+        {
+        }
+
+        ssize_t readInput(void* buffer, size_t bytes) override
+        {
+            size_t bytesRead = 0;
+            const auto result = mSpdifStreamIn->readDataBurst(buffer, bytes, &bytesRead);
+            if (result < 0) {
+                return result;
+            }
+            return bytesRead;
+        }
+
+    protected:
+        SpdifStreamIn * const mSpdifStreamIn;
+    };
+
+    MySPDIFDecoder mSpdifDecoder;
+    audio_config_base_t mApplicationConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+
+    status_t readDataBurst(void* data, size_t bytes, size_t* read);
+
+#ifdef TEE_SINK
+    NBAIO_Tee mTee;
+#endif
+
+};
+
+} // namespace android
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 0c6a5a1..65a4eec 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -1,19 +1,19 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
@@ -42,10 +42,10 @@
 }
 
 status_t SpdifStreamOut::open(
-                              audio_io_handle_t handle,
-                              audio_devices_t devices,
-                              struct audio_config *config,
-                              const char *address)
+        audio_io_handle_t handle,
+        audio_devices_t devices,
+        struct audio_config *config,
+        const char *address)
 {
     struct audio_config customConfig = *config;
 
@@ -53,22 +53,10 @@
     mApplicationConfig.sample_rate = config->sample_rate;
     mApplicationConfig.channel_mask = config->channel_mask;
 
-    // Some data bursts run at a higher sample rate.
-    // TODO Move this into the audio_utils as a static method.
-    switch(config->format) {
-        case AUDIO_FORMAT_E_AC3:
-        case AUDIO_FORMAT_E_AC3_JOC:
-            mRateMultiplier = 4;
-            break;
-        case AUDIO_FORMAT_AC3:
-        case AUDIO_FORMAT_DTS:
-        case AUDIO_FORMAT_DTS_HD:
-            mRateMultiplier = 1;
-            break;
-        default:
-            ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n",
-                config->format);
-            return BAD_VALUE;
+    mRateMultiplier = spdif_rate_multiplier(config->format);
+    if (mRateMultiplier <= 0) {
+        ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n", config->format);
+        return BAD_VALUE;
     }
     customConfig.sample_rate = config->sample_rate * mRateMultiplier;
 
@@ -78,16 +66,10 @@
     // Always print this because otherwise it could be very confusing if the
     // HAL and AudioFlinger are using different formats.
     // Print before open() because HAL may modify customConfig.
-    ALOGI("SpdifStreamOut::open() AudioFlinger requested"
-            " sampleRate %d, format %#x, channelMask %#x",
-            config->sample_rate,
-            config->format,
-            config->channel_mask);
-    ALOGI("SpdifStreamOut::open() HAL configured for"
-            " sampleRate %d, format %#x, channelMask %#x",
-            customConfig.sample_rate,
-            customConfig.format,
-            customConfig.channel_mask);
+    ALOGI("SpdifStreamOut::open() AudioFlinger requested sampleRate %d, format %#x,"
+            " channelMask %#x", config->sample_rate, config->format, config->channel_mask);
+    ALOGI("SpdifStreamOut::open() HAL configured for sampleRate %d, format %#x, channelMask %#x",
+            customConfig.sample_rate, customConfig.format, customConfig.channel_mask);
 
     const status_t status = AudioStreamOut::open(
             handle,
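
The switch removed above is the ground truth for the formats this file previously handled; the shared helper it was folded into behaves like the sketch below for those formats (the real spdif_rate_multiplier() lives in audio_utils and may cover additional formats).

    #include <system/audio.h>

    // Sketch of the format -> IEC61937 sample-rate multiplier mapping encoded by the
    // removed switch. A non-positive return is treated as BAD_VALUE by the callers.
    static int rateMultiplierSketch(audio_format_t format) {
        switch (format) {
            case AUDIO_FORMAT_E_AC3:
            case AUDIO_FORMAT_E_AC3_JOC:
                return 4;  // E-AC3 data bursts run at 4x the audio sample rate
            case AUDIO_FORMAT_AC3:
            case AUDIO_FORMAT_DTS:
            case AUDIO_FORMAT_DTS_HD:
                return 1;
            default:
                return -1;
        }
    }
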
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index 56d57f6..c6d27ba 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -1,22 +1,21 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_SPDIF_STREAM_OUT_H
-#define ANDROID_SPDIF_STREAM_OUT_H
+#pragma once
 
 #include <stdint.h>
 #include <sys/types.h>
@@ -40,8 +39,6 @@
     SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags,
             audio_format_t format);
 
-    ~SpdifStreamOut() override = default;
-
     status_t open(
             audio_io_handle_t handle,
             audio_devices_t devices,
@@ -116,10 +113,10 @@
         SpdifStreamOut * const mSpdifStreamOut;
     };
 
-    MySPDIFEncoder       mSpdifEncoder;
-    audio_config_base_t  mApplicationConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    MySPDIFEncoder mSpdifEncoder;
+    audio_config_base_t mApplicationConfig = AUDIO_CONFIG_BASE_INITIALIZER;
 
-    ssize_t  writeDataBurst(const void* data, size_t bytes);
+    ssize_t writeDataBurst(const void* data, size_t bytes);
 
 #ifdef TEE_SINK
     NBAIO_Tee mTee;
@@ -128,5 +125,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_SPDIF_STREAM_OUT_H
diff --git a/services/audioflinger/datapath/ThreadMetrics.h b/services/audioflinger/datapath/ThreadMetrics.h
index c643a57..4eb8aa0 100644
--- a/services/audioflinger/datapath/ThreadMetrics.h
+++ b/services/audioflinger/datapath/ThreadMetrics.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_THREADMETRICS_H
-#define ANDROID_AUDIO_THREADMETRICS_H
+#pragma once
 
 #include <media/MediaMetricsItem.h>
 
@@ -210,5 +209,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_THREADMETRICS_H
diff --git a/services/audioflinger/datapath/TrackMetrics.h b/services/audioflinger/datapath/TrackMetrics.h
index 2b44acb..ad5d3db 100644
--- a/services/audioflinger/datapath/TrackMetrics.h
+++ b/services/audioflinger/datapath/TrackMetrics.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_TRACKMETRICS_H
-#define ANDROID_AUDIO_TRACKMETRICS_H
+#pragma once
 
 #include <binder/IActivityManager.h>
 #include <binder/IPCThreadState.h>
@@ -306,5 +305,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_TRACKMETRICS_H
diff --git a/services/audioflinger/fastpath/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
index e0a15c1..1d41b3f 100644
--- a/services/audioflinger/fastpath/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -178,8 +178,8 @@
                 (void *)(uintptr_t)mSinkChannelMask);
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
                 (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
-        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
-                (void *)(uintptr_t)fastTrack->mHapticIntensity);
+        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE,
+                (void *)(&(fastTrack->mHapticScale)));
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_MAX_AMPLITUDE,
                 (void *)(&(fastTrack->mHapticMaxAmplitude)));
 
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index 8ab6d25..0a56f92 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -54,7 +54,7 @@
     audio_format_t          mFormat = AUDIO_FORMAT_INVALID;         // track format
     int                     mGeneration = 0;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
+    os::HapticScale         mHapticScale = os::HapticScale::mute(); // scale of haptic data
     float                   mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
 };
 
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 197cff9..3b764d1 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -21,10 +21,12 @@
 #include "SoundDoseManager.h"
 
 #include "android/media/SoundDoseRecord.h"
+#include <algorithm>
 #include <android-base/stringprintf.h>
-#include <media/AidlConversionCppNdk.h>
 #include <cinttypes>
 #include <ctime>
+#include <functional>
+#include <media/AidlConversionCppNdk.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -46,6 +48,8 @@
     return now_ts.tv_sec;
 }
 
+constexpr float kDefaultRs2LowerBound = 80.f;  // dBA
+
 }  // namespace
 
 sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
@@ -53,7 +57,7 @@
         size_t channelCount, audio_format_t format) {
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose.size() > 0 && mEnabledCsd) {
+    if (!mUseFrameworkMel && mHalSoundDose.size() > 0 && mEnabledCsd) {
         ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
         return nullptr;
     }
@@ -143,7 +147,7 @@
     ALOGV("%s", __func__);
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose.size() > 0) {
+    if (!mUseFrameworkMel && mHalSoundDose.size() > 0) {
         bool success = true;
         for (auto& halSoundDose : mHalSoundDose) {
             // using the HAL sound dose interface
@@ -187,6 +191,21 @@
     }
 }
 
+float SoundDoseManager::getAttenuationForDeviceId(audio_port_handle_t id) const {
+    float attenuation = 0.f;
+
+    const std::lock_guard _l(mLock);
+    const auto deviceTypeIt = mActiveDeviceTypes.find(id);
+    if (deviceTypeIt != mActiveDeviceTypes.end()) {
+        auto attenuationIt = mMelAttenuationDB.find(deviceTypeIt->second);
+        if (attenuationIt != mMelAttenuationDB.end()) {
+            attenuation = attenuationIt->second;
+        }
+    }
+
+    return attenuation;
+}
+
 audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
     if (isComputeCsdForcedOnAllDevices()) {
         // If CSD is forced on all devices return random port id. Used only in testing.
@@ -212,6 +231,15 @@
         ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
         return AUDIO_PORT_HANDLE_NONE;
     }
+
+    if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
+        const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
+        if (btDeviceIt == mBluetoothDevicesWithCsd.end() || !btDeviceIt->second) {
+            ALOGI("%s: bt device %s does not support sound dose", __func__,
+                  adt.toString().c_str());
+            return AUDIO_PORT_HANDLE_NONE;
+        }
+    }
     return deviceIt->second;
 }
 
@@ -256,11 +284,15 @@
     auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
-                __func__, in_audioDevice.type.type,
+                __func__, static_cast<int>(in_audioDevice.type.type),
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
-    soundDoseManager->onMomentaryExposure(in_currentDbA, id);
+
+    float attenuation = soundDoseManager->getAttenuationForDeviceId(id);
+    ALOGV("%s: attenuating received momentary exposure with %f dB", __func__, attenuation);
+    // TODO: remove this attenuation once HAL MELs are required to always be attenuated
+    soundDoseManager->onMomentaryExposure(in_currentDbA - attenuation, id);
 
     return ndk::ScopedAStatus::ok();
 }
@@ -285,13 +317,14 @@
     auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
-                __func__, in_audioDevice.type.type,
+                __func__, static_cast<int>(in_audioDevice.type.type),
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
+
     // TODO: introduce timestamp in onNewMelValues callback
-    soundDoseManager->onNewMelValues(in_melRecord.melValues, 0,
-                                     in_melRecord.melValues.size(), id);
+    soundDoseManager->onNewMelValues(in_melRecord.melValues, 0, in_melRecord.melValues.size(),
+                                     id, /*attenuated=*/false);
 
     return ndk::ScopedAStatus::ok();
 }
@@ -549,9 +582,6 @@
 }
 
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
-    // invalidate any HAL sound dose interface used
-    resetHalSoundDoseInterfaces();
-
     const std::lock_guard _l(mLock);
     mUseFrameworkMel = useFrameworkMel;
 }
@@ -562,8 +592,19 @@
 }
 
 void SoundDoseManager::setComputeCsdOnAllDevices(bool computeCsdOnAllDevices) {
-    const std::lock_guard _l(mLock);
-    mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+    bool changed = false;
+    {
+        const std::lock_guard _l(mLock);
+        if (mHalSoundDose.size() != 0) {
+            // when using the HAL path we cannot enforce to deliver values for all devices
+            changed = mUseFrameworkMel != computeCsdOnAllDevices;
+            mUseFrameworkMel = computeCsdOnAllDevices;
+        }
+        mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+    }
+    if (changed && computeCsdOnAllDevices) {
+        mMelReporterCallback->applyAllAudioPatches();
+    }
 }
 
 bool SoundDoseManager::isComputeCsdForcedOnAllDevices() const {
@@ -582,7 +623,7 @@
 
 bool SoundDoseManager::useHalSoundDose() const {
     const std::lock_guard _l(mLock);
-    return mHalSoundDose.size() > 0;
+    return !mUseFrameworkMel && mHalSoundDose.size() > 0;
 }
 
 void SoundDoseManager::resetSoundDose() {
@@ -604,26 +645,68 @@
 }
 
 void SoundDoseManager::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
-                                      audio_port_handle_t deviceId) const {
+                                      audio_port_handle_t deviceId, bool attenuated) const {
     ALOGV("%s", __func__);
 
-
     sp<media::ISoundDoseCallback> soundDoseCallback;
     std::vector<audio_utils::CsdRecord> records;
     float currentCsd;
+
+    // TODO: delete this case once HAL MELs are required to always be attenuated
+    float attenuation = attenuated ? 0.0f : getAttenuationForDeviceId(deviceId);
+
     {
         const std::lock_guard _l(mLock);
         if (!mEnabledCsd) {
             return;
         }
 
-
         const int64_t timestampSec = getMonotonicSecond();
 
-        // only for internal callbacks
-        records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
-                deviceId, std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
-                timestampSec - length));
+        if (attenuated) {
+            records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
+                    deviceId,
+                    std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
+                    timestampSec - length));
+        } else {
+            ALOGV("%s: attenuating received values with %f dB", __func__, attenuation);
+
+            // Extract all intervals whose values are >= the RS2 lower bound (80 dBA)
+            // after the attenuation is applied
+            size_t start = offset;
+            size_t stop = offset;
+            for (; stop < mels.size() && stop < offset + length; ++stop) {
+                if (mels[stop] - attenuation < kDefaultRs2LowerBound) {
+                    if (start < stop) {
+                        std::vector<float> attMel(stop-start, -attenuation);
+                        // attMel[i] = mels[i] - attenuation, i in [start, stop)
+                        std::transform(mels.begin() + start, mels.begin() + stop, attMel.begin(),
+                                       attMel.begin(), std::plus<float>());
+                        std::vector<audio_utils::CsdRecord> newRec =
+                                mMelAggregator->aggregateAndAddNewMelRecord(
+                                        audio_utils::MelRecord(deviceId,
+                                                               attMel,
+                                                               timestampSec - length + start -
+                                                               offset));
+                        std::copy(newRec.begin(), newRec.end(), std::back_inserter(records));
+                    }
+                    start = stop+1;
+                }
+            }
+            if (start < stop) {
+                std::vector<float> attMel(stop-start, -attenuation);
+                // attMel[i] = mels[i] - attenuation, i in [start, stop)
+                std::transform(mels.begin() + start, mels.begin() + stop, attMel.begin(),
+                               attMel.begin(), std::plus<float>());
+                std::vector<audio_utils::CsdRecord> newRec =
+                        mMelAggregator->aggregateAndAddNewMelRecord(
+                                audio_utils::MelRecord(deviceId,
+                                                       attMel,
+                                                       timestampSec - length + start -
+                                                       offset));
+                std::copy(newRec.begin(), newRec.end(), std::back_inserter(records));
+            }
+        }
 
         currentCsd = mMelAggregator->getCsd();
     }
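
The interval split above is easiest to see with concrete values. The snippet below reproduces it standalone, using the input from the NewMelValuesUnattenuatedAreSplit test added later in this patch; the loop is a simplified re-statement, not the production code.

    #include <cstdio>
    #include <vector>

    int main() {
        // Values from the NewMelValuesUnattenuatedAreSplit test; attenuation of 0 dB.
        const std::vector<float> mels = {79.f, 80.f, 79.f, 80.f, 79.f, 79.f, 80.f};
        const float attenuation = 0.f;
        const float rs2LowerBound = 80.f;  // dBA, matches kDefaultRs2LowerBound

        std::vector<std::vector<float>> records;
        size_t start = 0;
        for (size_t stop = 0; stop <= mels.size(); ++stop) {
            // A sample below the bound (or the end of input) closes the current interval.
            const bool closes = stop == mels.size() || mels[stop] - attenuation < rs2LowerBound;
            if (!closes) continue;
            if (start < stop) {
                std::vector<float> attMel;
                for (size_t i = start; i < stop; ++i) attMel.push_back(mels[i] - attenuation);
                records.push_back(attMel);  // would become one aggregated MelRecord
            }
            start = stop + 1;
        }
        // Prints "records: 3": each isolated 80 dBA sample becomes its own record.
        std::printf("records: %zu\n", records.size());
        return 0;
    }
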
@@ -658,6 +741,10 @@
         if (!mEnabledCsd) {
             return;
         }
+
+        if (currentMel < mRs2UpperBound) {
+            return;
+        }
     }
 
     auto soundDoseCallback = getSoundDoseCallback();
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 347eabe..52a3fd6 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -39,6 +39,8 @@
 
     virtual void stopMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
     virtual void startMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+
+    virtual void applyAllAudioPatches() = 0;
 };
 
 class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
@@ -53,6 +55,13 @@
           mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
           mRs2UpperBound(kDefaultRs2UpperBound) {};
 
+    // Used only for testing
+    SoundDoseManager(const sp<IMelReporterCallback>& melReporterCallback,
+                     const sp<audio_utils::MelAggregator>& melAggregator)
+            : mMelReporterCallback(melReporterCallback),
+              mMelAggregator(melAggregator),
+              mRs2UpperBound(kDefaultRs2UpperBound) {};
+
     /**
      * \brief Creates or gets the MelProcessor assigned to the streamHandle
      *
@@ -144,7 +153,7 @@
 
     // ------ Override audio_utils::MelProcessor::MelCallback ------
     void onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
-                        audio_port_handle_t deviceId) const override;
+                        audio_port_handle_t deviceId, bool attenuated) const override;
 
     void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
 
@@ -205,6 +214,8 @@
 
     sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
 
+    float getAttenuationForDeviceId(audio_port_handle_t id) const;
+
     void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
     void setCsdEnabled(bool enabled);
     void setUseFrameworkMel(bool useFrameworkMel);
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index 2a2addf..60e170d 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
@@ -11,7 +12,7 @@
     name: "sounddosemanager_tests",
 
     srcs: [
-        "sounddosemanager_tests.cpp"
+        "sounddosemanager_tests.cpp",
     ],
 
     defaults: [
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 294080b..e79b05e 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -20,6 +20,7 @@
 #include <SoundDoseManager.h>
 
 #include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+#include <audio_utils/MelAggregator.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
 #include <media/AidlConversionCppNdk.h>
@@ -43,6 +44,15 @@
 public:
     MOCK_METHOD(void, startMelComputationForDeviceId, (audio_port_handle_t), (override));
     MOCK_METHOD(void, stopMelComputationForDeviceId, (audio_port_handle_t), (override));
+    MOCK_METHOD(void, applyAllAudioPatches, (), (override));
+};
+
+class MelAggregatorMock : public audio_utils::MelAggregator {
+public:
+    MelAggregatorMock() : MelAggregator(100) {}
+
+    MOCK_METHOD(std::vector<audio_utils::CsdRecord>, aggregateAndAddNewMelRecord,
+                (const audio_utils::MelRecord&), (override));
 };
 
 constexpr char kPrimaryModule[] = "primary";
@@ -52,7 +62,8 @@
 protected:
     void SetUp() override {
         mMelReporterCallback = sp<MelReporterCallback>::make();
-        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback);
+        mMelAggregator = sp<MelAggregatorMock>::make();
+        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback, mMelAggregator);
         mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
         mSecondaryHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
 
@@ -69,6 +80,7 @@
     }
 
     sp<MelReporterCallback> mMelReporterCallback;
+    sp<MelAggregatorMock> mMelAggregator;
     sp<SoundDoseManager> mSoundDoseManager;
     std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
     std::shared_ptr<HalSoundDoseMock> mSecondaryHalSoundDose;
@@ -110,12 +122,33 @@
     EXPECT_NE(processor1, processor2);
 }
 
-TEST_F(SoundDoseManagerTest, NewMelValuesCacheNewRecord) {
-    std::vector<float>mels{1, 1};
+TEST_F(SoundDoseManagerTest, NewMelValuesAttenuatedAggregateMels) {
+    std::vector<float> mels{1.f, 1.f};
 
-    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1);
+    EXPECT_CALL(*mMelAggregator.get(), aggregateAndAddNewMelRecord)
+            .Times(1)
+            .WillOnce([&] (const audio_utils::MelRecord& record) {
+                EXPECT_THAT(record.mels, ::testing::ElementsAreArray(mels));
+                return std::vector<audio_utils::CsdRecord>();
+            });
 
-    EXPECT_EQ(mSoundDoseManager->getCachedMelRecordsSize(), size_t{1});
+    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1,
+                                      /*attenuated=*/true);
+}
+
+TEST_F(SoundDoseManagerTest, NewMelValuesUnattenuatedAreSplit) {
+    std::vector<float> mels{79.f, 80.f, 79.f, 80.f, 79.f, 79.f, 80.f};
+
+    EXPECT_CALL(*mMelAggregator.get(), aggregateAndAddNewMelRecord)
+            .Times(3)
+            .WillRepeatedly([&] (const audio_utils::MelRecord& record) {
+                EXPECT_EQ(record.mels.size(), size_t {1});
+                EXPECT_EQ(record.mels[0], 80.f);
+                return std::vector<audio_utils::CsdRecord>();
+            });
+
+    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1,
+            /*attenuated=*/false);
 }
 
 TEST_F(SoundDoseManagerTest, InvalidHalInterfaceIsNotSet) {
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..040a914 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
@@ -13,7 +14,7 @@
     host_supported: true,
 
     srcs: [
-        "mediasyncevent_tests.cpp"
+        "mediasyncevent_tests.cpp",
     ],
 
     header_libs: [
@@ -38,7 +39,7 @@
     host_supported: true,
 
     srcs: [
-        "monotonicframecounter_tests.cpp"
+        "monotonicframecounter_tests.cpp",
     ],
 
     static_libs: [
@@ -54,26 +55,26 @@
 }
 
 cc_test {
-     name: "synchronizedrecordstate_tests",
+    name: "synchronizedrecordstate_tests",
 
-     host_supported: true,
+    host_supported: true,
 
-     srcs: [
-         "synchronizedrecordstate_tests.cpp"
-     ],
+    srcs: [
+        "synchronizedrecordstate_tests.cpp",
+    ],
 
-     header_libs: [
-         "libaudioclient_headers",
-     ],
+    header_libs: [
+        "libaudioclient_headers",
+    ],
 
-     static_libs: [
-         "liblog",
-         "libutils", // RefBase
-     ],
+    static_libs: [
+        "liblog",
+        "libutils", // RefBase
+    ],
 
-     cflags: [
-         "-Wall",
-         "-Werror",
-         "-Wextra",
-     ],
- }
\ No newline at end of file
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
new file mode 100644
index 0000000..b3da333
--- /dev/null
+++ b/services/audioparameterparser/Android.bp
@@ -0,0 +1,69 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_audioparameterparser_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_audioparameterparser_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_defaults {
+    name: "android.hardware.audio.parameter_parser.example_defaults",
+    defaults: [
+        "latest_android_hardware_audio_core_ndk_shared",
+    ],
+
+    shared_libs: [
+        "av-audio-types-aidl-V1-ndk",
+        "libbase",
+        "libbinder_ndk",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wthread-safety",
+    ],
+}
+
+cc_binary {
+    name: "android.hardware.audio.parameter_parser.example_service",
+    system_ext_specific: true,
+    relative_install_path: "hw",
+
+    init_rc: ["android.hardware.audio.parameter_parser.example_service.rc"],
+
+    defaults: [
+        "android.hardware.audio.parameter_parser.example_defaults",
+    ],
+
+    srcs: [
+        "ParameterParser.cpp",
+        "main.cpp",
+    ],
+}
diff --git a/services/audioparameterparser/NOTICE b/services/audioparameterparser/NOTICE
new file mode 100644
index 0000000..44158cb
--- /dev/null
+++ b/services/audioparameterparser/NOTICE
@@ -0,0 +1,190 @@
+
+   Copyright (c) 2005-2024, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/services/audioparameterparser/ParameterParser.cpp b/services/audioparameterparser/ParameterParser.cpp
new file mode 100644
index 0000000..8d6a64f
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParameterParser.h"
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+
+namespace vendor::audio::parserservice {
+
+using ::aidl::android::hardware::audio::core::VendorParameter;
+using ParameterScope = ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope;
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameterIds(ParameterScope in_scope,
+                                                              const std::string& in_rawKeys,
+                                                              std::vector<std::string>*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope) << ", keys: " << in_rawKeys;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameters(ParameterScope in_scope,
+                                                            const std::string& in_rawKeysAndValues,
+                                                            std::vector<VendorParameter>*,
+                                                            std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+               << ", keys/values: " << in_rawKeysAndValues;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothA2dpReconfigureOffload(
+        const std::string& in_rawValue, std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothLeReconfigureOffload(
+        const std::string& in_rawValue, std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::processVendorParameters(
+        ParameterScope in_scope, const std::vector<VendorParameter>& in_parameters, std::string*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+               << ", parameters: " << ::android::internal::ToString(in_parameters);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace vendor::audio::parserservice
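
The example parser above only logs and returns. A real vendor implementation of parseVendorParameterIds() would at minimum split the raw key string into individual ids; a sketch under the assumption that the keys use the legacy semicolon-delimited AudioParameter convention:

    #include <sstream>
    #include <string>
    #include <vector>

    // Assumption: raw keys arrive semicolon-delimited, as in the legacy AudioParameter
    // key/value string format. Adjust the delimiter to the vendor's actual convention.
    static std::vector<std::string> splitRawKeys(const std::string& rawKeys) {
        std::vector<std::string> ids;
        std::istringstream stream(rawKeys);
        for (std::string key; std::getline(stream, key, ';');) {
            if (!key.empty()) {
                ids.push_back(key);
            }
        }
        return ids;
    }
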
diff --git a/services/audioparameterparser/ParameterParser.h b/services/audioparameterparser/ParameterParser.h
new file mode 100644
index 0000000..1e0e333
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+
+namespace vendor::audio::parserservice {
+
+class ParameterParser : public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+  public:
+    ParameterParser() = default;
+
+  private:
+    ::ndk::ScopedAStatus parseVendorParameterIds(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::string& in_rawKeys, std::vector<std::string>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus parseVendorParameters(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::string& in_rawKeysAndValues,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+                    out_syncParameters,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+                    out_asyncParameters) override;
+
+    ::ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+            const std::string& in_rawValue,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+            override;
+
+    ::ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(
+            const std::string& in_rawValue,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+            override;
+
+    ::ndk::ScopedAStatus processVendorParameters(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
+                    in_parameters,
+            std::string* _aidl_return) override;
+};
+
+}  // namespace vendor::audio::parserservice
diff --git a/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
new file mode 100644
index 0000000..b6aca5c
--- /dev/null
+++ b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
@@ -0,0 +1,6 @@
+service audio_parameter_parser_service /system_ext/bin/hw/android.hardware.audio.parameter_parser.example_service
+    class core
+    user audioserver
+    group media
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/services/audioparameterparser/main.cpp b/services/audioparameterparser/main.cpp
new file mode 100644
index 0000000..d22eb55
--- /dev/null
+++ b/services/audioparameterparser/main.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+#include "ParameterParser.h"
+
+using vendor::audio::parserservice::ParameterParser;
+
+int main() {
+    // This is a debug implementation; always enable debug logging.
+    android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+
+    auto parser = ndk::SharedRefBase::make<ParameterParser>();
+    const std::string parserFqn =
+            std::string().append(ParameterParser::descriptor).append("/default");
+    binder_status_t status =
+            AServiceManager_addService(parser->asBinder().get(), parserFqn.c_str());
+    if (status != STATUS_OK) {
+        LOG(ERROR) << "failed to register service for \"" << parserFqn << "\"";
+    }
+
+    ABinderProcess_joinThreadPool();
+    return EXIT_FAILURE;  // should not reach
+}
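For reference, a hypothetical client of the service registered above could obtain the "/default" instance through the NDK service manager. This is a sketch under the assumption that the caller links against the same AIDL interface; it is not code from this change:

    // Illustrative sketch only: look up the parser instance registered in main().
    #include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
    #include <android/binder_manager.h>

    using ::aidl::android::media::audio::IHalAdapterVendorExtension;

    std::shared_ptr<IHalAdapterVendorExtension> getVendorExtension() {
        const std::string name =
                std::string(IHalAdapterVendorExtension::descriptor).append("/default");
        // AServiceManager_checkService returns null if the instance is not registered.
        ndk::SpAIBinder binder(AServiceManager_checkService(name.c_str()));
        return IHalAdapterVendorExtension::fromBinder(binder);
    }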
diff --git a/services/audiopolicy/Android.bp b/services/audiopolicy/Android.bp
index e018dd3..66ba7e2 100644
--- a/services/audiopolicy/Android.bp
+++ b/services/audiopolicy/Android.bp
@@ -1,10 +1,11 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_library_headers {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index b164159..bfc3132 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -269,6 +269,7 @@
 
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+    virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) = 0;
 
     virtual status_t updatePolicyMix(
         const AudioMix& mix,
@@ -285,7 +286,8 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid) = 0;
+                                      uid_t uid,
+                                      bool internal = false) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
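The new getRegisteredPolicyMixes() entry point allows querying the mixes currently held by the policy engine. A hypothetical caller (the interface pointer and logging are assumptions, not from this patch) might use it as follows:

    // Illustrative sketch only: log how many policy mixes are registered.
    std::vector<AudioMix> mixes;
    if (audioPolicyInterface->getRegisteredPolicyMixes(mixes) == NO_ERROR) {
        ALOGD("%zu policy mixes currently registered", mixes.size());
    }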
diff --git a/services/audiopolicy/common/Android.bp b/services/audiopolicy/common/Android.bp
index 91701ad..a699b8b 100644
--- a/services/audiopolicy/common/Android.bp
+++ b/services/audiopolicy/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 8b76842..e8b04ce 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -35,6 +36,7 @@
         "src/TypeConverter.cpp",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 13b70e5..7c70877 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -364,7 +364,7 @@
 
     void dump(String8 *dst, int spaces, const char* extraInfo = nullptr) const override;
     virtual DeviceVector devices() const;
-    void setDevices(const DeviceVector &devices) { mDevices = devices; }
+    void setDevices(const DeviceVector &devices);
     bool sharesHwModuleWith(const sp<SwAudioOutputDescriptor>& outputDesc);
     virtual DeviceVector supportedDevices() const;
     virtual bool devicesSupportEncodedFormats(const DeviceTypeSet& deviceTypes);
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 7119b85..fe90a1e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -222,7 +222,8 @@
                            const struct audio_port_config &config,
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
-                           VolumeSource volumeSource);
+                           VolumeSource volumeSource,
+                           bool isInternal);
 
     ~SourceClientDescriptor() override = default;
 
@@ -248,6 +249,7 @@
     void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
+    bool isInternal() const override { return mIsInternal; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -268,34 +270,17 @@
      * behavior of AudioDeviceCallback.
      */
     bool mCloseOutput = false;
-};
-
-/**
- * @brief The InternalSourceClientDescriptor class
- * Specialized Client Descriptor for either a raw patch created from @see createAudioPatch API
- * or for internal audio patches managed by APM (e.g. phone call patches).
- * Whatever the bridge created (software or hardware), we need a client to track the activity
- * and manage volumes.
- * The Audio Patch requested sink is expressed as a preferred device which allows to route
- * the SwOutput. Then APM will performs checks on the UID (against UID of Audioserver) of the
- * requester to prevent rerouting SwOutput involved in raw patches.
- */
-class InternalSourceClientDescriptor: public SourceClientDescriptor
-{
-public:
-    InternalSourceClientDescriptor(
-            audio_port_handle_t portId, uid_t uid, audio_attributes_t attributes,
-            const struct audio_port_config &config, const sp<DeviceDescriptor>& srcDevice,
-             const sp<DeviceDescriptor>& sinkDevice,
-            product_strategy_t strategy, VolumeSource volumeSource) :
-        SourceClientDescriptor(
-            portId, uid, attributes, config, srcDevice, AUDIO_STREAM_PATCH, strategy,
-            volumeSource)
-    {
-        setPreferredDeviceId(sinkDevice->getId());
-    }
-    bool isInternal() const override { return true; }
-    ~InternalSourceClientDescriptor() override = default;
+    /**
+     * True for a specialized client descriptor used either for a raw patch created from the
+     * @see createAudioPatch API or for internal audio patches managed by APM
+     * (e.g. phone call patches).
+     * Whatever bridge is created (software or hardware), a client is needed to track the
+     * activity and manage volumes.
+     * The Audio Patch requested sink is expressed as a preferred device, which allows routing
+     * the SwOutput. APM then performs checks on the requester's UID (against the UID of
+     * audioserver) to prevent rerouting a SwOutput involved in raw patches.
+     */
+    bool mIsInternal = false;
 };
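With the InternalSourceClientDescriptor subclass removed, the same behavior is obtained by passing true for the new isInternal constructor argument. A sketch of the equivalent construction, mirroring the removed code above (variable names taken from that code):

    // Roughly equivalent to the removed InternalSourceClientDescriptor:
    sp<SourceClientDescriptor> client = new SourceClientDescriptor(
            portId, uid, attributes, config, srcDevice, AUDIO_STREAM_PATCH,
            strategy, volumeSource, true /*isInternal*/);
    client->setPreferredDeviceId(sinkDevice->getId());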
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 6c130fd..c502fc2 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -93,6 +93,8 @@
 
     void setEncapsulationInfoFromHal(AudioPolicyClientInterface *clientInterface);
 
+    void setPreferredConfig(const audio_config_base_t * preferredConfig);
+
     void dump(String8 *dst, int spaces, bool verbose = true) const;
 
 private:
@@ -107,6 +109,7 @@
     audio_format_t      mCurrentEncodedFormat;
     bool                mIsDynamic = false;
     std::string         mDeclaredAddress; // Original device address
+    std::optional<audio_config_base_t> mPreferredConfig;
 };
 
 class DeviceVector : public SortedVector<sp<DeviceDescriptor> >
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index cf20260..d206637 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -89,10 +89,12 @@
     status_t addProfile(const sp<IOProfile> &profile);
 
     status_t addOutputProfile(const std::string& name, const audio_config_t *config,
-            audio_devices_t device, const String8& address);
+            audio_devices_t device, const String8& address,
+            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE);
     status_t removeOutputProfile(const std::string& name);
     status_t addInputProfile(const std::string& name, const audio_config_t *config,
-            audio_devices_t device, const String8& address);
+            audio_devices_t device, const String8& address,
+            audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
     status_t removeInputProfile(const std::string& name);
 
     audio_module_handle_t getHandle() const { return mHandle; }
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index f3a9518..688772c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,10 +70,17 @@
         return mMixerBehaviors;
     }
 
+    enum CompatibilityScore {
+        NO_MATCH = 0,
+        PARTIAL_MATCH = 1,
+        EXACT_MATCH = 2
+    };
+
     /**
-     * @brief isCompatibleProfile: This method is used for input and direct output,
+     * @brief getCompatibilityScore: This method is used for input and direct output,
      * and is not used for other output.
-     * Checks if the IO profile is compatible with specified parameters.
+     * Returns a compatibility score measuring how well the IO profile matches
+     * the specified parameters.
      * For input, flags is interpreted as audio_input_flags_t.
      * TODO: merge audio_output_flags_t and audio_input_flags_t.
      *
@@ -86,18 +93,18 @@
      * @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
      * @param flags to be checked for compatibility
      * @param exactMatchRequiredForInputFlags true if exact match is required on flags
-     * @return true if the profile is compatible, false otherwise.
+     * @return the compatibility score of the IO profile for the given parameters.
      */
-    bool isCompatibleProfile(const DeviceVector &devices,
-                             uint32_t samplingRate,
-                             uint32_t *updatedSamplingRate,
-                             audio_format_t format,
-                             audio_format_t *updatedFormat,
-                             audio_channel_mask_t channelMask,
-                             audio_channel_mask_t *updatedChannelMask,
-                             // FIXME parameter type
-                             uint32_t flags,
-                             bool exactMatchRequiredForInputFlags = false) const;
+    CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
+                                             uint32_t samplingRate,
+                                             uint32_t *updatedSamplingRate,
+                                             audio_format_t format,
+                                             audio_format_t *updatedFormat,
+                                             audio_channel_mask_t channelMask,
+                                             audio_channel_mask_t *updatedChannelMask,
+                                             // FIXME parameter type
+                                             uint32_t flags,
+                                             bool exactMatchRequiredForInputFlags = false) const;
 
     /**
      * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
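Because the check now returns a graded score instead of a boolean, call sites can rank candidate profiles. A minimal selection sketch (the helper name and surrounding types are assumptions, not part of this patch):

    // Illustrative sketch only: prefer EXACT_MATCH over PARTIAL_MATCH over NO_MATCH.
    static sp<IOProfile> pickBestProfile(const std::vector<sp<IOProfile>>& profiles,
                                         const DeviceVector& devices, uint32_t samplingRate,
                                         audio_format_t format, audio_channel_mask_t channelMask,
                                         uint32_t flags) {
        sp<IOProfile> best;
        auto bestScore = IOProfile::NO_MATCH;
        for (const auto& profile : profiles) {
            const auto score = profile->getCompatibilityScore(
                    devices, samplingRate, nullptr /*updatedSamplingRate*/,
                    format, nullptr /*updatedFormat*/,
                    channelMask, nullptr /*updatedChannelMask*/, flags);
            if (score > bestScore) {
                bestScore = score;
                best = profile;
            }
        }
        return best;
    }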
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d027564..6537a00 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -778,6 +778,19 @@
     }
 }
 
+void SwAudioOutputDescriptor::setDevices(const android::DeviceVector &devices) {
+    if ((mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        for (auto device : mDevices) {
+            device->setPreferredConfig(nullptr);
+        }
+        auto config = getConfig();
+        for (auto device : devices) {
+            device->setPreferredConfig(&config);
+        }
+    }
+    mDevices = devices;
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index a467a8e..40c3911 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "APM_AudioPolicyMix"
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 
 #include <algorithm>
 #include <iterator>
@@ -28,6 +28,9 @@
 #include "PolicyAudioPort.h"
 #include "IOProfile.h"
 #include <AudioOutputDescriptor.h>
+#include <android_media_audiopolicy.h>
+
+namespace audio_flags = android::media::audiopolicy;
 
 namespace android {
 namespace {
@@ -190,6 +193,12 @@
                     mix.mDeviceType, mix.mDeviceAddress.c_str());
             return BAD_VALUE;
         }
+        if (audio_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGE("registerMix(): same mix already registered - skipping");
+                return BAD_VALUE;
+            }
+        }
     }
     if (!areMixCriteriaConsistent(mix.mCriteria)) {
         ALOGE("registerMix(): Mix contains inconsistent criteria "
@@ -212,12 +221,21 @@
 {
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
-        if (mix.mDeviceType == registeredMix->mDeviceType
+        if (audio_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
+        } else {
+            if (mix.mDeviceType == registeredMix->mDeviceType
                 && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
-            ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
-                    mix.mDeviceType, mix.mDeviceAddress.c_str());
-            removeAt(i);
-            return NO_ERROR;
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
         }
     }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 8b6866e..2aee501 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -96,12 +96,12 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource) :
+         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice)
+    mSrcDevice(srcDevice), mIsInternal(isInternal)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index fe25693..9f7b8fc 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -132,6 +132,20 @@
 {
     DeviceDescriptorBase::toAudioPortConfig(dstConfig, srcConfig);
     dstConfig->ext.device.hw_module = getModuleHandle();
+    if (mPreferredConfig.has_value()) {
+        if (mPreferredConfig->format != AUDIO_FORMAT_DEFAULT) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_FORMAT;
+            dstConfig->format = mPreferredConfig->format;
+        }
+        if (mPreferredConfig->sample_rate != 0) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
+            dstConfig->sample_rate = mPreferredConfig->sample_rate;
+        }
+        if (mPreferredConfig->channel_mask != AUDIO_CHANNEL_NONE) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
+            dstConfig->channel_mask = mPreferredConfig->channel_mask;
+        }
+    }
 }
 
 void DeviceDescriptor::toAudioPort(struct audio_port *port) const
@@ -183,6 +197,14 @@
     }
 }
 
+void DeviceDescriptor::setPreferredConfig(const audio_config_base_t* preferredConfig) {
+    if (preferredConfig == nullptr) {
+        mPreferredConfig.reset();
+    } else {
+        mPreferredConfig = *preferredConfig;
+    }
+}
+
 void DeviceDescriptor::dump(String8 *dst, int spaces, bool verbose) const
 {
     String8 extraInfo;
@@ -193,6 +215,13 @@
     std::string descBaseDumpStr;
     DeviceDescriptorBase::dump(&descBaseDumpStr, spaces, extraInfo.c_str(), verbose);
     dst->append(descBaseDumpStr.c_str());
+
+    if (mPreferredConfig.has_value()) {
+        dst->append(base::StringPrintf(
+                "%*sPreferred Config: format=%#x, channelMask=%#x, sampleRate=%u\n",
+                spaces, "", mPreferredConfig.value().format, mPreferredConfig.value().channel_mask,
+                mPreferredConfig.value().sample_rate).c_str());
+    }
 }
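The preferred config is folded into toAudioPortConfig() above only for fields that are explicitly set (non-default format, non-zero sample rate, non-NONE channel mask). A hypothetical usage sketch (device stands for an sp<DeviceDescriptor>; values are illustrative, not from this patch):

    // Illustrative sketch only: pin the device to 48 kHz stereo float, then clear it.
    audio_config_base_t preferred = {
            .sample_rate = 48000,
            .channel_mask = AUDIO_CHANNEL_OUT_STEREO,
            .format = AUDIO_FORMAT_PCM_FLOAT,
    };
    device->setPreferredConfig(&preferred);  // toAudioPortConfig() now reports these fields
    device->setPreferredConfig(nullptr);     // reset; defaults apply again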
 
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index cb45f54..6696b45 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -59,12 +59,13 @@
 }
 
 status_t HwModule::addOutputProfile(const std::string& name, const audio_config_t *config,
-                                    audio_devices_t device, const String8& address)
+                                    audio_devices_t device, const String8& address,
+                                    audio_output_flags_t flags)
 {
     sp<IOProfile> profile = new OutputProfile(name);
-
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
+    profile->setFlags(flags);
 
     sp<DeviceDescriptor> devDesc =
             new DeviceDescriptor(device, getTagForDevice(device), address.c_str());
@@ -128,11 +129,13 @@
 }
 
 status_t HwModule::addInputProfile(const std::string& name, const audio_config_t *config,
-                                   audio_devices_t device, const String8& address)
+                                   audio_devices_t device, const String8& address,
+                                   audio_input_flags_t flags)
 {
     sp<IOProfile> profile = new InputProfile(name);
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
+    profile->setFlags(flags);
 
     sp<DeviceDescriptor> devDesc =
             new DeviceDescriptor(device, getTagForDevice(device), address.c_str());
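With the added flags parameters, dynamically created profiles can now carry input/output flags. A hypothetical call (the module pointer, device address, and flag choice are illustrative, not from this patch):

    // Illustrative sketch only: register a dynamic MMAP-capable USB output profile.
    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
    config.sample_rate = 48000;
    config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    config.format = AUDIO_FORMAT_PCM_16_BIT;
    module->addOutputProfile("usb_device output", &config,
                             AUDIO_DEVICE_OUT_USB_DEVICE, String8("card=1;device=0"),
                             AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);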
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index dd222de..d9fbd89 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -33,17 +33,17 @@
     }
 }
 
-bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
-                                    uint32_t samplingRate,
-                                    uint32_t *updatedSamplingRate,
-                                    audio_format_t format,
-                                    audio_format_t *updatedFormat,
-                                    audio_channel_mask_t channelMask,
-                                    audio_channel_mask_t *updatedChannelMask,
-                                    // FIXME type punning here
-                                    uint32_t flags,
-                                    bool exactMatchRequiredForInputFlags) const
-{
+IOProfile::CompatibilityScore IOProfile::getCompatibilityScore(
+        const android::DeviceVector &devices,
+        uint32_t samplingRate,
+        uint32_t *updatedSamplingRate,
+        audio_format_t format,
+        audio_format_t *updatedFormat,
+        audio_channel_mask_t channelMask,
+        audio_channel_mask_t *updatedChannelMask,
+        // FIXME type punning here
+        uint32_t flags,
+        bool exactMatchRequiredForInputFlags) const {
     const bool isPlaybackThread =
             getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
     const bool isRecordThread =
@@ -51,13 +51,13 @@
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
     if (!areAllDevicesSupported(devices) ||
             !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
-        return false;
+        return NO_MATCH;
     }
 
     if (!audio_is_valid_format(format) ||
             (isPlaybackThread && (samplingRate == 0 || !audio_is_output_channel(channelMask))) ||
             (isRecordThread && (!audio_is_input_channel(channelMask)))) {
-         return false;
+         return NO_MATCH;
     }
 
     audio_format_t myUpdatedFormat = format;
@@ -69,32 +69,40 @@
         .channel_mask = channelMask,
         .format = format,
     };
+    auto result = NO_MATCH;
     if (isRecordThread)
     {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             if (checkExactAudioProfile(&config) != NO_ERROR) {
-                return false;
+                return result;
             }
-        } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
-                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
-            return false;
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else if (checkCompatibleAudioProfile(
+                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
+            result = PARTIAL_MATCH;
+        } else {
+            return result;
         }
     } else {
-        if (checkExactAudioProfile(&config) != NO_ERROR) {
-            return false;
+        if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else {
+            return result;
         }
     }
 
-    if (updatedSamplingRate != NULL) {
+    if (updatedSamplingRate != nullptr) {
         *updatedSamplingRate = myUpdatedSamplingRate;
     }
-    if (updatedFormat != NULL) {
+    if (updatedFormat != nullptr) {
         *updatedFormat = myUpdatedFormat;
     }
-    if (updatedChannelMask != NULL) {
+    if (updatedChannelMask != nullptr) {
         *updatedChannelMask = myUpdatedChannelMask;
     }
-    return true;
+    return result;
 }
 
 bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
@@ -156,9 +164,9 @@
         for (const auto sampleRate : profile->getSampleRates()) {
             for (const auto channelMask : profile->getChannels()) {
                 const audio_config_base_t config = {
-                        .format = profile->getFormat(),
                         .sample_rate = sampleRate,
-                        .channel_mask = channelMask
+                        .channel_mask = channelMask,
+                        .format = profile->getFormat(),
                 };
                 for (const auto mixerBehavior : mMixerBehaviors) {
                     mixerAttributes->push_back({
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..7d529df 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -18,6 +18,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -31,41 +32,49 @@
     vendor: true,
     src: ":a2dp_in_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "a2dp_audio_policy_configuration.xml",
     vendor: true,
     src: ":a2dp_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_configuration.xml",
     vendor: true,
     src: ":audio_policy_configuration_generic",
 }
+
 prebuilt_etc {
     name: "r_submix_audio_policy_configuration.xml",
     vendor: true,
     src: ":r_submix_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_volumes.xml",
     vendor: true,
     src: ":audio_policy_volumes",
 }
+
 prebuilt_etc {
     name: "default_volume_tables.xml",
     vendor: true,
     src: ":default_volume_tables",
 }
+
 prebuilt_etc {
     name: "surround_sound_configuration_5_0.xml",
     vendor: true,
     src: ":surround_sound_configuration_5_0",
 }
+
 prebuilt_etc {
     name: "usb_audio_policy_configuration.xml",
     vendor: true,
     src: ":usb_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "primary_audio_policy_configuration.xml",
     src: ":primary_audio_policy_configuration",
@@ -76,50 +85,62 @@
     name: "a2dp_in_audio_policy_configuration",
     srcs: ["a2dp_in_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "a2dp_audio_policy_configuration",
     srcs: ["a2dp_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "primary_audio_policy_configuration",
     srcs: ["primary_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "surround_sound_configuration_5_0",
     srcs: ["surround_sound_configuration_5_0.xml"],
 }
+
 filegroup {
     name: "default_volume_tables",
     srcs: ["default_volume_tables.xml"],
 }
+
 filegroup {
     name: "audio_policy_volumes",
     srcs: ["audio_policy_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic",
     srcs: ["audio_policy_configuration_generic.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic_configurable",
     srcs: ["audio_policy_configuration_generic_configurable.xml"],
 }
+
 filegroup {
     name: "usb_audio_policy_configuration",
     srcs: ["usb_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "r_submix_audio_policy_configuration",
     srcs: ["r_submix_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "bluetooth_audio_policy_configuration_7_0",
     srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "bluetooth_with_le_audio_policy_configuration_7_0",
     srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "hearing_aid_audio_policy_configuration_7_0",
     srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 0034a04..a93c816 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 12597de..0864e6a 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 5d1aa16..8c7b7db 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
index 9cee978..99d62a3 100644
--- a/services/audiopolicy/engine/config/tests/resources/Android.bp
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/interface/Android.bp b/services/audiopolicy/engine/interface/Android.bp
index 5dd5adb..b1f7666 100644
--- a/services/audiopolicy/engine/interface/Android.bp
+++ b/services/audiopolicy/engine/interface/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index eb2e2f4..d59ab5a 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -40,7 +41,7 @@
         "libaudiopolicyengineconfigurable_pfwwrapper",
 
     ],
-  shared_libs: [
+    shared_libs: [
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
         "libaudiopolicycomponents",
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..8dd13e8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -30,10 +31,12 @@
     vendor: true,
     src: ":audio_policy_engine_criteria",
 }
+
 filegroup {
     name: "audio_policy_engine_criterion_types_template",
     srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
 }
+
 filegroup {
     name: "audio_policy_engine_criteria",
     srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index e46b60f..fb1a71c 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,16 +43,19 @@
         ":audio_policy_engine_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -69,6 +73,7 @@
         ":audio_policy_configuration_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
@@ -79,18 +84,22 @@
         ":primary_audio_policy_configuration",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration",
     srcs: ["audio_policy_engine_configuration.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_volumes",
     srcs: ["audio_policy_engine_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index ad6eeb1..b9abb54 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -43,11 +44,13 @@
         ":audio_policy_engine_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -65,6 +68,7 @@
         ":audio_policy_configuration_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
@@ -75,10 +79,12 @@
         ":primary_audio_policy_configuration",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 773a99a..67a6128 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,21 +43,25 @@
         ":audio_policy_engine_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_stream_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_stream_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_default_stream_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_default_stream_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -74,6 +79,7 @@
         ":audio_policy_configuration_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
@@ -84,22 +90,27 @@
         ":primary_audio_policy_configuration",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration",
     srcs: ["audio_policy_engine_configuration.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_stream_volumes",
     srcs: ["audio_policy_engine_stream_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_default_stream_volumes",
     srcs: ["audio_policy_engine_default_stream_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..7fe111f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -31,18 +32,21 @@
     src: ":PolicyClass",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
     src: ":PolicySubsystem",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem-CommonTypes.xml",
     vendor: true,
     src: ":buildcommontypesstructure_gen",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 genrule {
     name: "buildcommontypesstructure_gen",
     defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +56,42 @@
     name: "product_strategies_structure_template",
     srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
 }
+
 filegroup {
     name: "PolicySubsystem",
     srcs: ["examples/common/Structure/PolicySubsystem.xml"],
 }
+
 filegroup {
     name: "PolicySubsystem-no-strategy",
     srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
 }
+
 filegroup {
     name: "common_types_structure_template",
     srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
 }
+
 filegroup {
     name: "PolicyClass",
     srcs: ["examples/common/Structure/PolicyClass.xml"],
 }
+
 filegroup {
     name: "volumes.pfw",
     srcs: ["examples/Settings/volumes.pfw"],
 }
+
 filegroup {
     name: "device_for_input_source.pfw",
     srcs: ["examples/Settings/device_for_input_source.pfw"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.user.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 7d2d293..38451f2 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -67,6 +69,7 @@
         "PolicySubsystem-CommonTypes.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
@@ -78,6 +81,7 @@
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -86,11 +90,13 @@
         "Settings/device_for_product_strategies.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index f825e5f..eae6ae2 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -28,6 +28,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -43,6 +44,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -68,6 +70,7 @@
         "PolicySubsystem-CommonTypes.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
@@ -79,6 +82,7 @@
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -87,11 +91,13 @@
         "Settings/device_for_product_strategies.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 4a83cbc..4e8654b 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -67,6 +69,7 @@
         "PolicySubsystem-CommonTypes.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
@@ -78,6 +81,7 @@
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -95,11 +99,13 @@
         "Settings/device_for_product_strategy_patch.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 89ab892..e279a8f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -57,10 +58,12 @@
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
@@ -69,6 +72,7 @@
         ":buildcommontypesstructure_gen",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -76,6 +80,7 @@
         ":volumes.pfw",
     ],
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 4880547..47b8b54 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -45,6 +46,7 @@
         "PolicySubsystem-CommonTypes.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
@@ -56,10 +58,12 @@
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
@@ -68,6 +72,7 @@
         ":buildcommontypesstructure_gen",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -76,6 +81,7 @@
         ":device_for_input_source.pfw",
     ],
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index f7159c5..aa2163e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -40,6 +41,6 @@
         "liblog",
         "libutils",
         "libmedia_helper",
-        "libparameter"
+        "libparameter",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3aec064..2f77372 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -36,13 +37,13 @@
     name: "buildpolicycriteriontypesrule",
     tools: ["buildPolicyCriterionTypes"],
     cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
-         "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
-         "$(location buildPolicyCriterionTypes) " +
-         " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
-         " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
-         "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
-         "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
-         "--outputfile $(out)",
+        "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+        "$(location buildPolicyCriterionTypes) " +
+        " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+        "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+        "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
+        "--outputfile $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // @todo uncomment if 1428659 is merged":android_audio_base_header_file",
@@ -81,17 +82,17 @@
         "domainGeneratorConnector",
     ],
     cmd: "mkdir -p $(genDir)/Structure/Policy && " +
-         "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
-         "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
-         "$(location domainGeneratorPolicy) " +
-         "--validate " +
-         "--domain-generator-tool $(location domainGeneratorConnector) " +
-         "--toplevel-config $(genDir)/top_level " +
-         "--criteria $(location :audio_policy_engine_criteria) " +
-         "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
-         "--add-edds $(locations :edd_files) " +
-         "--schemas-dir external/parameter-framework/upstream/schemas " +
-         " > $(out)",
+        "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
+        "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+        "$(location domainGeneratorPolicy) " +
+        "--validate " +
+        "--domain-generator-tool $(location domainGeneratorConnector) " +
+        "--toplevel-config $(genDir)/top_level " +
+        "--criteria $(location :audio_policy_engine_criteria) " +
+        "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
+        "--add-edds $(locations :edd_files) " +
+        "--schemas-dir external/parameter-framework/upstream/schemas " +
+        " > $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // ":audio_policy_pfw_toplevel",
@@ -118,11 +119,11 @@
 genrule_defaults {
     name: "buildstrategiesstructurerule",
     tools: ["buildStrategiesStructureFile"],
-    cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&"+
-         "$(location buildStrategiesStructureFile) " +
-         "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml "+
-         "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
-         "--outputfile $(out)",
+    cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&" +
+        "$(location buildStrategiesStructureFile) " +
+        "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml " +
+        "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
+        "--outputfile $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // ":audio_policy_engine_configuration_files",
@@ -146,9 +147,9 @@
     name: "buildcommontypesstructurerule",
     tools: ["buildCommonTypesStructureFile"],
     cmd: "$(location buildCommonTypesStructureFile) " +
-         "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
-         "--commontypesstructure $(location :common_types_structure_template) " +
-         "--outputfile $(out)",
+        "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        "--commontypesstructure $(location :common_types_structure_template) " +
+        "--outputfile $(out)",
     srcs: [
         ":common_types_structure_template",
         ":libaudio_system_audio_base",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 0ef0b82..a897880 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7d4ccab..98adff0 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 59a704b..f305c39 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -20,6 +20,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -38,16 +39,19 @@
         ":audio_policy_engine_product_strategies.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "phone/audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_stream_volumes.xml",
     vendor: true,
     src: "phone/audio_policy_engine_stream_volumes.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_default_stream_volumes.xml",
     vendor: true,
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index fd240e3..fca02e4 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 38a2cde..8b37d36 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -16,6 +16,10 @@
  *
  ******************************************************************************/
 
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+}
+
 cc_defaults {
     name: "audiopolicy_aidl_fuzzer_defaults",
     shared_libs: [
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
index 22ee256..2a2b83b 100644
--- a/services/audiopolicy/fuzzer/resources/Android.bp
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index a1785da..2f46d48 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -22,6 +23,7 @@
     export_include_dirs: ["."],
 
     shared_libs: [
+        "com.android.media.audio-aconfig-cc",
         "libaudiofoundation",
         "libaudiopolicycomponents",
         "libcutils",
@@ -42,6 +44,9 @@
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "audioclient-types-aidl-cpp",
+        // Flag support
+        "android.media.audiopolicy-aconfig-cc",
+        "com.android.media.audioserver-aconfig-cc",
     ],
 
     header_libs: [
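
The aconfig libraries added to shared_libs above ("com.android.media.audio-aconfig-cc", "android.media.audiopolicy-aconfig-cc" and "com.android.media.audioserver-aconfig-cc") provide the generated C++ flag accessors that the AudioPolicyManager.cpp changes below call directly, e.g. com::android::media::audioserver::direct_track_reprioritization() and audio_flags::audio_mix_ownership(). A minimal sketch of that flag-guarded pattern follows; the inline namespace stub is an assumption made only so the snippet compiles on its own, real code includes the generated <com_android_media_audioserver.h> header instead.

    #include <iostream>

    // Stand-in for the generated aconfig flag accessor; real builds get it from
    // <com_android_media_audioserver.h> via "com.android.media.audioserver-aconfig-cc".
    namespace com::android::media::audioserver {
    inline bool direct_track_reprioritization() { return false; }
    }  // namespace com::android::media::audioserver

    int main() {
        // Flag-guarded code path, in the same shape as the AudioPolicyManager.cpp hunks:
        // legacy behavior while the flag is off, new behavior once it is flipped on.
        if (com::android::media::audioserver::direct_track_reprioritization()) {
            std::cout << "flag on: try to free an output on this profile before failing\n";
        } else {
            std::cout << "flag off: fail the request as before\n";
        }
        return 0;
    }
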
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 135548f..3bebb11 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -41,6 +41,9 @@
 
 #include <Serializer.h>
 #include <android/media/audio/common/AudioPort.h>
+#include <com_android_media_audio.h>
+#include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioParameter.h>
@@ -56,6 +59,9 @@
 
 namespace android {
 
+
+namespace audio_flags = android::media::audiopolicy;
+
 using android::media::audio::common::AudioDevice;
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
@@ -123,7 +129,8 @@
     device->toAudioPort(&devicePort);
     if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
             status != OK) {
-        ALOGE("Error %d while setting connected state for device %s", state,
+        ALOGE("Error %d while setting connected state for device %s",
+                static_cast<int>(state),
                 device->getDeviceTypeAddr().toString(false).c_str());
     }
 }
@@ -780,7 +787,11 @@
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-    mCallRxSourceClient = startAudioSourceInternal(&source, &aa, 0/*uid*/);
+
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
+    mCallRxSourceClient = mAudioSources.valueFor(portId);
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
@@ -813,9 +824,11 @@
 
     struct audio_port_config source = {};
     srcDevice->toAudioPortConfig(&source);
-    mCallTxSourceClient = new InternalSourceClientDescriptor(
-                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, sinkDevice,
-                mCommunnicationStrategy, toVolumeSource(aa));
+    mCallTxSourceClient = new SourceClientDescriptor(
+                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
+                mCommunnicationStrategy, toVolumeSource(aa), true);
+    mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
+
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
     status_t status = connectAudioSourceToSink(
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
@@ -1042,11 +1055,11 @@
     sp<IOProfile> profile;
     for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-             if (!curProfile->isCompatibleProfile(devices,
+             if (curProfile->getCompatibilityScore(devices,
                      samplingRate, NULL /*updatedSamplingRate*/,
                      format, NULL /*updatedFormat*/,
                      channelMask, NULL /*updatedChannelMask*/,
-                     flags)) {
+                     flags) == IOProfile::NO_MATCH) {
                  continue;
              }
              // reject profiles not corresponding to a device currently available
@@ -1508,11 +1521,30 @@
     }
 
     if (!profile->canOpenNewIo()) {
+        if (!com::android::media::audioserver::direct_track_reprioritization()) {
+            return NAME_NOT_FOUND;
+        } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
+            // MMAP gracefully handles lack of an exclusive track resource by mixing
+            // above the audio framework. For AAudio to know that the limit is reached,
+            // return an error.
+            return NAME_NOT_FOUND;
+        } else {
+            // Close outputs on this profile, if available, to free resources for this request
+            for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
+                const auto desc = mOutputs.valueAt(i);
+                if (desc->mProfile == profile) {
+                    closeOutput(desc->mIoHandle);
+                }
+            }
+        }
+    }
+
+    // Unable to close streams to find free resources for this request
+    if (!profile->canOpenNewIo()) {
         return NAME_NOT_FOUND;
     }
 
-    sp<SwAudioOutputDescriptor> outputDesc =
-            new SwAudioOutputDescriptor(profile, mpClientInterface);
+    auto outputDesc = sp<SwAudioOutputDescriptor>::make(profile, mpClientInterface);
 
     // An MSD patch may be using the only output stream that can service this request. Release
     // all MSD patches to prioritize this request over any active output on MSD.
@@ -1605,9 +1637,13 @@
         *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
     }
 
+    // Use the spatializer output if the content can be spatialized, no preferred mixer
+    // was specified and offload or direct playback is not explicitly requested.
     *isSpatialized = false;
     if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())) {
+            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
+            && prefMixerConfigInfo == nullptr
+            && ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0)) {
         *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
@@ -3260,7 +3296,8 @@
         ALOGD("%s: no group matching with %s", __FUNCTION__, toString(attributes).c_str());
         return BAD_VALUE;
     }
-    ALOGV("%s: group %d matching with %s", __FUNCTION__, group, toString(attributes).c_str());
+    ALOGV("%s: group %d matching with %s index %d",
+            __FUNCTION__, group, toString(attributes).c_str(), index);
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
@@ -3299,6 +3336,7 @@
     // requested device or one of the devices selected by the engine for this stream
     // - For default requested device (AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME), apply volume only if
     // no specific device volume value exists for currently selected device.
+    // - Only apply the volume if the requested device is the desired device for volume control.
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceTypeSet curDevices = desc->devices().types();
@@ -3318,7 +3356,8 @@
         if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
             curSrcDevices.insert(device);
             applyVolume = (curSrcDevices.find(
-                    Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end());
+                    Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end())
+                    && Volume::getDeviceForVolume(curSrcDevices) == device;
         } else {
             applyVolume = !curves.hasVolumeIndexForDevice(curSrcDevice);
         }
@@ -3377,6 +3416,21 @@
             status = volStatus;
         }
     }
+
+    // update voice volume if an active call route exists
+    if (mCallRxSourceClient != nullptr && mCallRxSourceClient->isConnected()
+            && (curSrcDevices.find(
+                Volume::getDeviceForVolume({mCallRxSourceClient->sinkDevice()->type()}))
+                != curSrcDevices.end())) {
+        bool isVoiceVolSrc;
+        bool isBtScoVolSrc;
+        if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
+                isVoiceVolSrc, isBtScoVolSrc, __func__)
+                && (isVoiceVolSrc || isBtScoVolSrc)) {
+            setVoiceVolume(index, curves, isVoiceVolSrc, 0);
+        }
+    }
+
     mpClientInterface->onAudioVolumeGroupChanged(group, 0 /*flags*/);
     return status;
 }
@@ -3624,6 +3678,7 @@
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
+    Vector<AudioMix> registeredMixes;
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
         AudioMix mix = mixes[i];
@@ -3673,9 +3728,13 @@
             outputConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
             inputConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
             rSubmixModule->addOutputProfile(address.c_str(), &outputConfig,
-                    AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address);
+                    AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address,
+                    audio_is_linear_pcm(outputConfig.format)
+                        ? AUDIO_OUTPUT_FLAG_NONE : AUDIO_OUTPUT_FLAG_DIRECT);
             rSubmixModule->addInputProfile(address.c_str(), &inputConfig,
-                    AUDIO_DEVICE_IN_REMOTE_SUBMIX, address);
+                    AUDIO_DEVICE_IN_REMOTE_SUBMIX, address,
+                    audio_is_linear_pcm(inputConfig.format)
+                        ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_DIRECT);
 
             if ((res = setDeviceConnectionStateInt(deviceTypeToMakeAvailable,
                     AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
@@ -3743,11 +3802,19 @@
                 break;
             } else {
                 checkOutputs = true;
+                registeredMixes.add(mix);
             }
         }
     }
     if (res != NO_ERROR) {
-        unregisterPolicyMixes(mixes);
+        if (audio_flags::audio_mix_ownership()) {
+            // Only unregister mixes that were actually registered to not accidentally unregister
+            // mixes that already existed previously.
+            unregisterPolicyMixes(registeredMixes);
+            registeredMixes.clear();
+        } else {
+            unregisterPolicyMixes(mixes);
+        }
     } else if (checkOutputs) {
         checkForDeviceAndOutputChanges();
         updateCallAndOutputRouting();
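
registerPolicyMixes() above now records each successfully registered mix in registeredMixes so that, with audio_mix_ownership() enabled, a failure part-way through rolls back only the mixes this call actually registered instead of the whole input vector. Below is a minimal sketch of that partial-failure rollback pattern; registerOne()/unregisterOne() and the std::string "mixes" are hypothetical stand-ins for the policy-mix collection.

    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for the real registration primitives.
    static bool registerOne(const std::string& mix) { return mix != "bad"; }
    static void unregisterOne(const std::string& mix) { std::cout << "rollback " << mix << "\n"; }

    // Register every mix; on the first failure, undo only what this call registered,
    // leaving mixes that existed before the call untouched.
    static bool registerAll(const std::vector<std::string>& mixes) {
        std::vector<std::string> registered;
        for (const auto& mix : mixes) {
            if (!registerOne(mix)) {
                for (const auto& done : registered) unregisterOne(done);
                return false;
            }
            registered.push_back(mix);
        }
        return true;
    }

    int main() {
        registerAll({"mixA", "bad", "mixB"});  // rolls back "mixA" only
        return 0;
    }
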
@@ -3758,6 +3825,7 @@
 status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes)
 {
     ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
+    status_t endResult = NO_ERROR;
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
@@ -3770,6 +3838,7 @@
                         AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
                 if (rSubmixModule == 0) {
                     res = INVALID_OPERATION;
+                    endResult = INVALID_OPERATION;
                     continue;
                 }
             }
@@ -3778,6 +3847,7 @@
 
             if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
                 res = INVALID_OPERATION;
+                endResult = INVALID_OPERATION;
                 continue;
             }
 
@@ -3790,6 +3860,7 @@
                     if (res != OK) {
                         ALOGE("Error making RemoteSubmix device unavailable for mix "
                               "with type %d, address %s", device, address.c_str());
+                        endResult = INVALID_OPERATION;
                     }
                 }
             }
@@ -3799,19 +3870,47 @@
         } else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
             if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
                 res = INVALID_OPERATION;
+                endResult = INVALID_OPERATION;
                 continue;
             } else {
                 checkOutputs = true;
             }
         }
     }
-    if (res == NO_ERROR && checkOutputs) {
-        checkForDeviceAndOutputChanges();
-        updateCallAndOutputRouting();
+    if (audio_flags::audio_mix_ownership()) {
+        res = endResult;
+        if (res == NO_ERROR && checkOutputs) {
+            checkForDeviceAndOutputChanges();
+            updateCallAndOutputRouting();
+        }
+    } else {
+        if (res == NO_ERROR && checkOutputs) {
+            checkForDeviceAndOutputChanges();
+            updateCallAndOutputRouting();
+        }
     }
     return res;
 }
 
+status_t AudioPolicyManager::getRegisteredPolicyMixes(std::vector<AudioMix>& _aidl_return) {
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    _aidl_return.clear();
+    _aidl_return.reserve(mPolicyMixes.size());
+    for (const auto &policyMix: mPolicyMixes) {
+        _aidl_return.emplace_back(policyMix->mCriteria, policyMix->mMixType,
+                             policyMix->mFormat, policyMix->mRouteFlags, policyMix->mDeviceAddress,
+                             policyMix->mCbFlags);
+        _aidl_return.back().mDeviceType = policyMix->mDeviceType;
+        _aidl_return.back().mToken = policyMix->mToken;
+    }
+
+    ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return->size());
+    return OK;
+}
+
 status_t AudioPolicyManager::updatePolicyMix(
             const AudioMix& mix,
             const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
@@ -4433,19 +4532,19 @@
             outputDevices = getMsdAudioOutDevices();
         }
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-            if (!curProfile->isCompatibleProfile(outputDevices,
+            if (curProfile->getCompatibilityScore(outputDevices,
                     config->sample_rate, nullptr /*updatedSamplingRate*/,
                     config->format, nullptr /*updatedFormat*/,
                     config->channel_mask, nullptr /*updatedChannelMask*/,
-                    flags)) {
+                    flags) == IOProfile::NO_MATCH) {
                 continue;
             }
             // reject profiles not corresponding to a device currently available
             if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
                 continue;
             }
-            if ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
-                        != AUDIO_OUTPUT_FLAG_NONE) {
+            if (offloadPossible && ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
+                        != AUDIO_OUTPUT_FLAG_NONE)) {
                 if ((directMode & AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED)
                         != AUDIO_DIRECT_NOT_SUPPORTED) {
                     // Already reports offload gapless supported. No need to report offload support.
@@ -4543,15 +4642,17 @@
     for (const auto& hwModule : mHwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (curProfile->hasDynamicAudioProfile()
-                    && curProfile->isCompatibleProfile(devices,
-                                                       mixerAttributes->config.sample_rate,
-                                                       nullptr /*updatedSamplingRate*/,
-                                                       mixerAttributes->config.format,
-                                                       nullptr /*updatedFormat*/,
-                                                       mixerAttributes->config.channel_mask,
-                                                       nullptr /*updatedChannelMask*/,
-                                                       flags,
-                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                    && curProfile->getCompatibilityScore(
+                            devices,
+                            mixerAttributes->config.sample_rate,
+                            nullptr /*updatedSamplingRate*/,
+                            mixerAttributes->config.format,
+                            nullptr /*updatedFormat*/,
+                            mixerAttributes->config.channel_mask,
+                            nullptr /*updatedChannelMask*/,
+                            flags,
+                            false /*exactMatchRequiredForInputFlags*/)
+                            != IOProfile::NO_MATCH) {
                 profile = curProfile;
                 break;
             }
@@ -4832,9 +4933,11 @@
     audio_attributes_t attributes = attributes_initializer(AUDIO_USAGE_MEDIA);
     const struct audio_port_config *source = &patch->sources[0];
     sp<SourceClientDescriptor> sourceDesc =
-            new InternalSourceClientDescriptor(
-                portId, uid, attributes, *source, srcDevice, sinkDevice,
-                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes));
+            new SourceClientDescriptor(
+                portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
+                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
+                true);
+    sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
             connectAudioSourceToSink(sourceDesc, sinkDevice, patch, *handle, uid, 0 /* delayMs */);
@@ -4954,14 +5057,15 @@
                 return BAD_VALUE;
             }
 
-            if (!outputDesc->mProfile->isCompatibleProfile(DeviceVector(devDesc),
-                                                           patch->sources[0].sample_rate,
-                                                           NULL,  // updatedSamplingRate
-                                                           patch->sources[0].format,
-                                                           NULL,  // updatedFormat
-                                                           patch->sources[0].channel_mask,
-                                                           NULL,  // updatedChannelMask
-                                                           AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+            if (outputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(devDesc),
+                    patch->sources[0].sample_rate,
+                    nullptr,  // updatedSamplingRate
+                    patch->sources[0].format,
+                    nullptr,  // updatedFormat
+                    patch->sources[0].channel_mask,
+                    nullptr,  // updatedChannelMask
+                    AUDIO_OUTPUT_FLAG_NONE /*FIXME*/) == IOProfile::NO_MATCH) {
                 ALOGV("%s profile not supported for device %08x", __func__, devDesc->type());
                 return INVALID_OPERATION;
             }
@@ -5009,17 +5113,18 @@
                 return BAD_VALUE;
             }
 
-            if (!inputDesc->mProfile->isCompatibleProfile(DeviceVector(device),
-                                                          patch->sinks[0].sample_rate,
-                                                          NULL, /*updatedSampleRate*/
-                                                          patch->sinks[0].format,
-                                                          NULL, /*updatedFormat*/
-                                                          patch->sinks[0].channel_mask,
-                                                          NULL, /*updatedChannelMask*/
-                                                          // FIXME for the parameter type,
-                                                          // and the NONE
-                                                          (audio_output_flags_t)
-                                                            AUDIO_INPUT_FLAG_NONE)) {
+            if (inputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(device),
+                    patch->sinks[0].sample_rate,
+                    nullptr, /*updatedSampleRate*/
+                    patch->sinks[0].format,
+                    nullptr, /*updatedFormat*/
+                    patch->sinks[0].channel_mask,
+                    nullptr, /*updatedChannelMask*/
+                    // FIXME for the parameter type,
+                    // and the NONE
+                    (audio_output_flags_t)
+                    AUDIO_INPUT_FLAG_NONE) == IOProfile::NO_MATCH) {
                 return INVALID_OPERATION;
             }
             // TODO: reconfigure output format and channels here
@@ -5501,7 +5606,7 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid)
+                                              uid_t uid, bool internal)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5534,7 +5639,7 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes));
+                                   toVolumeSource(*attributes), internal);
 
     status_t status = connectAudioSource(sourceDesc);
     if (status == NO_ERROR) {
@@ -5543,18 +5648,6 @@
     return status;
 }
 
-sp<SourceClientDescriptor> AudioPolicyManager::startAudioSourceInternal(
-        const struct audio_port_config *source, const audio_attributes_t *attributes, uid_t uid)
-{
-    ALOGV("%s", __FUNCTION__);
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-
-    status_t status = startAudioSource(source, attributes, &portId, uid);
-    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
-    return mAudioSources.valueFor(portId);
-}
-
-
 status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
@@ -5963,15 +6056,26 @@
     // The caller can have the audio config criteria ignored by either passing a null ptr or
     // the AUDIO_CONFIG_INITIALIZER value.
     // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks and PCM format
+    // some positional channel masks and PCM format and for stereo if low latency performance
+    // mode is not requested.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
+        static const bool stereo_spatialization_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+        const bool channel_mask_spatialized =
+                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
+                ? audio_channel_mask_contains_stereo(config->channel_mask)
+                : audio_is_channel_mask_spatialized(config->channel_mask);
+        if (!channel_mask_spatialized) {
             return false;
         }
         if (!audio_is_linear_pcm(config->format)) {
             return false;
         }
+        if (config->channel_mask == AUDIO_CHANNEL_OUT_STEREO
+                && ((attr->flags & AUDIO_FLAG_LOW_LATENCY) != 0)) {
+            return false;
+        }
     }
 
     sp<IOProfile> profile =
@@ -6747,6 +6851,12 @@
         closingOutput->stop();
     }
     closingOutput->close();
+    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
+            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        for (const auto device : closingOutput->devices()) {
+            device->setPreferredConfig(nullptr);
+        }
+    }
 
     removeOutput(output);
     mPreviousOutputs = mOutputs;
@@ -7636,9 +7746,6 @@
     // Choose an input profile based on the requested capture parameters: select the first available
     // profile supporting all requested parameters.
 // The flags can be ignored if it doesn't contain a must match flag.
-    //
-    // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
-    // the best matching profile, not the first one.
 
     using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
     const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
@@ -7655,27 +7762,35 @@
             for (const auto& profile : hwModule->getInputProfiles()) {
                 // profile->log();
                 //updatedFormat = format;
-                if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
-                                                 &samplingRate  /*updatedSamplingRate*/,
-                                                 format,
-                                                 &format,       /*updatedFormat*/
-                                                 channelMask,
-                                                 &channelMask   /*updatedChannelMask*/,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 true /*exactMatchRequiredForInputFlags*/)) {
+                if (profile->getCompatibilityScore(
+                        DeviceVector(device),
+                        samplingRate,
+                        &updatedSamplingRate,
+                        format,
+                        &updatedFormat,
+                        channelMask,
+                        &updatedChannelMask,
+                        // FIXME ugly cast
+                        (audio_output_flags_t) flags,
+                        true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+                    samplingRate = updatedSamplingRate;
+                    format = updatedFormat;
+                    channelMask = updatedChannelMask;
                     return profile;
                 }
-                if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
-                                                 samplingRate,
-                                                 &updatedSamplingRate,
-                                                 format,
-                                                 &updatedFormat,
-                                                 channelMask,
-                                                 &updatedChannelMask,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 false /*exactMatchRequiredForInputFlags*/)) {
+                if (firstInexact == nullptr
+                        && profile->getCompatibilityScore(
+                                DeviceVector(device),
+                                samplingRate,
+                                &updatedSamplingRate,
+                                format,
+                                &updatedFormat,
+                                channelMask,
+                                &updatedChannelMask,
+                                // FIXME ugly cast
+                                (audio_output_flags_t) flags,
+                                false /*exactMatchRequiredForInputFlags*/)
+                                != IOProfile::NO_MATCH) {
                     firstInexact = profile;
                 }
             }
@@ -7867,26 +7982,16 @@
                outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
         return NO_ERROR;
     }
-    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
-    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
-    bool isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
-    bool isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
-    bool isScoRequested = isScoRequestedForComm();
-    bool isHAUsed = isHearingAidUsedForComm();
-
-    // do not change in call volume if bluetooth is connected and vice versa
-    // if sco and call follow same curves, bypass forceUseForComm
-    if ((callVolSrc != btScoVolSrc) &&
-            ((isVoiceVolSrc && isScoRequested) ||
-             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
-            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
-        ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
-             volumeSource, isScoRequested ? " " : " not ");
+    bool isVoiceVolSrc;
+    bool isBtScoVolSrc;
+    if (!isVolumeConsistentForCalls(
+            volumeSource, deviceTypes, isVoiceVolSrc, isBtScoVolSrc, __func__)) {
         // Do not return an error here as AudioService will always set both voice call
-        // and bluetooth SCO volumes due to stream aliasing.
+        // and Bluetooth SCO volumes due to stream aliasing.
         return NO_ERROR;
     }
+
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
         index = curves.getVolumeIndex(deviceTypes);
@@ -7911,21 +8016,51 @@
             deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
-        float voiceVolume;
-        // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed by the headset
-        if (isVoiceVolSrc) {
-            voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
-        } else {
-            voiceVolume = index == 0 ? 0.0 : 1.0;
-        }
-        if (voiceVolume != mLastVoiceVolume) {
-            mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
-            mLastVoiceVolume = voiceVolume;
-        }
+        setVoiceVolume(index, curves, isVoiceVolSrc, delayMs);
     }
     return NO_ERROR;
 }
 
+void AudioPolicyManager::setVoiceVolume(
+        int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs) {
+    float voiceVolume;
+    // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed
+    // by the headset
+    if (isVoiceVolSrc) {
+        voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
+    } else {
+        voiceVolume = index == 0 ? 0.0 : 1.0;
+    }
+    if (voiceVolume != mLastVoiceVolume) {
+        mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
+        mLastVoiceVolume = voiceVolume;
+    }
+}
+
+bool AudioPolicyManager::isVolumeConsistentForCalls(VolumeSource volumeSource,
+                                                   const DeviceTypeSet& deviceTypes,
+                                                   bool& isVoiceVolSrc,
+                                                   bool& isBtScoVolSrc,
+                                                   const char* caller) {
+    const VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    const bool isScoRequested = isScoRequestedForComm();
+    const bool isHAUsed = isHearingAidUsedForComm();
+
+    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
+
+    if ((callVolSrc != btScoVolSrc) &&
+            ((isVoiceVolSrc && isScoRequested) ||
+             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
+            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
+        ALOGV("%s cannot set volume group %d volume when is%srequested for comm", caller,
+             volumeSource, isScoRequested ? " " : " not ");
+        return false;
+    }
+    return true;
+}
+
 void AudioPolicyManager::applyStreamVolumes(const sp<AudioOutputDescriptor>& outputDesc,
                                             const DeviceTypeSet& deviceTypes,
                                             int delayMs,
@@ -8361,6 +8496,12 @@
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
     }
+    if ((flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        auto portConfig = desc->getConfig();
+        for (const auto& device : devices) {
+            device->setPreferredConfig(&portConfig);
+        }
+    }
 
     // Here is where the out_set_parameters() for card & device gets called
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
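
The hunks above replace the boolean isCompatibleProfile() checks with getCompatibilityScore(), comparing the result against IOProfile::NO_MATCH and, in getInputProfile(), preferring an EXACT_MATCH while keeping the first partially matching profile as a fallback. The sketch below shows just that selection pattern; the Profile struct, the precomputed score field and the names in main() are simplified stand-ins, only the NO_MATCH/EXACT_MATCH idea is taken from the diff.

    #include <iostream>
    #include <string>
    #include <vector>

    // Simplified stand-in for IOProfile's compatibility result.
    enum class Score { NO_MATCH, PARTIAL_MATCH, EXACT_MATCH };

    struct Profile {
        std::string name;
        Score score;  // precomputed here; the real code derives it from the request
    };

    // Return the first exact match, otherwise the first profile that matches at all.
    static const Profile* pickProfile(const std::vector<Profile>& profiles) {
        const Profile* firstInexact = nullptr;
        for (const auto& p : profiles) {
            if (p.score == Score::EXACT_MATCH) {
                return &p;
            }
            if (firstInexact == nullptr && p.score != Score::NO_MATCH) {
                firstInexact = &p;
            }
        }
        return firstInexact;  // nullptr when nothing matches
    }

    int main() {
        const std::vector<Profile> profiles = {
            {"usb-partial", Score::PARTIAL_MATCH},
            {"primary-exact", Score::EXACT_MATCH},
        };
        const Profile* chosen = pickProfile(profiles);
        std::cout << (chosen ? chosen->name : "none") << "\n";  // prints "primary-exact"
        return 0;
    }
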
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index a1c8f62..a3232a2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -292,6 +292,7 @@
 
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+        virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) override;
         virtual status_t updatePolicyMix(
                 const AudioMix& mix,
                 const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
@@ -339,7 +340,8 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid);
+                                          uid_t uid,
+                                          bool internal = false);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -580,6 +582,20 @@
                                            DeviceTypeSet deviceTypes,
                                            int delayMs = 0, bool force = false);
 
+        void setVoiceVolume(int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs);
+
+        // Returns true if the supplied volume source and devices are consistent with the
+        // call volume rules:
+        // if Bluetooth SCO and voice call use different volume curves:
+        // - do not apply the voice call volume if Bluetooth SCO is used for the call;
+        // - do not apply the Bluetooth SCO volume if neither SCO nor a Hearing Aid is used for the call.
+        // Also updates the booleans isVoiceVolSrc and isBtScoVolSrc according to the
+        // volume source supplied.
+        bool isVolumeConsistentForCalls(VolumeSource volumeSource,
+                                       const DeviceTypeSet& deviceTypes,
+                                       bool& isVoiceVolSrc,
+                                       bool& isBtScoVolSrc,
+                                       const char* caller);
         // apply all stream volumes to the specified output and device
         void applyStreamVolumes(const sp<AudioOutputDescriptor>& outputDesc,
                                 const DeviceTypeSet& deviceTypes,
@@ -1041,9 +1057,6 @@
         bool isMsdPatch(const audio_patch_handle_t &handle) const;
 
 private:
-        sp<SourceClientDescriptor> startAudioSourceInternal(
-                const struct audio_port_config *source, const audio_attributes_t *attributes,
-                uid_t uid);
 
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
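
The isVolumeConsistentForCalls() helper declared above centralizes the gating rule that checkAndSetVolume() used to inline: when voice call and Bluetooth SCO use different volume curves, skip the voice-call volume while SCO is requested for the call, and skip the SCO volume while neither SCO nor a hearing aid is in use, unless the only device is telephony TX. The compilable sketch below expresses only that boolean rule; the CallVolumeState struct and its fields are illustrative stand-ins for the real volume sources, device set and engine queries.

    #include <iostream>

    // Illustrative inputs: which volume source is being set and how the call is routed.
    struct CallVolumeState {
        bool voiceAndScoShareCurves;   // callVolSrc == btScoVolSrc in the real code
        bool isVoiceVolSrc;
        bool isBtScoVolSrc;
        bool scoRequestedForComm;
        bool hearingAidUsedForComm;
        bool onlyTelephonyTxDevice;
    };

    // Returns false when the requested volume source must not be applied for the
    // current call routing, mirroring the rule documented above.
    static bool isVolumeConsistentForCalls(const CallVolumeState& s) {
        if (s.voiceAndScoShareCurves) return true;  // aliased curves: always apply
        if (s.onlyTelephonyTxDevice) return true;   // telephony TX is exempt
        if (s.isVoiceVolSrc && s.scoRequestedForComm) return false;
        if (s.isBtScoVolSrc && !(s.scoRequestedForComm || s.hearingAidUsedForComm)) return false;
        return true;
    }

    int main() {
        const CallVolumeState s{false, true, false, true, false, false};
        std::cout << std::boolalpha << isVolumeConsistentForCalls(s) << "\n";  // false
        return 0;
    }
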
 
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index fb55225..cddbf39 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -52,7 +53,7 @@
     static_libs: [
         "libeffectsconfig",
         "libaudiopolicycomponents",
-    ]
+    ],
 }
 
 cc_library {
@@ -75,10 +76,9 @@
     ],
 
     include_dirs: [
-        "frameworks/av/services/audioflinger"
+        "frameworks/av/services/audioflinger",
     ],
 
-
     static_libs: [
         "framework-permission-aidl-cpp",
     ],
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 7584632..6de71a3 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -164,6 +164,8 @@
     status_t status = af->openInput(request, &response);
     if (status == OK) {
         *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(response.input));
+        *config = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioConfig_audio_config_t(response.config, true /*isInput*/));
     }
     return status;
 }
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 85b7ad9..d67ddb6 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -42,55 +42,25 @@
 // ----------------------------------------------------------------------------
 
 AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+    // Note: clang thread-safety permits the ctor to call guarded _l methods without
+    // acquiring the associated mutex capability, as standard practice is to assume
+    // single-threaded construction and destruction.
+
     // load xml config with effectsFactoryHal
-    status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
+    status_t loadResult = loadAudioEffectConfig_ll(effectsFactoryHal);
     if (loadResult < 0) {
         ALOGW("Failed to query effect configuration, fallback to load .conf");
         // load automatic audio effect modules
         if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+            loadAudioEffectConfigLegacy_l(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
         } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+            loadAudioEffectConfigLegacy_l(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
         }
     } else if (loadResult > 0) {
         ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
     }
 }
 
-void AudioPolicyEffects::setDefaultDeviceEffects() {
-    mDefaultDeviceEffectFuture = std::async(
-                std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-}
-
-AudioPolicyEffects::~AudioPolicyEffects()
-{
-    size_t i = 0;
-    // release audio input processing resources
-    for (i = 0; i < mInputSources.size(); i++) {
-        delete mInputSources.valueAt(i);
-    }
-    mInputSources.clear();
-
-    for (i = 0; i < mInputSessions.size(); i++) {
-        mInputSessions.valueAt(i)->mEffects.clear();
-        delete mInputSessions.valueAt(i);
-    }
-    mInputSessions.clear();
-
-    // release audio output processing resources
-    for (i = 0; i < mOutputStreams.size(); i++) {
-        delete mOutputStreams.valueAt(i);
-    }
-    mOutputStreams.clear();
-
-    for (i = 0; i < mOutputSessions.size(); i++) {
-        mOutputSessions.valueAt(i)->mEffects.clear();
-        delete mOutputSessions.valueAt(i);
-    }
-    mOutputSessions.clear();
-}
-
-
 status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
                              audio_source_t inputSource,
                              audio_session_t audioSession)
@@ -101,48 +71,43 @@
     audio_source_t aliasSource = (inputSource == AUDIO_SOURCE_HOTWORD) ?
                                     AUDIO_SOURCE_VOICE_RECOGNITION : inputSource;
 
-    Mutex::Autolock _l(mLock);
-    ssize_t index = mInputSources.indexOfKey(aliasSource);
-    if (index < 0) {
+    audio_utils::lock_guard _l(mMutex);
+    auto sourceIt = mInputSources.find(aliasSource);
+    if (sourceIt == mInputSources.end()) {
         ALOGV("addInputEffects(): no processing needs to be attached to this source");
         return status;
     }
-    ssize_t idx = mInputSessions.indexOfKey(audioSession);
-    EffectVector *sessionDesc;
-    if (idx < 0) {
-        sessionDesc = new EffectVector(audioSession);
-        mInputSessions.add(audioSession, sessionDesc);
-    } else {
-        // EffectVector is existing and we just need to increase ref count
-        sessionDesc = mInputSessions.valueAt(idx);
+    std::shared_ptr<EffectVector>& sessionDesc = mInputSessions[audioSession];
+    if (sessionDesc == nullptr) {
+        sessionDesc = std::make_shared<EffectVector>(audioSession);
     }
     sessionDesc->mRefCount++;
 
     ALOGV("addInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
     if (sessionDesc->mRefCount == 1) {
         int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
-        for (size_t i = 0; i < effects.size(); i++) {
-            EffectDesc *effect = effects[i];
+        const std::shared_ptr<EffectDescVector>& effects = sourceIt->second;
+        for (const std::shared_ptr<EffectDesc>& effect : *effects) {
             AttributionSourceState attributionSource;
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
-            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            auto fx = sp<AudioEffect>::make(attributionSource);
             fx->set(nullptr /*type */, &effect->mUuid, -1 /* priority */, nullptr /* callback */,
                     audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGW("addInputEffects(): failed to create Fx %s on source %d",
-                      effect->mName, (int32_t)aliasSource);
+                      effect->mName.c_str(), (int32_t)aliasSource);
                 // fx goes out of scope and strong ref on AudioEffect is released
                 continue;
             }
             for (size_t j = 0; j < effect->mParams.size(); j++) {
-                fx->setParameter(effect->mParams[j]);
+                // const_cast here due to API.
+                fx->setParameter(const_cast<effect_param_t*>(effect->mParams[j].get()));
             }
             ALOGV("addInputEffects(): added Fx %s on source: %d",
-                  effect->mName, (int32_t)aliasSource);
-            sessionDesc->mEffects.add(fx);
+                  effect->mName.c_str(), (int32_t)aliasSource);
+            sessionDesc->mEffects.push_back(std::move(fx));
         }
         sessionDesc->setProcessorEnabled(true);
         IPCThreadState::self()->restoreCallingIdentity(token);
@@ -156,18 +121,17 @@
 {
     status_t status = NO_ERROR;
 
-    Mutex::Autolock _l(mLock);
-    ssize_t index = mInputSessions.indexOfKey(audioSession);
-    if (index < 0) {
+    audio_utils::lock_guard _l(mMutex);
+    auto it = mInputSessions.find(audioSession);
+    if (it == mInputSessions.end()) {
         return status;
     }
-    EffectVector *sessionDesc = mInputSessions.valueAt(index);
+    std::shared_ptr<EffectVector> sessionDesc = it->second;
     sessionDesc->mRefCount--;
     ALOGV("releaseInputEffects(): input: %d, refCount: %d", input, sessionDesc->mRefCount);
     if (sessionDesc->mRefCount == 0) {
         sessionDesc->setProcessorEnabled(false);
-        delete sessionDesc;
-        mInputSessions.removeItemsAt(index);
+        mInputSessions.erase(it);
         ALOGV("releaseInputEffects(): all effects released");
     }
     return status;
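
The addInputEffects()/releaseInputEffects() hunks above swap the KeyedVector of raw EffectVector pointers for a std::map of std::shared_ptr: operator[] creates the per-session descriptor on first use, mRefCount counts attach/release pairs, and erasing the map entry at refcount zero lets shared_ptr reclaim it. A self-contained sketch of that create-on-first-use, erase-at-zero pattern follows; the Session struct and the int session ids are trivial stand-ins for EffectVector and audio_session_t.

    #include <iostream>
    #include <map>
    #include <memory>

    struct Session {
        explicit Session(int id) : mSessionId(id) {}
        int mSessionId;
        int mRefCount = 0;
    };

    static std::map<int, std::shared_ptr<Session>> gSessions;

    static void acquire(int sessionId) {
        // operator[] default-constructs a null shared_ptr on first use.
        std::shared_ptr<Session>& desc = gSessions[sessionId];
        if (desc == nullptr) {
            desc = std::make_shared<Session>(sessionId);
        }
        desc->mRefCount++;
    }

    static void release(int sessionId) {
        auto it = gSessions.find(sessionId);
        if (it == gSessions.end()) return;
        if (--it->second->mRefCount == 0) {
            gSessions.erase(it);  // shared_ptr frees the Session here
        }
    }

    int main() {
        acquire(42);
        acquire(42);
        release(42);
        std::cout << gSessions.size() << "\n";  // 1: still held by one client
        release(42);
        std::cout << gSessions.size() << "\n";  // 0: erased at refcount zero
        return 0;
    }
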
@@ -179,24 +143,16 @@
 {
     status_t status = NO_ERROR;
 
-    Mutex::Autolock _l(mLock);
-    size_t index;
-    for (index = 0; index < mInputSessions.size(); index++) {
-        if (mInputSessions.valueAt(index)->mSessionId == audioSession) {
-            break;
-        }
-    }
-    if (index == mInputSessions.size()) {
+    audio_utils::lock_guard _l(mMutex);
+    auto it = mInputSessions.find(audioSession);
+    if (it == mInputSessions.end()) {
         *count = 0;
         return BAD_VALUE;
     }
-    Vector< sp<AudioEffect> > effects = mInputSessions.valueAt(index)->mEffects;
-
-    for (size_t i = 0; i < effects.size(); i++) {
-        effect_descriptor_t desc = effects[i]->descriptor();
-        if (i < *count) {
-            descriptors[i] = desc;
-        }
+    const std::vector<sp<AudioEffect>>& effects = it->second->mEffects;
+    const size_t copysize = std::min(effects.size(), (size_t)*count);
+    for (size_t i = 0; i < copysize; i++) {
+        descriptors[i] = effects[i]->descriptor();
     }
     if (effects.size() > *count) {
         status = NO_MEMORY;
@@ -212,24 +168,16 @@
 {
     status_t status = NO_ERROR;
 
-    Mutex::Autolock _l(mLock);
-    size_t index;
-    for (index = 0; index < mOutputSessions.size(); index++) {
-        if (mOutputSessions.valueAt(index)->mSessionId == audioSession) {
-            break;
-        }
-    }
-    if (index == mOutputSessions.size()) {
+    audio_utils::lock_guard _l(mMutex);
+    auto it = mOutputSessions.find(audioSession);
+    if (it == mOutputSessions.end()) {
         *count = 0;
         return BAD_VALUE;
     }
-    Vector< sp<AudioEffect> > effects = mOutputSessions.valueAt(index)->mEffects;
-
-    for (size_t i = 0; i < effects.size(); i++) {
-        effect_descriptor_t desc = effects[i]->descriptor();
-        if (i < *count) {
-            descriptors[i] = desc;
-        }
+    const std::vector<sp<AudioEffect>>& effects = it->second->mEffects;
+    const size_t copysize = std::min(effects.size(), (size_t)*count);
+    for (size_t i = 0; i < copysize; i++) {
+        descriptors[i] = effects[i]->descriptor();
     }
     if (effects.size() > *count) {
         status = NO_MEMORY;
@@ -245,27 +193,22 @@
 {
     status_t status = NO_ERROR;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     // create audio processors according to stream
     // FIXME: should we have specific post processing settings for internal streams?
     // default to media for now.
     if (stream >= AUDIO_STREAM_PUBLIC_CNT) {
         stream = AUDIO_STREAM_MUSIC;
     }
-    ssize_t index = mOutputStreams.indexOfKey(stream);
-    if (index < 0) {
+    auto it = mOutputStreams.find(stream);
+    if (it == mOutputStreams.end()) {
         ALOGV("addOutputSessionEffects(): no output processing needed for this stream");
         return NO_ERROR;
     }
 
-    ssize_t idx = mOutputSessions.indexOfKey(audioSession);
-    EffectVector *procDesc;
-    if (idx < 0) {
-        procDesc = new EffectVector(audioSession);
-        mOutputSessions.add(audioSession, procDesc);
-    } else {
-        // EffectVector is existing and we just need to increase ref count
-        procDesc = mOutputSessions.valueAt(idx);
+    std::shared_ptr<EffectVector>& procDesc = mOutputSessions[audioSession];
+    if (procDesc == nullptr) {
+        procDesc = std::make_shared<EffectVector>(audioSession);
     }
     procDesc->mRefCount++;
 
@@ -274,25 +217,24 @@
     if (procDesc->mRefCount == 1) {
         // make sure effects are associated to audio server even if we are executing a binder call
         int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
-        for (size_t i = 0; i < effects.size(); i++) {
-            EffectDesc *effect = effects[i];
+        const std::shared_ptr<EffectDescVector>& effects = it->second;
+        for (const std::shared_ptr<EffectDesc>& effect : *effects) {
             AttributionSourceState attributionSource;
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
-            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            auto fx = sp<AudioEffect>::make(attributionSource);
             fx->set(nullptr /* type */, &effect->mUuid, 0 /* priority */, nullptr /* callback */,
                     audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("addOutputSessionEffects(): failed to create Fx  %s on session %d",
-                      effect->mName, audioSession);
+                      effect->mName.c_str(), audioSession);
                 // fx goes out of scope and strong ref on AudioEffect is released
                 continue;
             }
             ALOGV("addOutputSessionEffects(): added Fx %s on session: %d for stream: %d",
-                  effect->mName, audioSession, (int32_t)stream);
-            procDesc->mEffects.add(fx);
+                  effect->mName.c_str(), audioSession, (int32_t)stream);
+            procDesc->mEffects.push_back(std::move(fx));
         }
 
         procDesc->setProcessorEnabled(true);
@@ -305,30 +247,28 @@
                          audio_stream_type_t stream,
                          audio_session_t audioSession)
 {
-    status_t status = NO_ERROR;
     (void) output; // argument not used for now
     (void) stream; // argument not used for now
 
-    Mutex::Autolock _l(mLock);
-    ssize_t index = mOutputSessions.indexOfKey(audioSession);
-    if (index < 0) {
+    audio_utils::lock_guard _l(mMutex);
+    auto it = mOutputSessions.find(audioSession);
+    if (it == mOutputSessions.end()) {
         ALOGV("releaseOutputSessionEffects: no output processing was attached to this stream");
         return NO_ERROR;
     }
 
-    EffectVector *procDesc = mOutputSessions.valueAt(index);
+    std::shared_ptr<EffectVector> procDesc = it->second;
     procDesc->mRefCount--;
     ALOGV("releaseOutputSessionEffects(): session: %d, refCount: %d",
           audioSession, procDesc->mRefCount);
     if (procDesc->mRefCount == 0) {
         procDesc->setProcessorEnabled(false);
         procDesc->mEffects.clear();
-        delete procDesc;
-        mOutputSessions.removeItemsAt(index);
+        mOutputSessions.erase(it);
         ALOGV("releaseOutputSessionEffects(): output processing released from session: %d",
               audioSession);
     }
-    return status;
+    return NO_ERROR;
 }
 
 status_t AudioPolicyEffects::addSourceDefaultEffect(const effect_uuid_t *type,
@@ -370,17 +310,12 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     // Find the EffectDescVector for the given source type, or create a new one if necessary.
-    ssize_t index = mInputSources.indexOfKey(source);
-    EffectDescVector *desc = NULL;
-    if (index < 0) {
-        // No effects for this source type yet.
-        desc = new EffectDescVector();
-        mInputSources.add(source, desc);
-    } else {
-        desc = mInputSources.valueAt(index);
+    std::shared_ptr<EffectDescVector>& desc = mInputSources[source];
+    if (desc == nullptr) {
+        desc = std::make_shared<EffectDescVector>();
     }
 
     // Create a new effect and add it to the vector.
@@ -389,9 +324,9 @@
         ALOGE("addSourceDefaultEffect(): failed to get new unique id.");
         return res;
     }
-    EffectDesc *effect = new EffectDesc(
+    std::shared_ptr<EffectDesc> effect = std::make_shared<EffectDesc>(
             descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
-    desc->mEffects.add(effect);
+    desc->push_back(std::move(effect));
     // TODO(b/71813697): Support setting params as well.
 
     // TODO(b/71814300): Retroactively attach to any existing sources of the given type.
@@ -435,17 +370,13 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     // Find the EffectDescVector for the given stream type, or create a new one if necessary.
-    ssize_t index = mOutputStreams.indexOfKey(stream);
-    EffectDescVector *desc = NULL;
-    if (index < 0) {
+    std::shared_ptr<EffectDescVector>& desc = mOutputStreams[stream];
+    if (desc == nullptr) {
         // No effects for this stream type yet.
-        desc = new EffectDescVector();
-        mOutputStreams.add(stream, desc);
-    } else {
-        desc = mOutputStreams.valueAt(index);
+        desc = std::make_shared<EffectDescVector>();
     }
 
     // Create a new effect and add it to the vector.
@@ -454,9 +385,9 @@
         ALOGE("addStreamDefaultEffect(): failed to get new unique id.");
         return res;
     }
-    EffectDesc *effect = new EffectDesc(
+    std::shared_ptr<EffectDesc> effect = std::make_shared<EffectDesc>(
             descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
-    desc->mEffects.add(effect);
+    desc->push_back(std::move(effect));
     // TODO(b/71813697): Support setting params as well.
 
     // TODO(b/71814300): Retroactively attach to any existing streams of the given type.
@@ -475,18 +406,16 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     // Check each source type.
-    size_t numSources = mInputSources.size();
-    for (size_t i = 0; i < numSources; ++i) {
+    for (auto& [source, descVector] : mInputSources) {
         // Check each effect for each source.
-        EffectDescVector* descVector = mInputSources[i];
-        for (auto desc = descVector->mEffects.begin(); desc != descVector->mEffects.end(); ++desc) {
+        for (auto desc = descVector->begin(); desc != descVector->end(); ++desc) {
             if ((*desc)->mId == id) {
                 // Found it!
                 // TODO(b/71814300): Remove from any sources the effect was attached to.
-                descVector->mEffects.erase(desc);
+                descVector->erase(desc);
                 // Handles are unique; there can only be one match, so return early.
                 return NO_ERROR;
             }
@@ -506,18 +435,16 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     // Check each stream type.
-    size_t numStreams = mOutputStreams.size();
-    for (size_t i = 0; i < numStreams; ++i) {
+    for (auto& [stream, descVector] : mOutputStreams) {
         // Check each effect for each stream.
-        EffectDescVector* descVector = mOutputStreams[i];
-        for (auto desc = descVector->mEffects.begin(); desc != descVector->mEffects.end(); ++desc) {
+        for (auto desc = descVector->begin(); desc != descVector->end(); ++desc) {
             if ((*desc)->mId == id) {
                 // Found it!
                 // TODO(b/71814300): Remove from any streams the effect was attached to.
-                descVector->mEffects.erase(desc);
+                descVector->erase(desc);
                 // Handles are unique; there can only be one match, so return early.
                 return NO_ERROR;
             }
@@ -530,8 +457,8 @@
 
 void AudioPolicyEffects::EffectVector::setProcessorEnabled(bool enabled)
 {
-    for (size_t i = 0; i < mEffects.size(); i++) {
-        mEffects.itemAt(i)->setEnabled(enabled);
+    for (const auto& effect : mEffects) {
+        effect->setEnabled(enabled);
     }
 }
 
@@ -540,7 +467,8 @@
 // Audio processing configuration
 // ----------------------------------------------------------------------------
 
-/*static*/ const char * const AudioPolicyEffects::kInputSourceNames[AUDIO_SOURCE_CNT -1] = {
+// We use const char* instead of std::string_view because comparison is believed to be faster.
+constexpr const char* kInputSourceNames[AUDIO_SOURCE_CNT - 1] = {
     MIC_SRC_TAG,
     VOICE_UL_SRC_TAG,
     VOICE_DL_SRC_TAG,
@@ -567,7 +495,8 @@
     return (audio_source_t)i;
 }
 
-const char *AudioPolicyEffects::kStreamNames[AUDIO_STREAM_PUBLIC_CNT+1] = {
+// +1 as enum starts from -1
+constexpr const char* kStreamNames[AUDIO_STREAM_PUBLIC_CNT + 1] = {
     AUDIO_STREAM_DEFAULT_TAG,
     AUDIO_STREAM_VOICE_CALL_TAG,
     AUDIO_STREAM_SYSTEM_TAG,
@@ -584,6 +513,7 @@
 
 // returns the audio_stream_t enum corresponding to the output stream name or
 // AUDIO_STREAM_PUBLIC_CNT if no match found
+/* static */
 audio_stream_type_t AudioPolicyEffects::streamNameToEnum(const char *name)
 {
     int i;
@@ -600,6 +530,7 @@
 // Audio Effect Config parser
 // ----------------------------------------------------------------------------
 
+/* static */
 size_t AudioPolicyEffects::growParamSize(char **param,
                                          size_t size,
                                          size_t *curSize,
@@ -623,7 +554,7 @@
     return pos;
 }
 
-
+/* static */
 size_t AudioPolicyEffects::readParamValue(cnode *node,
                                           char **param,
                                           size_t *curSize,
@@ -692,7 +623,8 @@
     return len;
 }
 
-effect_param_t *AudioPolicyEffects::loadEffectParameter(cnode *root)
+/* static */
+std::shared_ptr<const effect_param_t> AudioPolicyEffects::loadEffectParameter(cnode* root)
 {
     cnode *param;
     cnode *value;
@@ -722,7 +654,7 @@
             *ptr = atoi(param->value);
             fx_param->psize = sizeof(int);
             fx_param->vsize = sizeof(int);
-            return fx_param;
+            return {fx_param, free};
         }
     }
     if (param == NULL || value == NULL) {
@@ -760,42 +692,43 @@
         value = value->next;
     }
 
-    return fx_param;
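+    // Wrap the malloc'ed fx_param in a shared_ptr that uses free() as its deleter.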
+    return {fx_param, free};
 
 error:
     free(fx_param);
     return NULL;
 }
 
-void AudioPolicyEffects::loadEffectParameters(cnode *root, Vector <effect_param_t *>& params)
+/* static */
+void AudioPolicyEffects::loadEffectParameters(
+        cnode* root, std::vector<std::shared_ptr<const effect_param_t>>& params)
 {
     cnode *node = root->first_child;
     while (node) {
         ALOGV("loadEffectParameters() loading param %s", node->name);
-        effect_param_t *param = loadEffectParameter(node);
-        if (param != NULL) {
-            params.add(param);
+        const auto param = loadEffectParameter(node);
+        if (param != nullptr) {
+            params.push_back(param);
         }
         node = node->next;
     }
 }
 
-
-AudioPolicyEffects::EffectDescVector *AudioPolicyEffects::loadEffectConfig(
-                                                            cnode *root,
-                                                            const Vector <EffectDesc *>& effects)
+/* static */
+std::shared_ptr<AudioPolicyEffects::EffectDescVector> AudioPolicyEffects::loadEffectConfig(
+        cnode* root, const EffectDescVector& effects)
 {
     cnode *node = root->first_child;
     if (node == NULL) {
         ALOGW("loadInputSource() empty element %s", root->name);
         return NULL;
     }
-    EffectDescVector *desc = new EffectDescVector();
+    auto desc = std::make_shared<EffectDescVector>();
     while (node) {
         size_t i;
 
         for (i = 0; i < effects.size(); i++) {
-            if (strncmp(effects[i]->mName, node->name, EFFECT_STRING_LEN_MAX) == 0) {
+            if (effects[i]->mName == node->name) {
                 ALOGV("loadEffectConfig() found effect %s in list", node->name);
                 break;
             }
@@ -805,23 +738,22 @@
             node = node->next;
             continue;
         }
-        EffectDesc *effect = new EffectDesc(*effects[i]);   // deep copy
+        auto effect = std::make_shared<EffectDesc>(*effects[i]);   // deep copy
         loadEffectParameters(node, effect->mParams);
         ALOGV("loadEffectConfig() adding effect %s uuid %08x",
-              effect->mName, effect->mUuid.timeLow);
-        desc->mEffects.add(effect);
+              effect->mName.c_str(), effect->mUuid.timeLow);
+        desc->push_back(std::move(effect));
         node = node->next;
     }
-    if (desc->mEffects.size() == 0) {
+    if (desc->empty()) {
         ALOGW("loadEffectConfig() no valid effects found in config %s", root->name);
-        delete desc;
-        return NULL;
+        return nullptr;
     }
     return desc;
 }
 
-status_t AudioPolicyEffects::loadInputEffectConfigurations(cnode *root,
-                                                           const Vector <EffectDesc *>& effects)
+status_t AudioPolicyEffects::loadInputEffectConfigurations_l(cnode* root,
+        const EffectDescVector& effects)
 {
     cnode *node = config_find(root, PREPROCESSING_TAG);
     if (node == NULL) {
@@ -831,24 +763,24 @@
     while (node) {
         audio_source_t source = inputSourceNameToEnum(node->name);
         if (source == AUDIO_SOURCE_CNT) {
-            ALOGW("loadInputSources() invalid input source %s", node->name);
+            ALOGW("%s() invalid input source %s", __func__, node->name);
             node = node->next;
             continue;
         }
-        ALOGV("loadInputSources() loading input source %s", node->name);
-        EffectDescVector *desc = loadEffectConfig(node, effects);
+        ALOGV("%s() loading input source %s", __func__, node->name);
+        auto desc = loadEffectConfig(node, effects);
         if (desc == NULL) {
             node = node->next;
             continue;
         }
-        mInputSources.add(source, desc);
+        mInputSources[source] = std::move(desc);
         node = node->next;
     }
     return NO_ERROR;
 }
 
-status_t AudioPolicyEffects::loadStreamEffectConfigurations(cnode *root,
-                                                            const Vector <EffectDesc *>& effects)
+status_t AudioPolicyEffects::loadStreamEffectConfigurations_l(cnode* root,
+        const EffectDescVector& effects)
 {
     cnode *node = config_find(root, OUTPUT_SESSION_PROCESSING_TAG);
     if (node == NULL) {
@@ -858,23 +790,24 @@
     while (node) {
         audio_stream_type_t stream = streamNameToEnum(node->name);
         if (stream == AUDIO_STREAM_PUBLIC_CNT) {
-            ALOGW("loadStreamEffectConfigurations() invalid output stream %s", node->name);
+            ALOGW("%s() invalid output stream %s", __func__, node->name);
             node = node->next;
             continue;
         }
-        ALOGV("loadStreamEffectConfigurations() loading output stream %s", node->name);
-        EffectDescVector *desc = loadEffectConfig(node, effects);
+        ALOGV("%s() loading output stream %s", __func__, node->name);
+        std::shared_ptr<EffectDescVector> desc = loadEffectConfig(node, effects);
         if (desc == NULL) {
             node = node->next;
             continue;
         }
-        mOutputStreams.add(stream, desc);
+        mOutputStreams[stream] = std::move(desc);
         node = node->next;
     }
     return NO_ERROR;
 }
 
-AudioPolicyEffects::EffectDesc *AudioPolicyEffects::loadEffect(cnode *root)
+/* static */
+std::shared_ptr<AudioPolicyEffects::EffectDesc> AudioPolicyEffects::loadEffect(cnode* root)
 {
     cnode *node = config_find(root, UUID_TAG);
     if (node == NULL) {
@@ -885,30 +818,33 @@
         ALOGW("loadEffect() invalid uuid %s", node->value);
         return NULL;
     }
-    return new EffectDesc(root->name, uuid);
+    return std::make_shared<EffectDesc>(root->name, uuid);
 }
 
-status_t AudioPolicyEffects::loadEffects(cnode *root, Vector <EffectDesc *>& effects)
+/* static */
+android::AudioPolicyEffects::EffectDescVector AudioPolicyEffects::loadEffects(cnode *root)
 {
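+    // Returns by value; an empty vector means no effects were found in the configuration.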
+    EffectDescVector effects;
     cnode *node = config_find(root, EFFECTS_TAG);
     if (node == NULL) {
-        return -ENOENT;
+        ALOGW("%s() Cannot find %s configuration", __func__, EFFECTS_TAG);
+        return effects;
     }
     node = node->first_child;
     while (node) {
         ALOGV("loadEffects() loading effect %s", node->name);
-        EffectDesc *effect = loadEffect(node);
+        auto effect = loadEffect(node);
         if (effect == NULL) {
             node = node->next;
             continue;
         }
-        effects.add(effect);
+        effects.push_back(std::move(effect));
         node = node->next;
     }
-    return NO_ERROR;
+    return effects;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectConfig(
+status_t AudioPolicyEffects::loadAudioEffectConfig_ll(
         const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
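+    // _ll: the caller must hold both mMutex and mDeviceEffectsMutex (see REQUIRES in the header).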
     if (!effectsFactoryHal) {
         ALOGE("%s Null EffectsFactoryHalInterface", __func__);
@@ -924,11 +860,12 @@
 
     auto loadProcessingChain = [](auto& processingChain, auto& streams) {
         for (auto& stream : processingChain) {
-            auto effectDescs = std::make_unique<EffectDescVector>();
+            auto effectDescs = std::make_shared<EffectDescVector>();
             for (auto& effect : stream.effects) {
-                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
+                effectDescs->push_back(
+                        std::make_shared<EffectDesc>(effect->name, effect->uuid));
             }
-            streams.add(stream.type, effectDescs.release());
+            streams[stream.type] = std::move(effectDescs);
         }
     };
 
@@ -936,26 +873,26 @@
         for (auto& deviceProcess : processingChain) {
             auto effectDescs = std::make_unique<EffectDescVector>();
             for (auto& effect : deviceProcess.effects) {
-                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
+                effectDescs->push_back(
+                        std::make_shared<EffectDesc>(effect->name, effect->uuid));
             }
-            auto deviceEffects = std::make_unique<DeviceEffects>(
+            auto devEffects = std::make_unique<DeviceEffects>(
                         std::move(effectDescs), deviceProcess.type, deviceProcess.address);
-            devicesEffects.emplace(deviceProcess.address, std::move(deviceEffects));
+            devicesEffects.emplace(deviceProcess.address, std::move(devEffects));
         }
     };
 
+    // Access to mInputSources and mOutputStreams requires mMutex.
     loadProcessingChain(processings->preprocess, mInputSources);
     loadProcessingChain(processings->postprocess, mOutputStreams);
 
-    {
-        Mutex::Autolock _l(mLock);
-        loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
-    }
+    // access to mDeviceEffects requires mDeviceEffectsMutex
+    loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
 
     return skippedElements;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy_l(const char *path)
 {
     cnode *root;
     char *data;
@@ -967,15 +904,11 @@
     root = config_node("", "");
     config_load(root, data);
 
-    Vector <EffectDesc *> effects;
-    loadEffects(root, effects);
-    loadInputEffectConfigurations(root, effects);
-    loadStreamEffectConfigurations(root, effects);
+    const EffectDescVector effects = loadEffects(root);
 
-    for (size_t i = 0; i < effects.size(); i++) {
-        delete effects[i];
-    }
-
+    // requires mMutex
+    loadInputEffectConfigurations_l(root, effects);
+    loadStreamEffectConfigurations_l(root, effects);
     config_free(root);
     free(root);
     free(data);
@@ -985,14 +918,14 @@
 
 void AudioPolicyEffects::initDefaultDeviceEffects()
 {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mDeviceEffectsMutex);
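+    // Create and enable each configured device effect on its device port.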
     for (const auto& deviceEffectsIter : mDeviceEffects) {
         const auto& deviceEffects =  deviceEffectsIter.second;
-        for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
+        for (const auto& effectDesc : *deviceEffects->mEffectDescriptors) {
             AttributionSourceState attributionSource;
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
-            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            sp<AudioEffect> fx = sp<AudioEffect>::make(attributionSource);
             fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0 /* priority */, nullptr /* callback */,
                     AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
                     AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
@@ -1000,16 +933,16 @@
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("%s(): failed to create Fx %s on port type=%d address=%s", __func__,
-                      effectDesc->mName, deviceEffects->getDeviceType(),
+                      effectDesc->mName.c_str(), deviceEffects->getDeviceType(),
                       deviceEffects->getDeviceAddress().c_str());
                 // fx goes out of scope and strong ref on AudioEffect is released
                 continue;
             }
             fx->setEnabled(true);
             ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
-                  effectDesc->mName, deviceEffects->getDeviceType(),
+                  effectDesc->mName.c_str(), deviceEffects->getDeviceType(),
                   deviceEffects->getDeviceAddress().c_str());
-            deviceEffects->mEffects.push_back(fx);
+            deviceEffects->mEffects.push_back(std::move(fx));
         }
     }
 }
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index e17df48..259b84a 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIOPOLICYEFFECTS_H
-#define ANDROID_AUDIOPOLICYEFFECTS_H
+#pragma once
 
 #include <stdlib.h>
 #include <stdio.h>
@@ -23,6 +22,7 @@
 #include <future>
 
 #include <android-base/thread_annotations.h>
+#include <audio_utils/mutex.h>
 #include <cutils/misc.h>
 #include <media/AudioEffect.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -56,44 +56,43 @@
     // First it will look whether vendor specific file exists,
     // otherwise it will parse the system default file.
     explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
-    virtual ~AudioPolicyEffects();
 
     // NOTE: methods on AudioPolicyEffects should never be called with the AudioPolicyService
-    // main mutex (mLock) held as they will indirectly call back into AudioPolicyService when
+    // main mutex (mMutex) held as they will indirectly call back into AudioPolicyService when
     // managing audio effects.
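+    // The EXCLUDES_AudioPolicyEffects_Mutex annotations below indicate that callers must not
+    // already hold mMutex, since these methods acquire it internally.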
 
     // Return a list of effect descriptors for default input effects
     // associated with audioSession
     status_t queryDefaultInputEffects(audio_session_t audioSession,
                              effect_descriptor_t *descriptors,
-                             uint32_t *count);
+                             uint32_t* count) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Add all input effects associated with this input
     // Effects are attached depending on the audio_source_t
     status_t addInputEffects(audio_io_handle_t input,
                              audio_source_t inputSource,
-                             audio_session_t audioSession);
+                             audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Release all input effects associated with this input
     status_t releaseInputEffects(audio_io_handle_t input,
-                                 audio_session_t audioSession);
+                                 audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Return a list of effect descriptors for default output effects
     // associated with audioSession
     status_t queryDefaultOutputSessionEffects(audio_session_t audioSession,
                              effect_descriptor_t *descriptors,
-                             uint32_t *count);
+                             uint32_t* count) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Add all output effects associated to this output
     // Effects are attached depending on the audio_stream_type_t
     status_t addOutputSessionEffects(audio_io_handle_t output,
                              audio_stream_type_t stream,
-                             audio_session_t audioSession);
+                             audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // release all output effects associated with this output stream and audiosession
     status_t releaseOutputSessionEffects(audio_io_handle_t output,
                              audio_stream_type_t stream,
-                             audio_session_t audioSession);
+                             audio_session_t audioSession) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Add the effect to the list of default effects for sources of type |source|.
     status_t addSourceDefaultEffect(const effect_uuid_t *type,
@@ -101,7 +100,7 @@
                                     const effect_uuid_t *uuid,
                                     int32_t priority,
                                     audio_source_t source,
-                                    audio_unique_id_t* id);
+                                    audio_unique_id_t* id) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Add the effect to the list of default effects for streams of a given usage.
     status_t addStreamDefaultEffect(const effect_uuid_t *type,
@@ -109,36 +108,37 @@
                                     const effect_uuid_t *uuid,
                                     int32_t priority,
                                     audio_usage_t usage,
-                                    audio_unique_id_t* id);
+                                    audio_unique_id_t* id) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Remove the default source effect from wherever it's attached.
-    status_t removeSourceDefaultEffect(audio_unique_id_t id);
+    status_t removeSourceDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
 
     // Remove the default stream effect from wherever it's attached.
-    status_t removeStreamDefaultEffect(audio_unique_id_t id);
+    status_t removeStreamDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
 
-    void setDefaultDeviceEffects();
+    // Initializes the default device effects (AudioSystem must be ready as this creates audio clients).
+    void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
 
 private:
-    void initDefaultDeviceEffects();
 
     // class to store the description of an effect and its parameters
     // as defined in audio_effects.conf
     class EffectDesc {
     public:
-        EffectDesc(const char *name,
+        EffectDesc(std::string_view name,
                    const effect_uuid_t& typeUuid,
                    const String16& opPackageName,
                    const effect_uuid_t& uuid,
                    uint32_t priority,
                    audio_unique_id_t id) :
-                        mName(strdup(name)),
+                        mName(name),
                         mTypeUuid(typeUuid),
                         mOpPackageName(opPackageName),
                         mUuid(uuid),
                         mPriority(priority),
                         mId(id) { }
-        EffectDesc(const char *name, const effect_uuid_t& uuid) :
+        // Modern EffectDesc usage:
+        EffectDesc(std::string_view name, const effect_uuid_t& uuid) :
                         EffectDesc(name,
                                    *EFFECT_UUID_NULL,
                                    String16(""),
@@ -146,67 +146,36 @@
                                    0,
                                    AUDIO_UNIQUE_ID_ALLOCATE) { }
         EffectDesc(const EffectDesc& orig) :
-                        mName(strdup(orig.mName)),
+                        mName(orig.mName),
                         mTypeUuid(orig.mTypeUuid),
                         mOpPackageName(orig.mOpPackageName),
                         mUuid(orig.mUuid),
                         mPriority(orig.mPriority),
-                        mId(orig.mId) {
-                            // deep copy mParams
-                            for (size_t k = 0; k < orig.mParams.size(); k++) {
-                                effect_param_t *origParam = orig.mParams[k];
-                                // psize and vsize are rounded up to an int boundary for allocation
-                                size_t origSize = sizeof(effect_param_t) +
-                                                  ((origParam->psize + 3) & ~3) +
-                                                  ((origParam->vsize + 3) & ~3);
-                                effect_param_t *dupParam = (effect_param_t *) malloc(origSize);
-                                memcpy(dupParam, origParam, origSize);
-                                // This works because the param buffer allocation is also done by
-                                // multiples of 4 bytes originally. In theory we should memcpy only
-                                // the actual param size, that is without rounding vsize.
-                                mParams.add(dupParam);
-                            }
-                        }
-        /*virtual*/ ~EffectDesc() {
-            free(mName);
-            for (size_t k = 0; k < mParams.size(); k++) {
-                free(mParams[k]);
-            }
-        }
-        char *mName;
-        effect_uuid_t mTypeUuid;
-        String16 mOpPackageName;
-        effect_uuid_t mUuid;
-        int32_t mPriority;
-        audio_unique_id_t mId;
-        Vector <effect_param_t *> mParams;
+                        mId(orig.mId),
+                        mParams(orig.mParams) { }
+
+        const std::string mName;
+        const effect_uuid_t mTypeUuid;
+        const String16 mOpPackageName;
+        const effect_uuid_t mUuid;
+        const int32_t mPriority;
+        const audio_unique_id_t mId;
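+        // Immutable parameter blobs; copying an EffectDesc shares them instead of deep-copying.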
+        std::vector<std::shared_ptr<const effect_param_t>> mParams;
     };
 
-    // class to store voctor of EffectDesc
-    class EffectDescVector {
-    public:
-        EffectDescVector() {}
-        /*virtual*/ ~EffectDescVector() {
-            for (size_t j = 0; j < mEffects.size(); j++) {
-                delete mEffects[j];
-            }
-        }
-        Vector <EffectDesc *> mEffects;
-    };
+    using EffectDescVector = std::vector<std::shared_ptr<EffectDesc>>;
 
-    // class to store voctor of AudioEffects
     class EffectVector {
     public:
-        explicit EffectVector(audio_session_t session) : mSessionId(session), mRefCount(0) {}
-        /*virtual*/ ~EffectVector() {}
+        explicit EffectVector(audio_session_t session) : mSessionId(session) {}
 
         // Enable or disable all effects in effect vector
         void setProcessorEnabled(bool enabled);
 
         const audio_session_t mSessionId;
-        // AudioPolicyManager keeps mLock, no need for lock on reference count here
-        int mRefCount;
-        Vector< sp<AudioEffect> >mEffects;
+        // AudioPolicyManager keeps mMutex, no need for lock on reference count here
+        int mRefCount = 0;
+        std::vector<sp<AudioEffect>> mEffects;
     };
 
     /**
@@ -215,12 +184,11 @@
     class DeviceEffects {
     public:
         DeviceEffects(std::unique_ptr<EffectDescVector> effectDescriptors,
-                               audio_devices_t device, const std::string& address) :
+                               audio_devices_t device, std::string_view address) :
             mEffectDescriptors(std::move(effectDescriptors)),
             mDeviceType(device), mDeviceAddress(address) {}
-        /*virtual*/ ~DeviceEffects() = default;
 
-        std::vector< sp<AudioEffect> > mEffects;
+        std::vector<sp<AudioEffect>> mEffects;
         audio_devices_t getDeviceType() const { return mDeviceType; }
         std::string getDeviceAddress() const { return mDeviceAddress; }
         const std::unique_ptr<EffectDescVector> mEffectDescriptors;
@@ -231,65 +199,81 @@
 
     };
 
-    static const char * const kInputSourceNames[AUDIO_SOURCE_CNT -1];
+    status_t loadAudioEffectConfig_ll(const sp<EffectsFactoryHalInterface>& effectsFactoryHal)
+            REQUIRES(mMutex, mDeviceEffectsMutex);
+
+    // Legacy: Begin methods below.
+    // Parse audio_effects.conf - called from constructor.
+    status_t loadAudioEffectConfigLegacy_l(const char* path) REQUIRES(mMutex);
+
+    // Legacy: Load all automatic effect configurations
+    status_t loadInputEffectConfigurations_l(cnode* root,
+            const EffectDescVector& effects) REQUIRES(mMutex);
+    status_t loadStreamEffectConfigurations_l(cnode* root,
+            const EffectDescVector& effects) REQUIRES(mMutex);
+
+    // Legacy: static methods below.
+
     static audio_source_t inputSourceNameToEnum(const char *name);
 
-    static const char *kStreamNames[AUDIO_STREAM_PUBLIC_CNT+1]; //+1 required as streams start from -1
-    audio_stream_type_t streamNameToEnum(const char *name);
-
-    // Parse audio_effects.conf
-    status_t loadAudioEffectConfigLegacy(const char *path);
-    status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
+    static audio_stream_type_t streamNameToEnum(const char* name);
 
     // Load all effects descriptors in configuration file
-    status_t loadEffects(cnode *root, Vector <EffectDesc *>& effects);
-    EffectDesc *loadEffect(cnode *root);
-
-    // Load all automatic effect configurations
-    status_t loadInputEffectConfigurations(cnode *root, const Vector <EffectDesc *>& effects);
-    status_t loadStreamEffectConfigurations(cnode *root, const Vector <EffectDesc *>& effects);
-    EffectDescVector *loadEffectConfig(cnode *root, const Vector <EffectDesc *>& effects);
+    static EffectDescVector loadEffects(cnode* root);
+    static std::shared_ptr<AudioPolicyEffects::EffectDesc> loadEffect(cnode* root);
+    static std::shared_ptr<EffectDescVector> loadEffectConfig(cnode* root,
+            const EffectDescVector& effects);
 
     // Load all automatic effect parameters
-    void loadEffectParameters(cnode *root, Vector <effect_param_t *>& params);
-    effect_param_t *loadEffectParameter(cnode *root);
-    size_t readParamValue(cnode *node,
+    static void loadEffectParameters(
+            cnode* root, std::vector<std::shared_ptr<const effect_param_t>>& params);
+
+    // loadEffectParameter returns a shared_ptr instead of a unique_ptr as there may
+    // be multiple references to the same effect parameter.
+    static std::shared_ptr<const effect_param_t> loadEffectParameter(cnode* root);
+    static size_t readParamValue(cnode* node,
                           char **param,
                           size_t *curSize,
                           size_t *totSize);
-    size_t growParamSize(char **param,
+    static size_t growParamSize(char** param,
                          size_t size,
                          size_t *curSize,
                          size_t *totSize);
 
+    // Legacy: End methods above.
+
+    // Note: The association of Effects to audio source, session, or stream
+    // is done through std::map instead of std::unordered_map.  This gives
+    // better reproducibility of issues, since map is ordered and more predictable
+    // in enumeration.
+
     // protects access to mInputSources, mInputSessions, mOutputStreams, mOutputSessions
-    // never hold AudioPolicyService::mLock when calling AudioPolicyEffects methods as
+    // never hold AudioPolicyService::mMutex when calling AudioPolicyEffects methods as
     // those can call back into AudioPolicyService methods and try to acquire the mutex
-    Mutex mLock;
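+    // The MutexOrder argument places this mutex in the audio_utils lock-ordering hierarchy.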
+    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyEffects_Mutex};
     // Automatic input effects are configured per audio_source_t
-    KeyedVector< audio_source_t, EffectDescVector* > mInputSources;
-    // Automatic input effects are unique for audio_io_handle_t
-    KeyedVector< audio_session_t, EffectVector* > mInputSessions;
+    std::map<audio_source_t, std::shared_ptr<EffectDescVector>> mInputSources
+            GUARDED_BY(mMutex);
+    // Automatic input effects are unique for an audio_session_t.
+    std::map<audio_session_t, std::shared_ptr<EffectVector>> mInputSessions
+            GUARDED_BY(mMutex);
 
     // Automatic output effects are organized per audio_stream_type_t
-    KeyedVector< audio_stream_type_t, EffectDescVector* > mOutputStreams;
-    // Automatic output effects are unique for audiosession ID
-    KeyedVector< audio_session_t, EffectVector* > mOutputSessions;
+    std::map<audio_stream_type_t, std::shared_ptr<EffectDescVector>> mOutputStreams
+            GUARDED_BY(mMutex);
+    // Automatic output effects are unique for an audio_session_t.
+    std::map<audio_session_t, std::shared_ptr<EffectVector>> mOutputSessions
+            GUARDED_BY(mMutex);
 
     /**
      * @brief mDeviceEffects map of device effects indexed by the device address
      */
-    std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects GUARDED_BY(mLock);
 
-    /**
-     * Device Effect initialization must be asynchronous: the audio_policy service parses and init
-     * effect on first reference. AudioFlinger will handle effect creation and register these
-     * effect on audio_policy service.
-     * We must store the reference of the furture garantee real asynchronous operation.
-     */
-    std::future<void> mDefaultDeviceEffectFuture;
+    // mDeviceEffects is only written while loading the configuration and read back in
+    // initDefaultDeviceEffects; it is not touched by the regular AudioPolicyEffects methods.
+    // We keep a separate mutex here to catch future methods attempting to access this variable.
+    std::mutex mDeviceEffectsMutex;
+    std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects
+            GUARDED_BY(mDeviceEffectsMutex);
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIOPOLICYEFFECTS_H
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index deecab6..1951045 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -114,7 +114,7 @@
 void AudioPolicyService::doOnNewAudioModulesAvailable()
 {
     if (mAudioPolicyManager == NULL) return;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     mAudioPolicyManager->onNewAudioModulesAvailable();
 }
@@ -140,7 +140,7 @@
     }
 
     ALOGV("setDeviceConnectionState()");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->setDeviceConnectionState(
             state, port, encodedFormat);
@@ -162,7 +162,7 @@
                         AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
         return Status::ok();
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(
@@ -190,7 +190,7 @@
     }
 
     ALOGV("handleDeviceConfigChange()");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     status_t status =  mAudioPolicyManager->handleDeviceConfigChange(
             device, address.c_str(), deviceNameAidl.c_str(), encodedFormat);
@@ -221,7 +221,7 @@
     // acquire lock before calling setMode() so that setMode() + setPhoneState() are an atomic
     // operation from policy manager standpoint (no other operation (e.g track start or stop)
     // can be interleaved).
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     // TODO: check if it is more appropriate to do it in platform specific policy manager
 
     // Audio HAL mode conversion for call redirect modes
@@ -242,7 +242,7 @@
 }
 
 Status AudioPolicyService::getPhoneState(AudioMode* _aidl_return) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_mode_t_AudioMode(mPhoneState));
     return Status::ok();
 }
@@ -270,7 +270,7 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
     ALOGV("setForceUse()");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     mAudioPolicyManager->setForceUse(usage, config);
     onCheckSpatializer_l();
@@ -312,7 +312,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
     ALOGV("getOutput()");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(mAudioPolicyManager->getOutput(stream)));
@@ -352,7 +352,7 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr, attributionSource)));
 
     ALOGV("%s()", __func__);
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
         aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
@@ -458,7 +458,7 @@
                                                      sp<AudioPolicyEffects>& effects,
                                                      const char *context)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     const ssize_t index = mAudioPlaybackClients.indexOfKey(portId);
     if (index < 0) {
         ALOGE("%s AudioTrack client not found for portId %d", context, portId);
@@ -489,7 +489,7 @@
             ALOGW("Failed to add effects on session %d", client->session);
         }
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->startOutput(portId);
     if (status == NO_ERROR) {
@@ -531,7 +531,7 @@
             ALOGW("Failed to release effects on session %d", client->session);
         }
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->stopOutput(portId);
     if (status == NO_ERROR) {
@@ -567,7 +567,7 @@
         audioPolicyEffects->releaseOutputSessionEffects(
             client->io, client->stream, client->session);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (client != nullptr && client->active) {
         onUpdateActiveSpatializerTracks_l();
     }
@@ -692,7 +692,7 @@
         status_t status;
         AudioPolicyInterface::input_type_t inputType;
 
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         {
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
@@ -795,7 +795,7 @@
     }
     sp<AudioRecordClient> client;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
 
         ssize_t index = mAudioRecordClients.indexOfKey(portId);
         if (index < 0) {
@@ -819,7 +819,7 @@
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     ALOGW_IF(client->silenced, "startInput on silenced input for port %d, uid %d. Unsilencing.",
             portIdAidl,
@@ -939,7 +939,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     ssize_t index = mAudioRecordClients.indexOfKey(portId);
     if (index < 0) {
@@ -969,7 +969,7 @@
     sp<AudioPolicyEffects>audioPolicyEffects;
     sp<AudioRecordClient> client;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         audioPolicyEffects = mAudioPolicyEffects;
         ssize_t index = mAudioRecordClients.indexOfKey(portId);
         if (index < 0) {
@@ -997,7 +997,7 @@
         }
     }
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         AutoCallerClear acc;
         mAudioPolicyManager->releaseInput(portId);
     }
@@ -1021,7 +1021,7 @@
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     mAudioPolicyManager->initStreamVolume(stream, indexMin, indexMax);
     return binderStatusFromStatusT(NO_ERROR);
@@ -1045,7 +1045,7 @@
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->setStreamVolumeIndex(stream,
                                                                              index,
@@ -1067,7 +1067,7 @@
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getStreamVolumeIndex(stream, &index, device)));
@@ -1092,7 +1092,7 @@
     if (!settingsAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(
             mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, device));
@@ -1112,7 +1112,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getVolumeIndexForAttributes(attributes, index, device)));
@@ -1131,7 +1131,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getMinVolumeIndexForAttributes(attributes, index)));
@@ -1150,7 +1150,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getMaxVolumeIndexForAttributes(attributes, index)));
@@ -1192,7 +1192,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDevicesForAttributes(aa, &devices, forVolume)));
@@ -1212,7 +1212,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(mAudioPolicyManager->getOutputForEffect(&desc)));
@@ -1237,7 +1237,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(
             mAudioPolicyManager->registerEffect(&desc, io, strategy, session, id));
@@ -1249,7 +1249,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->unregisterEffect(id));
 }
@@ -1260,7 +1260,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->setEffectEnabled(id, enabled));
 }
@@ -1279,7 +1279,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->moveEffectsToIo(ids, io));
 }
@@ -1297,7 +1297,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isStreamActive(stream, inPastMs);
     return Status::ok();
@@ -1317,7 +1317,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isStreamActiveRemotely(stream, inPastMs);
     return Status::ok();
@@ -1329,7 +1329,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isSourceActive(source);
     return Status::ok();
@@ -1341,7 +1341,7 @@
         return NO_INIT;
     }
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         audioPolicyEffects = mAudioPolicyEffects;
     }
     if (audioPolicyEffects == 0) {
@@ -1465,7 +1465,7 @@
             convertRange(systemUsagesAidl.begin(), systemUsagesAidl.begin() + size,
                          std::back_inserter(systemUsages), aidl2legacy_AudioUsage_audio_usage_t)));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1485,7 +1485,7 @@
     audio_flags_mask_t capturePolicy = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_flags_mask_t_mask(capturePolicyAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         ALOGV("%s() mAudioPolicyManager == NULL", __func__);
         return binderStatusFromStatusT(NO_INIT);
@@ -1502,7 +1502,7 @@
         ALOGV("mAudioPolicyManager == NULL");
         return binderStatusFromStatusT(AUDIO_OFFLOAD_NOT_SUPPORTED);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_offload_mode_t_AudioOffloadMode(
             mAudioPolicyManager->getOffloadSupport(info)));
@@ -1527,7 +1527,7 @@
 
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
     return Status::ok();
 }
@@ -1563,7 +1563,7 @@
     std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
     unsigned int generation;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1591,7 +1591,7 @@
 
 Status AudioPolicyService::listDeclaredDevicePorts(media::AudioPortRole role,
                                                     std::vector<media::AudioPortFw>* _aidl_return) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1603,7 +1603,7 @@
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1630,7 +1630,7 @@
             aidl2legacy_int32_t_audio_port_handle_t(handleAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1649,7 +1649,7 @@
 {
     audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1674,7 +1674,7 @@
     std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
     unsigned int generation;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1712,7 +1712,7 @@
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1730,7 +1730,7 @@
     audio_devices_t device;
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         if (mAudioPolicyManager == NULL) {
             return binderStatusFromStatusT(NO_INIT);
         }
@@ -1752,7 +1752,7 @@
 {
     audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1771,7 +1771,7 @@
             convertRange(mixesAidl.begin(), mixesAidl.begin() + size, std::back_inserter(mixes),
                          aidl2legacy_AudioMix)));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
 
     // loopback|render only need a MediaProjection (checked in caller AudioService.java)
     bool needModifyAudioRouting = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
@@ -1812,9 +1812,26 @@
     }
 }
 
+Status
+AudioPolicyService::getRegisteredPolicyMixes(std::vector<::android::media::AudioMix>* mixesAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    std::vector<AudioMix> mixes;
+    int status = mAudioPolicyManager->getRegisteredPolicyMixes(mixes);
+
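+    // Convert each legacy AudioMix to its AIDL representation before returning to the caller.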
+    for (const auto& mix : mixes) {
+        media::AudioMix aidlMix = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_AudioMix(mix));
+        mixesAidl->push_back(aidlMix);
+    }
+
+    return binderStatusFromStatusT(status);
+}
+
 Status AudioPolicyService::updatePolicyMixes(
         const ::std::vector<::android::media::AudioMixUpdate>& updates) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     for (const auto& update : updates) {
         AudioMix mix = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_AudioMix(update.audioMix));
         std::vector<AudioMixMatchCriterion> newCriteria =
@@ -1836,7 +1853,7 @@
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1850,7 +1867,7 @@
 Status AudioPolicyService::removeUidDeviceAffinities(int32_t uidAidl) {
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1869,7 +1886,7 @@
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1883,7 +1900,7 @@
 Status AudioPolicyService::removeUserIdDeviceAffinities(int32_t userIdAidl) {
     int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if(!modifyAudioRoutingAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -1907,7 +1924,7 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "68953950")));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1928,7 +1945,7 @@
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1944,7 +1961,7 @@
     if (!settingsAllowed()) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->setMasterMono(mono));
 }
@@ -1954,7 +1971,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->getMasterMono(_aidl_return));
 }
@@ -1972,7 +1989,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->getStreamVolumeDB(stream, index, device);
     return Status::ok();
@@ -1993,7 +2010,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getSurroundFormats(&numSurroundFormats, surroundFormats.get(),
@@ -2024,7 +2041,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getReportedSurroundFormats(
@@ -2046,7 +2063,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
@@ -2066,7 +2083,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(
             mAudioPolicyManager->setSurroundFormatEnabled(audioFormat, enabled));
@@ -2089,7 +2106,7 @@
     std::vector<uid_t> uids;
     RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(uidsAidl, uids));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mUidPolicy->setAssistantUids(uids);
     return Status::ok();
 }
@@ -2099,7 +2116,7 @@
     std::vector<uid_t> activeUids;
     RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(activeUidsAidl, activeUids));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mUidPolicy->setActiveAssistantUids(activeUids);
     return Status::ok();
 }
@@ -2109,7 +2126,7 @@
     std::vector<uid_t> uids;
     RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(uidsAidl, uids));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mUidPolicy->setA11yUids(uids);
     return Status::ok();
 }
@@ -2117,7 +2134,7 @@
 Status AudioPolicyService::setCurrentImeUid(int32_t uidAidl)
 {
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mUidPolicy->setCurrentImeUid(uid);
     return Status::ok();
 }
@@ -2127,7 +2144,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isHapticPlaybackSupported();
     return Status::ok();
@@ -2138,7 +2155,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isUltrasoundSupported();
     return Status::ok();
@@ -2149,7 +2166,7 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isHotwordStreamSupported(lookbackAudio);
     return Status::ok();
@@ -2162,7 +2179,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(mAudioPolicyManager->listAudioProductStrategies(strategies)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2182,7 +2199,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getProductStrategyFromAudioAttributes(
                     aa, productStrategy, fallbackOnDefault)));
@@ -2197,7 +2214,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(mAudioPolicyManager->listAudioVolumeGroups(groups)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2216,7 +2233,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(
                     mAudioPolicyManager->getVolumeGroupFromAudioAttributes(
@@ -2227,7 +2244,7 @@
 
 Status AudioPolicyService::setRttEnabled(bool enabled)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mUidPolicy->setRttEnabled(enabled);
     return Status::ok();
 }
@@ -2237,7 +2254,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     *_aidl_return = mAudioPolicyManager->isCallScreenModeSupported();
     return Status::ok();
@@ -2258,7 +2275,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     status_t status = mAudioPolicyManager->setDevicesRoleForStrategy(strategy, role, devices);
     if (status == NO_ERROR) {
        onCheckSpatializer_l();
@@ -2281,7 +2298,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     status_t status = mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role, devices);
     if (status == NO_ERROR) {
        onCheckSpatializer_l();
@@ -2298,7 +2315,7 @@
    if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     status_t status = mAudioPolicyManager->clearDevicesRoleForStrategy(strategy, role);
     if (status == NO_ERROR) {
        onCheckSpatializer_l();
@@ -2319,7 +2336,7 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDevicesForRoleAndStrategy(strategy, role, devices)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2349,7 +2366,7 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->setDevicesRoleForCapturePreset(audioSource, role, devices));
 }
@@ -2369,7 +2386,7 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->addDevicesRoleForCapturePreset(audioSource, role, devices));
 }
@@ -2389,7 +2406,7 @@
    if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
 }
@@ -2404,7 +2421,7 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->clearDevicesRoleForCapturePreset(audioSource, role));
 }
@@ -2422,7 +2439,7 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2469,7 +2486,7 @@
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     *_aidl_return = mAudioPolicyManager->canBeSpatialized(&attr, &config, devices);
     return Status::ok();
 }
@@ -2488,7 +2505,7 @@
             aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     *_aidl_return = static_cast<media::AudioDirectMode>(
             VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_direct_mode_t_int32_t_mask(
                     mAudioPolicyManager->getDirectPlaybackSupport(&attr, &config))));
@@ -2505,7 +2522,7 @@
             aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     AudioProfileVector audioProfiles;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDirectProfilesForAttributes(&attr, audioProfiles)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2525,7 +2542,7 @@
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
     std::vector<audio_mixer_attributes_t> mixerAttrs;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(mAudioPolicyManager->getSupportedMixerAttributes(
                     portId, mixerAttrs)));
@@ -2553,7 +2570,7 @@
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->setPreferredMixerAttributes(&attr, portId, uid, &mixerAttr));
 }
@@ -2571,7 +2588,7 @@
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     audio_mixer_attributes_t mixerAttr = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(mAudioPolicyManager->getPreferredMixerAttributes(
@@ -2595,7 +2612,7 @@
     audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     return binderStatusFromStatusT(
             mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
 }
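
The change running throughout this patch swaps every `Mutex::Autolock _l(mLock)` for `audio_utils::lock_guard _l(mMutex)`; both are RAII scoped locks, so the guarded regions behave the same and the lock is released on every return path. A minimal sketch of the idiom, using the standard-library types as stand-ins for the audio_utils wrappers (the class and names below are illustrative, not part of this patch):

```cpp
#include <mutex>
#include <vector>

// Illustrative only: std::mutex / std::lock_guard stand in for the
// audio_utils::mutex / audio_utils::lock_guard types used in the patch.
class MixRegistry {
public:
    void add(int mix) {
        std::lock_guard<std::mutex> _l(mMutex);  // scoped: unlocks on return or throw
        mMixes.push_back(mix);
    }

    std::vector<int> snapshot() const {
        std::lock_guard<std::mutex> _l(mMutex);
        return mMixes;  // copy out while holding the lock
    }

private:
    mutable std::mutex mMutex;  // mutable so const readers can lock
    std::vector<int> mMixes;
};
```
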
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 5d3788d..e95147e 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -166,7 +166,8 @@
 BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
-
+BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+                                                     \
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
     using Code = int;
@@ -265,7 +266,7 @@
             .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
             .record(); });
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
 
         // start audio commands thread
         mAudioCommandThread = new AudioCommandThread(String8("ApmAudio"), this);
@@ -280,11 +281,11 @@
 
     // load audio processing modules
     const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
-    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);
-    sp<UidPolicy> uidPolicy = new UidPolicy(this);
-    sp<SensorPrivacyPolicy> sensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+    auto audioPolicyEffects = sp<AudioPolicyEffects>::make(effectsFactoryHal);
+    auto uidPolicy = sp<UidPolicy>::make(this);
+    auto sensorPrivacyPolicy = sp<SensorPrivacyPolicy>::make(this);
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         mAudioPolicyEffects = audioPolicyEffects;
         mUidPolicy = uidPolicy;
         mSensorPrivacyPolicy = sensorPrivacyPolicy;
@@ -294,16 +295,16 @@
 
     // Create spatializer if supported
     if (mAudioPolicyManager != nullptr) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         const audio_attributes_t attr = attributes_initializer(AUDIO_USAGE_MEDIA);
         AudioDeviceTypeAddrVector devices;
         bool hasSpatializer = mAudioPolicyManager->canBeSpatialized(&attr, nullptr, devices);
         if (hasSpatializer) {
             // Unlock as Spatializer::create() will use the callback and acquire the
             // AudioPolicyService_Mutex.
-            mLock.unlock();
+            mMutex.unlock();
             mSpatializer = Spatializer::create(this, effectsFactoryHal);
-            mLock.lock();
+            mMutex.lock();
         }
         if (mSpatializer == nullptr) {
             // No spatializer created, signal the reason: NO_INIT a failure, OK means intended.
@@ -312,9 +313,16 @@
         }
     }
     AudioSystem::audioPolicyReady();
-    // AudioFlinger will handle effect creation and register these effects on audio_policy
-    // service. Hence, audio_policy service must be ready.
-    audioPolicyEffects->setDefaultDeviceEffects();
+}
+
+void AudioPolicyService::onAudioSystemReady() {
+    sp<AudioPolicyEffects> audioPolicyEffects;
+    {
+        audio_utils::lock_guard _l(mMutex);
+
+        audioPolicyEffects = mAudioPolicyEffects;
+    }
+    audioPolicyEffects->initDefaultDeviceEffects();
 }
 
 void AudioPolicyService::unloadAudioPolicyManager()
@@ -356,7 +364,7 @@
         ALOGW("%s got NULL client", __FUNCTION__);
         return Status::ok();
     }
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
 
     uid_t uid = IPCThreadState::self()->getCallingUid();
     pid_t pid = IPCThreadState::self()->getCallingPid();
@@ -379,7 +387,7 @@
 
 Status AudioPolicyService::setAudioPortCallbacksEnabled(bool enabled)
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
 
     uid_t uid = IPCThreadState::self()->getCallingUid();
     pid_t pid = IPCThreadState::self()->getCallingPid();
@@ -394,7 +402,7 @@
 
 Status AudioPolicyService::setAudioVolumeGroupCallbacksEnabled(bool enabled)
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
 
     uid_t uid = IPCThreadState::self()->getCallingUid();
     pid_t pid = IPCThreadState::self()->getCallingPid();
@@ -412,7 +420,7 @@
 {
     bool hasSameUid = false;
     {
-        Mutex::Autolock _l(mNotificationClientsLock);
+        audio_utils::lock_guard _l(mNotificationClientsMutex);
         int64_t token = ((int64_t)uid<<32) | pid;
         mNotificationClients.removeItem(token);
         for (size_t i = 0; i < mNotificationClients.size(); i++) {
@@ -423,7 +431,7 @@
         }
     }
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         if (mAudioPolicyManager && !hasSameUid) {
             // called from binder death notification: no need to clear caller identity
             mAudioPolicyManager->releaseResourcesForUid(uid);
@@ -438,7 +446,7 @@
 
 void AudioPolicyService::doOnAudioPortListUpdate()
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onAudioPortListUpdate();
     }
@@ -451,7 +459,7 @@
 
 void AudioPolicyService::doOnAudioPatchListUpdate()
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onAudioPatchListUpdate();
     }
@@ -464,7 +472,7 @@
 
 void AudioPolicyService::doOnAudioVolumeGroupChanged(volume_group_t group, int flags)
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onAudioVolumeGroupChanged(group, flags);
     }
@@ -479,7 +487,7 @@
 
 void AudioPolicyService::doOnDynamicPolicyMixStateUpdate(const String8& regId, int32_t state)
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onDynamicPolicyMixStateUpdate(regId, state);
     }
@@ -509,7 +517,7 @@
                                                   audio_patch_handle_t patchHandle,
                                                   audio_source_t source)
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onRecordingConfigurationUpdate(event, clientInfo,
                 clientConfig, clientEffects, deviceConfig, effects, patchHandle, source);
@@ -523,7 +531,7 @@
 
 void AudioPolicyService::doOnRoutingUpdated()
 {
-  Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onRoutingUpdated();
     }
@@ -536,7 +544,7 @@
 
 void AudioPolicyService::doOnVolumeRangeInitRequest()
 {
-    Mutex::Autolock _l(mNotificationClientsLock);
+    audio_utils::lock_guard _l(mNotificationClientsMutex);
     for (size_t i = 0; i < mNotificationClients.size(); i++) {
         mNotificationClients.valueAt(i)->onVolumeRangeInitRequest();
     }
@@ -544,7 +552,7 @@
 
 void AudioPolicyService::onCheckSpatializer()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     onCheckSpatializer_l();
 }
 
@@ -568,7 +576,7 @@
             const audio_attributes_t attr = attributes_initializer(AUDIO_USAGE_MEDIA);
             audio_config_base_t config = mSpatializer->getAudioInConfig();
 
-            Mutex::Autolock _l(mLock);
+            audio_utils::lock_guard _l(mMutex);
             status_t status =
                     mAudioPolicyManager->getSpatializerOutput(&config, &attr, &newOutput);
             ALOGV("%s currentOutput %d newOutput %d channel_mask %#x",
@@ -577,13 +585,13 @@
                 return;
             }
             size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
-            mLock.unlock();
+            mMutex.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
             if (status == NO_ERROR && newOutput != AUDIO_IO_HANDLE_NONE) {
                 status = mSpatializer->attachOutput(newOutput, numActiveTracks);
             }
-            mLock.lock();
+            mMutex.lock();
             if (status != NO_ERROR) {
                 mAudioPolicyManager->releaseSpatializerOutput(newOutput);
             }
@@ -592,7 +600,7 @@
             audio_io_handle_t output = mSpatializer->detachOutput();
 
             if (output != AUDIO_IO_HANDLE_NONE) {
-                Mutex::Autolock _l(mLock);
+                audio_utils::lock_guard _l(mMutex);
                 mAudioPolicyManager->releaseSpatializerOutput(output);
             }
         }
@@ -627,7 +635,7 @@
     audio_io_handle_t output = mSpatializer->getOutput();
     size_t activeClients;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         activeClients = countActiveClientsOnOutput_l(output);
     }
     mSpatializer->updateActiveTracks(activeClients);
@@ -783,12 +791,8 @@
             IPCThreadState::self()->getCallingPid());
 }
 
-static bool dumpTryLock(Mutex& mutex) ACQUIRE(mutex) NO_THREAD_SAFETY_ANALYSIS
-{
-    return mutex.timedLock(kDumpLockTimeoutNs) == NO_ERROR;
-}
-
-static void dumpReleaseLock(Mutex& mutex, bool locked) RELEASE(mutex) NO_THREAD_SAFETY_ANALYSIS
+static void dumpReleaseLock(audio_utils::mutex& mutex, bool locked)
+        RELEASE(mutex) NO_THREAD_SAFETY_ANALYSIS
 {
     if (locked) mutex.unlock();
 }
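
With `dumpTryLock()` gone, the dump paths below call `mMutex.try_lock(kDumpLockTimeoutNs)` directly and keep `dumpReleaseLock()` so the lock is unlocked only when it was actually obtained; a wedged lock holder therefore cannot hang dumpsys. The same idiom written against `std::timed_mutex` (a stand-in for the audio_utils mutex used here):

```cpp
#include <chrono>
#include <cstdio>
#include <mutex>

std::timed_mutex gStateMutex;

void dumpState(FILE* out) {
    // Bounded wait: if the owner is stuck we still dump, just without the lock.
    const bool locked = gStateMutex.try_lock_for(std::chrono::seconds(1));
    if (!locked) {
        std::fprintf(out, "state may be deadlocked; dumping anyway\n");
    }
    std::fprintf(out, "...internal state...\n");
    if (locked) {
        gStateMutex.unlock();  // release only what was actually acquired
    }
}
```
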
@@ -825,7 +829,7 @@
 
 void AudioPolicyService::updateUidStates()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     updateUidStates_l();
 }
 
@@ -1027,7 +1031,7 @@
         bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
             current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
 
-        auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
+        auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mMutex) {
             uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
                 recordClient->attributionSource.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
@@ -1205,11 +1209,12 @@
 }
 
 status_t AudioPolicyService::dump(int fd, const Vector<String16>& args __unused)
+NO_THREAD_SAFETY_ANALYSIS  // update for trylock.
 {
     if (!dumpAllowed()) {
         dumpPermissionDenial(fd);
     } else {
-        const bool locked = dumpTryLock(mLock);
+        const bool locked = mMutex.try_lock(kDumpLockTimeoutNs);
         if (!locked) {
             String8 result(kDeadlockedString);
             write(fd, result.c_str(), result.size());
@@ -1238,7 +1243,7 @@
 
         mPackageManager.dump(fd);
 
-        dumpReleaseLock(mLock, locked);
+        dumpReleaseLock(mMutex, locked);
 
         if (mSpatializer != nullptr) {
             std::string dumpString = mSpatializer->toString(1 /* level */);
@@ -1351,7 +1356,8 @@
         case TRANSACTION_getDevicesForRoleAndCapturePreset:
         case TRANSACTION_getSpatializer:
         case TRANSACTION_setPreferredMixerAttributes:
-        case TRANSACTION_clearPreferredMixerAttributes: {
+        case TRANSACTION_clearPreferredMixerAttributes:
+        case TRANSACTION_getRegisteredPolicyMixes: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1483,7 +1489,7 @@
 
     sp<UidPolicy> uidPolicy;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         uidPolicy = mUidPolicy;
     }
     if (uidPolicy) {
@@ -1512,7 +1518,7 @@
 
     sp<UidPolicy> uidPolicy;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         uidPolicy = mUidPolicy;
     }
     if (uidPolicy) {
@@ -1541,7 +1547,7 @@
 
     sp<UidPolicy> uidPolicy;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         uidPolicy = mUidPolicy;
     }
     if (uidPolicy) {
@@ -1579,7 +1585,7 @@
             ActivityManager::PROCESS_STATE_UNKNOWN,
             String16("audioserver"));
     if (!res) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         mObserverRegistered = true;
     } else {
         ALOGE("UidPolicy::registerSelf linkToDeath failed: %d", res);
@@ -1591,12 +1597,12 @@
 void AudioPolicyService::UidPolicy::unregisterSelf() {
     mAm.unlinkToDeath(this);
     mAm.unregisterUidObserver(this);
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mObserverRegistered = false;
 }
 
 void AudioPolicyService::UidPolicy::binderDied(__unused const wp<IBinder> &who) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     mCachedUids.clear();
     mObserverRegistered = false;
 }
@@ -1604,7 +1610,7 @@
 void AudioPolicyService::UidPolicy::checkRegistered() {
     bool needToReregister = false;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         needToReregister = !mObserverRegistered;
     }
     if (needToReregister) {
@@ -1617,7 +1623,7 @@
     if (isServiceUid(uid)) return true;
     checkRegistered();
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         auto overrideIter = mOverrideUids.find(uid);
         if (overrideIter != mOverrideUids.end()) {
             return overrideIter->second.first;
@@ -1632,7 +1638,7 @@
     ActivityManager am;
     bool active = am.isUidActive(uid, String16("audioserver"));
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         mCachedUids.insert(std::pair<uid_t,
                            std::pair<bool, int>>(uid, std::pair<bool, int>(active,
                                                       ActivityManager::PROCESS_STATE_UNKNOWN)));
@@ -1646,7 +1652,7 @@
     }
     checkRegistered();
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         auto overrideIter = mOverrideUids.find(uid);
         if (overrideIter != mOverrideUids.end()) {
             if (overrideIter->second.first) {
@@ -1681,7 +1687,7 @@
         state = am.getUidProcessState(uid, String16("audioserver"));
     }
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         mCachedUids.insert(std::pair<uid_t,
                            std::pair<bool, int>>(uid, std::pair<bool, int>(active, state)));
     }
@@ -1736,7 +1742,7 @@
     bool wasActive = isUidActive(uid);
     int previousState = getUidState(uid);
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         updateUidLocked(uids, uid, active, state, insert);
     }
     if (wasActive != isUidActive(uid) || state != previousState) {
@@ -1771,7 +1777,7 @@
 }
 
 bool AudioPolicyService::UidPolicy::isA11yOnTop() {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mMutex);
     for (const auto &uid : mCachedUids) {
         if (!isA11yUid(uid.first)) {
             continue;
@@ -1902,7 +1908,7 @@
 {
     nsecs_t waitTime = -1;
 
-    mLock.lock();
+    audio_utils::unique_lock ul(mMutex);
     while (!exitPending())
     {
         sp<AudioPolicyService> svc;
@@ -1923,27 +1929,27 @@
                     VolumeData *data = (VolumeData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set volume stream %d, \
                             volume %f, output %d", data->mStream, data->mVolume, data->mIO);
-                    mLock.unlock();
+                    ul.unlock();
                     command->mStatus = AudioSystem::setStreamVolume(data->mStream,
                                                                     data->mVolume,
                                                                     data->mIO);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case SET_PARAMETERS: {
                     ParametersData *data = (ParametersData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
                             data->mKeyValuePairs.c_str(), data->mIO);
-                    mLock.unlock();
+                    ul.unlock();
                     command->mStatus = AudioSystem::setParameters(data->mIO, data->mKeyValuePairs);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case SET_VOICE_VOLUME: {
                     VoiceVolumeData *data = (VoiceVolumeData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set voice volume volume %f",
                             data->mVolume);
-                    mLock.unlock();
+                    ul.unlock();
                     command->mStatus = AudioSystem::setVoiceVolume(data->mVolume);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case STOP_OUTPUT: {
                     StopOutputData *data = (StopOutputData *)command->mParam.get();
@@ -1953,9 +1959,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doStopOutput(data->mPortId);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case RELEASE_OUTPUT: {
                     ReleaseOutputData *data = (ReleaseOutputData *)command->mParam.get();
@@ -1965,9 +1971,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doReleaseOutput(data->mPortId);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case CREATE_AUDIO_PATCH: {
                     CreateAudioPatchData *data = (CreateAudioPatchData *)command->mParam.get();
@@ -1976,9 +1982,9 @@
                     if (af == 0) {
                         command->mStatus = PERMISSION_DENIED;
                     } else {
-                        mLock.unlock();
+                        ul.unlock();
                         command->mStatus = af->createAudioPatch(&data->mPatch, &data->mHandle);
-                        mLock.lock();
+                        ul.lock();
                     }
                     } break;
                 case RELEASE_AUDIO_PATCH: {
@@ -1988,9 +1994,9 @@
                     if (af == 0) {
                         command->mStatus = PERMISSION_DENIED;
                     } else {
-                        mLock.unlock();
+                        ul.unlock();
                         command->mStatus = af->releaseAudioPatch(data->mHandle);
-                        mLock.lock();
+                        ul.lock();
                     }
                     } break;
                 case UPDATE_AUDIOPORT_LIST: {
@@ -1999,9 +2005,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnAudioPortListUpdate();
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case UPDATE_AUDIOPATCH_LIST: {
                     ALOGV("AudioCommandThread() processing update audio patch list");
@@ -2009,9 +2015,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnAudioPatchListUpdate();
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case CHANGED_AUDIOVOLUMEGROUP: {
                     AudioVolumeGroupData *data =
@@ -2021,9 +2027,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnAudioVolumeGroupChanged(data->mGroup, data->mFlags);
-                    mLock.lock();
+                    ul.lock();
                     }break;
                 case SET_AUDIOPORT_CONFIG: {
                     SetAudioPortConfigData *data = (SetAudioPortConfigData *)command->mParam.get();
@@ -2032,9 +2038,9 @@
                     if (af == 0) {
                         command->mStatus = PERMISSION_DENIED;
                     } else {
-                        mLock.unlock();
+                        ul.unlock();
                         command->mStatus = af->setAudioPortConfig(&data->mConfig);
-                        mLock.lock();
+                        ul.lock();
                     }
                     } break;
                 case DYN_POLICY_MIX_STATE_UPDATE: {
@@ -2046,9 +2052,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnDynamicPolicyMixStateUpdate(data->mRegId, data->mState);
-                    mLock.lock();
+                    ul.lock();
                     } break;
                 case RECORDING_CONFIGURATION_UPDATE: {
                     RecordingConfigurationUpdateData *data =
@@ -2058,21 +2064,21 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnRecordingConfigurationUpdate(data->mEvent, &data->mClientInfo,
                             &data->mClientConfig, data->mClientEffects,
                             &data->mDeviceConfig, data->mEffects,
                             data->mPatchHandle, data->mSource);
-                    mLock.lock();
+                    ul.lock();
                     } break;
                 case SET_EFFECT_SUSPENDED: {
                     SetEffectSuspendedData *data = (SetEffectSuspendedData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set effect suspended");
                     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
                     if (af != 0) {
-                        mLock.unlock();
+                        ul.unlock();
                         af->setEffectSuspended(data->mEffectId, data->mSessionId, data->mSuspended);
-                        mLock.lock();
+                        ul.lock();
                     }
                     } break;
                 case AUDIO_MODULES_UPDATE: {
@@ -2081,9 +2087,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnNewAudioModulesAvailable();
-                    mLock.lock();
+                    ul.lock();
                     } break;
                 case ROUTING_UPDATED: {
                     ALOGV("AudioCommandThread() processing routing update");
@@ -2091,9 +2097,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnRoutingUpdated();
-                    mLock.lock();
+                    ul.lock();
                     } break;
 
                 case UPDATE_UID_STATES: {
@@ -2102,9 +2108,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->updateUidStates();
-                    mLock.lock();
+                    ul.lock();
                     } break;
 
                 case CHECK_SPATIALIZER_OUTPUT: {
@@ -2113,9 +2119,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnCheckSpatializer();
-                    mLock.lock();
+                    ul.lock();
                     } break;
 
                 case UPDATE_ACTIVE_SPATIALIZER_TRACKS: {
@@ -2124,9 +2130,9 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnUpdateActiveSpatializerTracks();
-                    mLock.lock();
+                    ul.lock();
                     } break;
 
                 case VOL_RANGE_INIT_REQUEST: {
@@ -2135,28 +2141,28 @@
                     if (svc == 0) {
                         break;
                     }
-                    mLock.unlock();
+                    ul.unlock();
                     svc->doOnVolumeRangeInitRequest();
-                    mLock.lock();
+                    ul.lock();
                     } break;
 
                 default:
                     ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
                 }
                 {
-                    Mutex::Autolock _l(command->mLock);
+                    audio_utils::lock_guard _l(command->mMutex);
                     if (command->mWaitStatus) {
                         command->mWaitStatus = false;
-                        command->mCond.signal();
+                        command->mCond.notify_one();
                     }
                 }
                 waitTime = -1;
-                // release mLock before releasing strong reference on the service as
+                // release ul before releasing strong reference on the service as
                 // AudioPolicyService destructor calls AudioCommandThread::exit() which
-                // acquires mLock.
-                mLock.unlock();
+                // acquires mMutex.
+                ul.unlock();
                 svc.clear();
-                mLock.lock();
+                ul.lock();
             } else {
                 waitTime = mAudioCommands[0]->mTime - curTime;
                 break;
@@ -2174,9 +2180,10 @@
         if (!exitPending()) {
             ALOGV("AudioCommandThread() going to sleep");
             if (waitTime == -1) {
-                mWaitWorkCV.wait(mLock);
+                mWaitWorkCV.wait(ul);
             } else {
-                mWaitWorkCV.waitRelative(mLock, waitTime);
+                // discard return value.
+                mWaitWorkCV.wait_for(ul, std::chrono::nanoseconds(waitTime));
             }
         }
     }
@@ -2184,17 +2191,17 @@
     if (!mAudioCommands.isEmpty()) {
         release_wake_lock(mName.c_str());
     }
-    mLock.unlock();
     return false;
 }
 
 status_t AudioPolicyService::AudioCommandThread::dump(int fd)
+NO_THREAD_SAFETY_ANALYSIS  // trylock
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
     String8 result;
 
-    const bool locked = dumpTryLock(mLock);
+    const bool locked = mMutex.try_lock(kDumpLockTimeoutNs);
     if (!locked) {
         String8 result2(kCmdDeadlockedString);
         write(fd, result2.c_str(), result2.size());
@@ -2217,7 +2224,7 @@
 
     write(fd, result.c_str(), result.size());
 
-    dumpReleaseLock(mLock, locked);
+    dumpReleaseLock(mMutex, locked);
 
     return NO_ERROR;
 }
@@ -2475,14 +2482,15 @@
 status_t AudioPolicyService::AudioCommandThread::sendCommand(sp<AudioCommand>& command, int delayMs)
 {
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         insertCommand_l(command, delayMs);
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
-    Mutex::Autolock _l(command->mLock);
+    audio_utils::unique_lock ul(command->mMutex);
     while (command->mWaitStatus) {
         nsecs_t timeOutNs = kAudioCommandTimeoutNs + milliseconds(delayMs);
-        if (command->mCond.waitRelative(command->mLock, timeOutNs) != NO_ERROR) {
+        if (command->mCond.wait_for(
+                ul, std::chrono::nanoseconds(timeOutNs), getTid()) == std::cv_status::timeout) {
             command->mStatus = TIMED_OUT;
             command->mWaitStatus = false;
         }
@@ -2490,7 +2498,7 @@
     return command->mStatus;
 }
 
-// insertCommand_l() must be called with mLock held
+// insertCommand_l() must be called with mMutex held
 void AudioPolicyService::AudioCommandThread::insertCommand_l(sp<AudioCommand>& command, int delayMs)
 {
     ssize_t i;  // not size_t because i will count down to -1
@@ -2678,9 +2686,9 @@
 {
     ALOGV("AudioCommandThread::exit");
     {
-        AutoMutex _l(mLock);
+        audio_utils::lock_guard _l(mMutex);
         requestExit();
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
     // Note that we can call it from the thread loop if all other references have been released
     // but it will safely return WOULD_BLOCK in this case
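
In the command thread above, `Condition::signal()/waitRelative()` becomes `audio_utils::condition_variable::notify_one()/wait_for()`, and `sendCommand()` now detects a timeout through `std::cv_status::timeout` rather than a status code. The equivalent timed-wait loop with the standard types looks roughly like this (generic names, not the patch's classes):

```cpp
#include <chrono>
#include <condition_variable>
#include <mutex>

// Illustrative stand-in for AudioCommand: the worker clears waitStatus and
// notifies cond once the command has been processed.
struct Command {
    std::mutex mutex;
    std::condition_variable cond;
    bool waitStatus = true;
    int status = 0;
};

int waitForCommand(Command& cmd, std::chrono::nanoseconds timeout) {
    std::unique_lock<std::mutex> ul(cmd.mutex);
    while (cmd.waitStatus) {
        if (cmd.cond.wait_for(ul, timeout) == std::cv_status::timeout) {
            cmd.status = -1;          // stand-in for TIMED_OUT
            cmd.waitStatus = false;   // give up waiting; the worker may still finish later
        }
    }
    return cmd.status;
}
```
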
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index aaf0b1b..7aa80cf 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -20,6 +20,7 @@
 #include <android/media/BnAudioPolicyService.h>
 #include <android/media/GetSpatializerResponse.h>
 #include <android-base/thread_annotations.h>
+#include <audio_utils/mutex.h>
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
 #include <cutils/compiler.h>
@@ -310,6 +311,8 @@
     binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
                                                  int32_t portId,
                                                  int32_t uid) override;
+    binder::Status getRegisteredPolicyMixes(
+            std::vector <::android::media::AudioMix>* mixes) override;
 
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
@@ -319,6 +322,9 @@
     // RefBase
     virtual     void        onFirstRef();
 
+    // Commence initialization when AudioSystem is ready.
+    void onAudioSystemReady();
+
     //
     // Helpers for the struct audio_policy_service_ops implementation.
     // This is used by the audio policy manager for certain operations that
@@ -387,10 +393,10 @@
      * by audio policy manager and attach/detach the spatializer effect accordingly.
      */
     void onCheckSpatializer() override;
-    void onCheckSpatializer_l() REQUIRES(mLock);
+    void onCheckSpatializer_l() REQUIRES(mMutex);
     void doOnCheckSpatializer();
 
-    void onUpdateActiveSpatializerTracks_l() REQUIRES(mLock);
+    void onUpdateActiveSpatializerTracks_l() REQUIRES(mMutex);
     void doOnUpdateActiveSpatializerTracks();
 
 
@@ -402,14 +408,14 @@
                         AudioPolicyService() ANDROID_API;
     virtual             ~AudioPolicyService();
 
-            status_t dumpInternals(int fd) REQUIRES(mLock);
+    status_t dumpInternals(int fd) REQUIRES(mMutex);
 
     // Handles binder shell commands
     virtual status_t shellCommand(int in, int out, int err, Vector<String16>& args);
 
 
     // Sets whether the given UID records only silence
-    virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mLock);
+    virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mMutex);
 
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(Vector<String16>& args, int err);
@@ -435,9 +441,9 @@
                            const AttributionSourceState& attributionSource);
 
     void updateUidStates();
-    void updateUidStates_l() REQUIRES(mLock);
+    void updateUidStates_l() REQUIRES(mMutex);
 
-    void silenceAllRecordings_l() REQUIRES(mLock);
+    void silenceAllRecordings_l() REQUIRES(mMutex);
 
     static bool isVirtualSource(audio_source_t source);
 
@@ -510,11 +516,11 @@
         void checkRegistered();
 
         wp<AudioPolicyService> mService;
-        Mutex mLock;
+        audio_utils::mutex mMutex{audio_utils::MutexOrder::kUidPolicy_Mutex};
         ActivityManager mAm;
         bool mObserverRegistered = false;
-        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mLock);
-        std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mLock);
+        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mMutex);
+        std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mMutex);
         std::vector<uid_t> mAssistantUids;
         std::vector<uid_t> mActiveAssistantUids;
         std::vector<uid_t> mA11yUids;
@@ -539,6 +545,10 @@
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
 
+            binder::Status onSensorPrivacyStateChanged(int, int, int) {
+                return binder::Status::ok();
+            }
+
         private:
             wp<AudioPolicyService> mService;
             std::atomic_bool mSensorPrivacyEnabled = false;
@@ -641,8 +651,8 @@
 
             int mCommand;   // SET_VOLUME, SET_PARAMETERS...
             nsecs_t mTime;  // time stamp
-            Mutex mLock;    // mutex associated to mCond
-            Condition mCond; // condition for status return
+            audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioCommand_Mutex};
+            audio_utils::condition_variable mCond; // condition for status return
             status_t mStatus; // command status
             bool mWaitStatus; // true if caller is waiting for status
             sp<AudioCommandData> mParam;     // command specific parameter data
@@ -730,8 +740,8 @@
             bool mSuspended;
         };
 
-        Mutex   mLock;
-        Condition mWaitWorkCV;
+        mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kCommandThread_Mutex};
+        audio_utils::condition_variable mWaitWorkCV;
         Vector < sp<AudioCommand> > mAudioCommands; // list of pending commands
         sp<AudioCommand> mLastCommand;      // last processed command (used by dump)
         String8 mName;                      // string used by wake lock for delayed commands
@@ -996,12 +1006,12 @@
      * @return the number of active tracks.
      */
     size_t countActiveClientsOnOutput_l(
-        audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mLock);
+            audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mMutex);
 
-    mutable Mutex mLock;    // prevents concurrent access to AudioPolicy manager functions changing
-                            // device connection state  or routing
-    // Note: lock acquisition order is always mLock > mEffectsLock:
-    // mLock protects AudioPolicyManager methods that can call into audio flinger
+    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyService_Mutex};
+    // prevents concurrent access to AudioPolicy manager functions changing
+    // device connection state or routing.
+    // mMutex protects AudioPolicyManager methods that can call into audio flinger
     // and possibly back into audio policy service and acquire mEffectsLock.
     sp<AudioCommandThread> mAudioCommandThread;     // audio commands thread
     sp<AudioCommandThread> mOutputCommandThread;    // process stop and release output
@@ -1009,29 +1019,30 @@
     AudioPolicyClient *mAudioPolicyClient;
     std::vector<audio_usage_t> mSupportedSystemUsages;
 
-    Mutex mNotificationClientsLock;
+    mutable audio_utils::mutex mNotificationClientsMutex{
+            audio_utils::MutexOrder::kAudioPolicyService_NotificationClientsMutex};
     DefaultKeyedVector<int64_t, sp<NotificationClient>> mNotificationClients
-        GUARDED_BY(mNotificationClientsLock);
+            GUARDED_BY(mNotificationClientsMutex);
     // Manage all effects configured in audio_effects.conf
-    // never hold AudioPolicyService::mLock when calling AudioPolicyEffects methods as
+    // never hold AudioPolicyService::mMutex when calling AudioPolicyEffects methods as
     // those can call back into AudioPolicyService methods and try to acquire the mutex
-    sp<AudioPolicyEffects> mAudioPolicyEffects GUARDED_BY(mLock);
-    audio_mode_t mPhoneState GUARDED_BY(mLock);
-    uid_t mPhoneStateOwnerUid GUARDED_BY(mLock);
+    sp<AudioPolicyEffects> mAudioPolicyEffects GUARDED_BY(mMutex);
+    audio_mode_t mPhoneState GUARDED_BY(mMutex);
+    uid_t mPhoneStateOwnerUid GUARDED_BY(mMutex);
 
-    sp<UidPolicy> mUidPolicy GUARDED_BY(mLock);
-    sp<SensorPrivacyPolicy> mSensorPrivacyPolicy GUARDED_BY(mLock);
+    sp<UidPolicy> mUidPolicy GUARDED_BY(mMutex);
+    sp<SensorPrivacyPolicy> mSensorPrivacyPolicy GUARDED_BY(mMutex);
 
     DefaultKeyedVector<audio_port_handle_t, sp<AudioRecordClient>> mAudioRecordClients
-        GUARDED_BY(mLock);
+            GUARDED_BY(mMutex);
     DefaultKeyedVector<audio_port_handle_t, sp<AudioPlaybackClient>> mAudioPlaybackClients
-        GUARDED_BY(mLock);
+            GUARDED_BY(mMutex);
 
     MediaPackageManager mPackageManager; // To check allowPlaybackCapture
 
     CaptureStateNotifier mCaptureStateNotifier;
 
-    // created in onFirstRef() and never cleared: does not need to be guarded by mLock
+    // created in onFirstRef() and never cleared: does not need to be guarded by mMutex
     sp<Spatializer> mSpatializer;
 
     void *mLibraryHandle = nullptr;
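
The header keeps clang's thread-safety checking across the mutex switch: members are `GUARDED_BY(mMutex)`, the `_l`-suffixed methods are `REQUIRES(mMutex)`, and only the try-lock dump paths opt out via `NO_THREAD_SAFETY_ANALYSIS`. A compact sketch of how these annotations fit together; the macro definitions below are written out as an assumption for a self-contained example (they mirror what a thread-annotations header such as <android-base/thread_annotations.h> provides):

```cpp
// Compile with clang and -Wthread-safety to see the checks.
#include <mutex>

#define CAPABILITY(x)  __attribute__((capability(x)))
#define GUARDED_BY(x)  __attribute__((guarded_by(x)))
#define REQUIRES(...)  __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))

class CAPABILITY("mutex") Mutex {
public:
    void lock() ACQUIRE() { mStd.lock(); }
    void unlock() RELEASE() { mStd.unlock(); }
private:
    std::mutex mStd;
};

class Service {
public:
    void update() {
        mMutex.lock();
        update_l();          // ok: the analysis sees mMutex held
        mMutex.unlock();
    }
private:
    void update_l() REQUIRES(mMutex) { ++mState; }  // callers must hold mMutex
    Mutex mMutex;
    int mState GUARDED_BY(mMutex) = 0;              // access requires mMutex
};
```
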
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 90418a5..16f3a9a 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
 #include <audio_utils/fixedfft.h>
 #include <com_android_media_audio.h>
 #include <cutils/bitops.h>
+#include <cutils/properties.h>
 #include <hardware/sensors.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -49,12 +50,12 @@
 using aidl_utils::statusTFromBinderStatus;
 using android::content::AttributionSourceState;
 using binder::Status;
+using internal::ToString;
 using media::HeadTrackingMode;
 using media::Pose3f;
 using media::SensorPoseProvider;
 using media::audio::common::HeadTracking;
 using media::audio::common::Spatialization;
-using ::android::internal::ToString;
 
 using namespace std::chrono_literals;
 
@@ -229,7 +230,7 @@
         return;
     }
     auto latencyModesStrs = android::sysprop::BluetoothProperties::dsa_transport_preference();
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     // First load preferred low latency modes ordered from the property
     for (auto str : latencyModesStrs) {
         if (!str.has_value()) continue;
@@ -348,7 +349,8 @@
     bool activeLevelFound = false;
     for (const auto spatializationLevel : spatializationLevels) {
         if (!aidl_utils::isValidEnum(spatializationLevel)) {
-            ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+            ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+                  ToString(spatializationLevel).c_str());
             continue;
         }
         if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +377,8 @@
 
     for (const auto spatializationMode : spatializationModes) {
         if (!aidl_utils::isValidEnum(spatializationMode)) {
-            ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+            ALOGW("%s: ignoring spatializationMode:%s", __func__,
+                  ToString(spatializationMode).c_str());
             continue;
         }
         // we don't detect duplicates.
@@ -394,7 +397,13 @@
         return status;
     }
     for (const auto channelMask : channelMasks) {
-        if (!audio_is_channel_mask_spatialized(channelMask)) {
+        static const bool stereo_spatialization_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+        const bool channel_mask_spatialized =
+                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
+                ? audio_channel_mask_contains_stereo(channelMask)
+                : audio_is_channel_mask_spatialized(channelMask);
+        if (!channel_mask_spatialized) {
             ALOGW("%s: ignoring channelMask:%#x", __func__, channelMask);
             continue;
         }
@@ -406,27 +415,26 @@
         return BAD_VALUE;
     }
 
-    //TODO b/273373363: use AIDL enum when available
     if (com::android::media::audio::dsa_over_bt_le_audio()
             && mSupportsHeadTracking) {
-        mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
-        std::vector<uint8_t> headtrackingConnectionModes;
+        mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+        std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
         status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
                 &headtrackingConnectionModes);
         if (status == NO_ERROR) {
             for (const auto htConnectionMode : headtrackingConnectionModes) {
-                if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
-                        htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
-                    ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+                if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+                    htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+                    ALOGW("%s: ignoring HT connection mode:%s", __func__,
+                          ToString(htConnectionMode).c_str());
                     continue;
                 }
-                mSupportedHeadtrackingConnectionModes.insert(
-                        static_cast<headtracking_connection_t> (htConnectionMode));
+                mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
             }
             ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
-                    HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
-                        == mSupportedHeadtrackingConnectionModes.end(),
-                    "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+                    HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+                        mSupportedHeadtrackingConnectionModes.end(),
+                    "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
         }
     }
 
@@ -461,7 +469,7 @@
 
 /** Gets the channel mask, sampling rate and format set for the spatializer input. */
 audio_config_base_t Spatializer::getAudioInConfig() const {
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     // For now use highest supported channel count
     config.channel_mask = getMaxChannelMask(mChannelMasks, FCC_LIMIT);
@@ -470,7 +478,7 @@
 
 status_t Spatializer::registerCallback(
         const sp<media::INativeSpatializerCallback>& callback) {
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (callback == nullptr) {
         return BAD_VALUE;
     }
@@ -498,7 +506,7 @@
 // IBinder::DeathRecipient
 void Spatializer::binderDied(__unused const wp<IBinder> &who) {
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         mLevel = Spatialization::Level::NONE;
         mSpatializerCallback.clear();
     }
@@ -527,7 +535,7 @@
     sp<media::INativeSpatializerCallback> callback;
     bool levelChanged = false;
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         levelChanged = mLevel != level;
         mLevel = level;
         callback = mSpatializerCallback;
@@ -551,25 +559,25 @@
     if (level == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     *level = mLevel;
-    ALOGV("%s level %d", __func__, (int)*level);
+    ALOGV("%s level %s", __func__, ToString(*level).c_str());
     return Status::ok();
 }
 
 Status Spatializer::isHeadTrackingSupported(bool *supports) {
-    ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+    ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
     if (supports == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     *supports = mSupportsHeadTracking;
     return Status::ok();
 }
 
 Status Spatializer::getSupportedHeadTrackingModes(
         std::vector<HeadTracking::Mode>* modes) {
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     ALOGV("%s", __func__);
     if (modes == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
@@ -585,7 +593,7 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
     mLocalLog.log("%s with %s", __func__, ToString(mode).c_str());
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     switch (mode) {
         case HeadTracking::Mode::OTHER:
             return binderStatusFromStatusT(BAD_VALUE);
@@ -610,7 +618,7 @@
     if (mode == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     *mode = mActualHeadTrackingMode;
     ALOGV("%s mode %d", __func__, (int)*mode);
     return Status::ok();
@@ -620,7 +628,7 @@
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mPoseController != nullptr) {
         mPoseController->recenter();
     }
@@ -637,7 +645,7 @@
         ALOGW("Invalid screenToStage vector.");
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mPoseController != nullptr) {
         mLocalLog.log("%s with screenToStage %s", __func__,
                 media::VectorRecorder::toString<float>(screenToStage).c_str());
@@ -650,7 +658,7 @@
     ALOGV("%s", __func__);
     bool levelChanged = false;
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         if (mSpatializerCallback == nullptr) {
             return binderStatusFromStatusT(INVALID_OPERATION);
         }
@@ -674,7 +682,7 @@
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mHeadSensor != sensorHandle) {
         mLocalLog.log("%s with 0x%08x", __func__, sensorHandle);
         mHeadSensor = sensorHandle;
@@ -689,7 +697,7 @@
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mScreenSensor != sensorHandle) {
         mLocalLog.log("%s with 0x%08x", __func__, sensorHandle);
         mScreenSensor = sensorHandle;
@@ -708,7 +716,7 @@
     // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
     ALOGI_IF(angle != physicalToLogicalAngle,
             "%s: clamping %f to %f", __func__, physicalToLogicalAngle, angle);
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     mDisplayOrientation = angle;
     if (mPoseController != nullptr) {
         // This turns on the rate-limiter.
@@ -728,7 +736,7 @@
     // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
     ALOGI_IF(angle != hingeAngle,
             "%s: clamping %f to %f", __func__, hingeAngle, angle);
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     mHingeAngle = angle;
     if (mEngine != nullptr) {
         setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{angle});
@@ -739,7 +747,7 @@
 Status Spatializer::setFoldState(bool folded) {
     ALOGV("%s foldState %d", __func__, (int)folded);
     mLocalLog.log("%s with %d", __func__, (int)folded);
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     mFoldedState = folded;
     if (mEngine != nullptr) {
         // we don't suppress multiple calls with the same folded state - that's
@@ -761,7 +769,7 @@
 Status Spatializer::registerHeadTrackingCallback(
         const sp<media::ISpatializerHeadTrackingCallback>& callback) {
     ALOGV("%s callback %p", __func__, callback.get());
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
@@ -771,7 +779,7 @@
 
 Status Spatializer::setParameter(int key, const std::vector<unsigned char>& value) {
     ALOGV("%s key %d", __func__, key);
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     status_t status = INVALID_OPERATION;
     if (mEngine != nullptr) {
         status = setEffectParameter_l(key, value);
@@ -785,7 +793,7 @@
     if (value == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     status_t status = INVALID_OPERATION;
     if (mEngine != nullptr) {
         ALOGV("%s key %d mEngine %p", __func__, key, mEngine.get());
@@ -799,7 +807,7 @@
     if (output == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     *output = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_io_handle_t_int32_t(mOutput));
     ALOGV("%s got output %d", __func__, *output);
     return Status::ok();
@@ -837,7 +845,7 @@
     ALOGV("%s", __func__);
     sp<media::ISpatializerHeadTrackingCallback> callback;
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         callback = mHeadTrackingCallback;
         if (mEngine != nullptr) {
             setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
@@ -853,7 +861,7 @@
 }
 
 void Spatializer::onActualModeChange(HeadTrackingMode mode) {
-    std::string modeStr = media::toString(mode);
+    std::string modeStr = ToString(mode);
     ALOGV("%s(%s)", __func__, modeStr.c_str());
     sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
     msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -861,11 +869,11 @@
 }
 
 void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
-    ALOGV("%s(%d)", __func__, (int) mode);
+    ALOGV("%s(%s)", __func__, ToString(mode).c_str());
     sp<media::ISpatializerHeadTrackingCallback> callback;
     HeadTracking::Mode spatializerMode;
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         if (!mSupportsHeadTracking) {
             spatializerMode = HeadTracking::Mode::DISABLED;
         } else {
@@ -880,7 +888,7 @@
                     spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
                     break;
                 default:
-                    LOG_ALWAYS_FATAL("Unknown mode: %d", mode);
+                    LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
             }
         }
         mActualHeadTrackingMode = spatializerMode;
@@ -894,7 +902,7 @@
             }
         }
         callback = mHeadTrackingCallback;
-        mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+        mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
     }
     if (callback != nullptr) {
         callback->onHeadTrackingModeChanged(spatializerMode);
@@ -932,7 +940,7 @@
     sp<media::INativeSpatializerCallback> callback;
 
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         ALOGV("%s output %d mOutput %d", __func__, (int)output, (int)mOutput);
         mLocalLog.log("%s with output %d tracks %zu (mOutput %d)", __func__, (int)output,
                       numActiveTracks, (int)mOutput);
@@ -998,7 +1006,7 @@
     sp<media::INativeSpatializerCallback> callback;
 
     {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         mLocalLog.log("%s with output %d tracks %zu", __func__, (int)mOutput, mNumActiveTracks);
         ALOGV("%s mOutput %d", __func__, (int)mOutput);
         if (mOutput == AUDIO_IO_HANDLE_NONE) {
@@ -1032,7 +1040,7 @@
 
 void Spatializer::onSupportedLatencyModesChangedMsg(
         audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes) {
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     ALOGV("%s output %d mOutput %d num modes %zu",
             __func__, (int)output, (int)mOutput, modes.size());
     if (output == mOutput) {
@@ -1043,7 +1051,7 @@
 }
 
 void Spatializer::updateActiveTracks(size_t numActiveTracks) {
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mNumActiveTracks != numActiveTracks) {
         mLocalLog.log("%s from %zu to %zu", __func__, mNumActiveTracks, numActiveTracks);
         mNumActiveTracks = numActiveTracks;
@@ -1052,24 +1060,23 @@
     }
 }
 
-//TODO b/273373363: use AIDL enum when available
 audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
     if (!com::android::media::audio::dsa_over_bt_le_audio()) {
         return AUDIO_LATENCY_MODE_LOW;
     }
     // mSupportedLatencyModes is ordered according to system preferences loaded in
     // mOrderedLowLatencyModes
-    mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+    mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
     audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
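+    // When the preferred mode is DYNAMIC_SPATIAL_AUDIO_HARDWARE, pick the most direct sensor
+    // path the engine reported at init: tunnel first, then software, otherwise stay with
+    // framework-processed head tracking and fall back to the next latency mode.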
     if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
         if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
         } else if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
         } else {
             // if the engine does not support direct reading of IMU data, do not allow
             // DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1174,7 +1181,7 @@
 
 void Spatializer::calculateHeadPose() {
     ALOGV("%s", __func__);
-    std::lock_guard lock(mLock);
+    audio_utils::lock_guard lock(mMutex);
     if (mPoseController != nullptr) {
         mPoseController->calculateAsync();
     }
@@ -1193,7 +1200,7 @@
     bool needUnlock = false;
 
     prefixSpace += ' ';
-    if (!mLock.try_lock()) {
+    if (!mMutex.try_lock()) {
+        // dumpsys: even though try_lock failed, an information dump is still useful, although it
+        // may not be accurate
+        ss.append(prefixSpace).append("try_lock failed, dumpsys below may be INACCURATE!\n");
     } else {
@@ -1213,7 +1220,7 @@
         base::StringAppendF(&ss, " %s", ToString(mode).c_str());
     }
     base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
-                        media::toString(mDesiredHeadTrackingMode).c_str(),
+                        ToString(mDesiredHeadTrackingMode).c_str(),
                         ToString(mActualHeadTrackingMode).c_str());
 
     base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
@@ -1239,6 +1246,10 @@
     base::StringAppendF(&ss, "%sDisplayOrientation: %f\n", prefixSpace.c_str(),
                         mDisplayOrientation);
 
+    // 4. Show flag or property state.
+    base::StringAppendF(&ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
+            com_android_media_audio_stereo_spatialization() ? "true" : "false");
+
     ss.append(prefixSpace + "CommandLog:\n");
     ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
 
@@ -1258,7 +1269,7 @@
     }
 
     if (needUnlock) {
-        mLock.unlock();
+        mMutex.unlock();
     }
     return ss;
 }
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 123517e..355df18 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -23,6 +23,7 @@
 #include <android/media/audio/common/AudioLatencyMode.h>
 #include <android/media/audio/common/HeadTracking.h>
 #include <android/media/audio/common/Spatialization.h>
+#include <audio_utils/mutex.h>
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
@@ -148,7 +149,7 @@
 
     /** Level getter for use by local classes. */
     media::audio::common::Spatialization::Level getLevel() const {
-        std::lock_guard lock(mLock);
+        audio_utils::lock_guard lock(mMutex);
         return mLevel;
     }
 
@@ -161,7 +162,7 @@
      */
     audio_io_handle_t detachOutput();
     /** Returns the output stream the spatializer is attached to. */
-    audio_io_handle_t getOutput() const { std::lock_guard lock(mLock); return mOutput; }
+    audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
 
     void updateActiveTracks(size_t numActiveTracks);
 
@@ -261,7 +262,7 @@
      *  according to values vector size.
      */
     template<typename T>
-    status_t setEffectParameter_l(uint32_t type, const std::vector<T>& values) REQUIRES(mLock) {
+    status_t setEffectParameter_l(uint32_t type, const std::vector<T>& values) REQUIRES(mMutex) {
         static_assert(sizeof(T) <= sizeof(uint32_t), "The size of T must not exceed 32 bits");
 
         uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + values.size()];
@@ -286,7 +287,7 @@
      * The variant is for compound parameters with two values of different base types
      */
     template<typename P1, typename P2>
-    status_t setEffectParameter_l(uint32_t type, const P1 val1, const P2 val2) REQUIRES(mLock) {
+    status_t setEffectParameter_l(uint32_t type, const P1 val1, const P2 val2) REQUIRES(mMutex) {
         static_assert(sizeof(P1) <= sizeof(uint32_t), "The size of P1 must not exceed 32 bits");
         static_assert(sizeof(P2) <= sizeof(uint32_t), "The size of P2 must not exceed 32 bits");
 
@@ -314,7 +315,7 @@
      * by specifying values vector size.
      */
     template<typename T>
-    status_t getEffectParameter_l(uint32_t type, std::vector<T> *values) REQUIRES(mLock) {
+    status_t getEffectParameter_l(uint32_t type, std::vector<T> *values) REQUIRES(mMutex) {
         static_assert(sizeof(T) <= sizeof(uint32_t), "The size of T must not exceed 32 bits");
 
         uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + values->size()];
@@ -345,7 +346,7 @@
      * The variant is for compound parameters with two values of different base types
      */
     template<typename P1, typename P2>
-    status_t getEffectParameter_l(uint32_t type, P1 *val1, P2 *val2) REQUIRES(mLock) {
+    status_t getEffectParameter_l(uint32_t type, P1 *val1, P2 *val2) REQUIRES(mMutex) {
         static_assert(sizeof(P1) <= sizeof(uint32_t), "The size of P1 must not exceed 32 bits");
         static_assert(sizeof(P2) <= sizeof(uint32_t), "The size of P2 must not exceed 32 bits");
 
@@ -375,25 +376,25 @@
      * spatializer state and playback activity and configures the pose controller
      * accordingly.
      */
-    void checkSensorsState_l() REQUIRES(mLock);
+    void checkSensorsState_l() REQUIRES(mMutex);
 
     /**
      * Checks if the head pose controller should be created or destroyed according
      * to desired head tracking mode.
      */
-    void checkPoseController_l() REQUIRES(mLock);
+    void checkPoseController_l() REQUIRES(mMutex);
 
     /**
      * Checks if the spatializer effect should be enabled based on
      * playback activity and requested level.
      */
-    void checkEngineState_l() REQUIRES(mLock);
+    void checkEngineState_l() REQUIRES(mMutex);
 
     /**
      * Reset head tracking mode and recenter pose in engine: Called when the head tracking
      * is disabled.
      */
-    void resetEngineHeadPose_l() REQUIRES(mLock);
+    void resetEngineHeadPose_l() REQUIRES(mMutex);
 
     /** Read bluetooth.core.le.dsa_transport_preference property and populate the ordered list of
      * preferred low latency modes in mOrderedLowLatencyModes.
@@ -406,7 +407,7 @@
      * Note: Because MODE_FREE is not in mOrderedLowLatencyModes, it will always be at
      * the end of the list.
      */
-    void sortSupportedLatencyModes_l() REQUIRES(mLock);
+    void sortSupportedLatencyModes_l() REQUIRES(mMutex);
 
     /**
      * Called after enabling head tracking in the spatializer engine to indicate which
@@ -415,14 +416,14 @@
      * When the connection mode is direct to the sensor, the sensor ID is also communicated
      * to the spatializer engine.
      */
-    void setEngineHeadtrackingConnectionMode_l() REQUIRES(mLock);
+    void setEngineHeadtrackingConnectionMode_l() REQUIRES(mMutex);
 
     /**
      * Select the desired head tracking connection mode for the spatializer engine among the list
      * stored in mSupportedHeadtrackingConnectionModes at init time.
      * Also returns the desired low latency mode according to selected connection mode.
      */
-    audio_latency_mode_t selectHeadtrackingConnectionMode_l() REQUIRES(mLock);
+    audio_latency_mode_t selectHeadtrackingConnectionMode_l() REQUIRES(mMutex);
 
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
@@ -435,48 +436,48 @@
     const std::string mMetricsId = kDefaultMetricsId;
 
     /** Mutex protecting internal state */
-    mutable std::mutex mLock;
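+    // audio_utils::mutex is ordered (kSpatializer_Mutex), so the audio framework's mutex-order
+    // checking can presumably flag lock-ordering issues that a plain std::mutex would not.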
+    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kSpatializer_Mutex};
 
     /** Client AudioEffect for the engine */
-    sp<AudioEffect> mEngine GUARDED_BY(mLock);
+    sp<AudioEffect> mEngine GUARDED_BY(mMutex);
     /** Output stream the spatializer mixer thread is attached to */
-    audio_io_handle_t mOutput GUARDED_BY(mLock) = AUDIO_IO_HANDLE_NONE;
+    audio_io_handle_t mOutput GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
 
     /** Callback interface to the client (AudioService) controlling this Spatializer */
-    sp<media::INativeSpatializerCallback> mSpatializerCallback GUARDED_BY(mLock);
+    sp<media::INativeSpatializerCallback> mSpatializerCallback GUARDED_BY(mMutex);
 
     /** Callback interface for head tracking */
-    sp<media::ISpatializerHeadTrackingCallback> mHeadTrackingCallback GUARDED_BY(mLock);
+    sp<media::ISpatializerHeadTrackingCallback> mHeadTrackingCallback GUARDED_BY(mMutex);
 
     /** Requested spatialization level */
-    media::audio::common::Spatialization::Level mLevel GUARDED_BY(mLock) =
+    media::audio::common::Spatialization::Level mLevel GUARDED_BY(mMutex) =
             media::audio::common::Spatialization::Level::NONE;
 
     /** Control logic for head-tracking, etc. */
-    std::shared_ptr<SpatializerPoseController> mPoseController GUARDED_BY(mLock);
+    std::shared_ptr<SpatializerPoseController> mPoseController GUARDED_BY(mMutex);
 
     /** Last requested head tracking mode */
-    media::HeadTrackingMode mDesiredHeadTrackingMode GUARDED_BY(mLock)
+    media::HeadTrackingMode mDesiredHeadTrackingMode GUARDED_BY(mMutex)
             = media::HeadTrackingMode::STATIC;
 
     /** Last-reported actual head-tracking mode. */
-    media::audio::common::HeadTracking::Mode mActualHeadTrackingMode GUARDED_BY(mLock)
+    media::audio::common::HeadTracking::Mode mActualHeadTrackingMode GUARDED_BY(mMutex)
             = media::audio::common::HeadTracking::Mode::DISABLED;
 
     /** Selected Head pose sensor */
-    int32_t mHeadSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
+    int32_t mHeadSensor GUARDED_BY(mMutex) = SpatializerPoseController::INVALID_SENSOR;
 
     /** Selected Screen pose sensor */
-    int32_t mScreenSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
+    int32_t mScreenSensor GUARDED_BY(mMutex) = SpatializerPoseController::INVALID_SENSOR;
 
     /** Last display orientation received */
-    float mDisplayOrientation GUARDED_BY(mLock) = 0.f;  // aligned to natural up orientation.
+    float mDisplayOrientation GUARDED_BY(mMutex) = 0.f;  // aligned to natural up orientation.
 
     /** Last folded state */
-    bool mFoldedState GUARDED_BY(mLock) = false;  // foldable: true means folded.
+    bool mFoldedState GUARDED_BY(mMutex) = false;  // foldable: true means folded.
 
     /** Last hinge angle */
-    float mHingeAngle GUARDED_BY(mLock) = 0.f;  // foldable: 0.f is closed, M_PI flat open.
+    float mHingeAngle GUARDED_BY(mMutex) = 0.f;  // foldable: 0.f is closed, M_PI flat open.
 
     std::vector<media::audio::common::Spatialization::Level> mLevels;
     std::vector<media::audio::common::HeadTracking::Mode> mHeadTrackingModes;
@@ -485,11 +486,13 @@
     bool mSupportsHeadTracking;
     /** List of supported headtracking connection modes reported by the spatializer.
      * If the list is empty, the spatializer does not support any optional connection
-     * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+     * mode, and HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
      */
-    std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            mSupportedHeadtrackingConnectionModes;
     /** Selected HT connection mode when several modes are supported by the spatializer */
-    headtracking_connection_t mHeadtrackingConnectionMode;
+    media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+            media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
 
     // Looper thread for mEngine callbacks
     class EngineCallbackHandler;
@@ -497,8 +500,8 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
-    size_t mNumActiveTracks GUARDED_BY(mLock) = 0;
-    std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mLock);
+    size_t mNumActiveTracks GUARDED_BY(mMutex) = 0;
+    std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
     /** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
@@ -514,10 +517,10 @@
      * Dump to local log with max/average pose angle every mPoseRecordThreshold.
      */
     // Record one log line per second (up to mMaxLocalLogLine) to capture most recent sensor data.
-    media::VectorRecorder mPoseRecorder GUARDED_BY(mLock) {
+    media::VectorRecorder mPoseRecorder GUARDED_BY(mMutex) {
         6 /* vectorSize */, std::chrono::seconds(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
     // Record one log line per minute (up to mMaxLocalLogLine) to capture durable sensor data.
-    media::VectorRecorder mPoseDurableRecorder  GUARDED_BY(mLock) {
+    media::VectorRecorder mPoseDurableRecorder GUARDED_BY(mMutex) {
         6 /* vectorSize */, std::chrono::minutes(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
 };  // Spatializer
 
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index a4a0cd4..34bd3b4 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -33,11 +34,14 @@
         "libutils",
         "libcutils",
         "libxml2",
+        "server_configurable_flags",
     ],
 
     static_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "libaudiopolicycomponents",
+        "libflagtest",
         "libgmock",
     ],
 
@@ -49,7 +53,7 @@
 
     srcs: ["audiopolicymanager_tests.cpp"],
 
-    data: [":audiopolicytest_configuration_files",],
+    data: [":audiopolicytest_configuration_files"],
 
     cflags: [
         "-Werror",
@@ -63,7 +67,6 @@
 
 }
 
-
 cc_test {
     name: "audio_health_tests",
 
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 31ee252..aa7c9cd 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -31,6 +31,7 @@
     using AudioPolicyManager::getConfig;
     using AudioPolicyManager::initialize;
     using AudioPolicyManager::getOutputs;
+    using AudioPolicyManager::getInputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
     using AudioPolicyManager::setSurroundFormatEnabled;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..e02c93a 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -28,6 +28,8 @@
 #include <android-base/file.h>
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
+#include <android_media_audiopolicy.h>
+#include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
@@ -43,6 +45,7 @@
 
 using namespace android;
 using testing::UnorderedElementsAre;
+using testing::IsEmpty;
 using android::content::AttributionSourceState;
 
 namespace {
@@ -92,6 +95,12 @@
     return attributionSourceState;
 }
 
+bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
+    return config1.format == config2.format
+            && config1.sample_rate == config2.sample_rate
+            && config1.channel_mask == config2.channel_mask;
+}
+
 } // namespace
 
 TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -1266,6 +1275,53 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
+    const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(deviceChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+
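+    // First request a stereo capture config that differs from the device's native 3.1 mask and
+    // expect getInputForAttr to keep the requested config rather than remap it to the device mask.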
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+                               AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    AudioPolicyInterface::input_type_t inputType;
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+    audio_config_base_t requestedConfig = {
+            .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+            .format = AUDIO_FORMAT_PCM_16_BIT,
+            .sample_rate = 48000
+    };
+    audio_config_base_t config = requestedConfig;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
+
+    attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+            AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    requestedConfig.channel_mask = deviceChannelMask;
+    config = requestedConfig;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    input = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+}
+
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void TearDown() override;
@@ -1273,6 +1329,12 @@
     status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
             std::string mixAddress, const audio_config_t& audioConfig,
             const std::vector<AudioMixMatchCriterion>& matchCriteria);
+
+    status_t addPolicyMix(const AudioMix& mix);
+
+    status_t removePolicyMixes(const Vector<AudioMix>& mixes);
+
+    std::vector<AudioMix> getRegisteredPolicyMixes();
     void clearPolicyMix();
     void addPolicyMixAndStartInputForLoopback(
             int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
@@ -1309,7 +1371,11 @@
     myAudioMix.mDeviceType = deviceType;
     // Clear mAudioMixes before adding a new one to make sure we don't re-add existing mixes.
     mAudioMixes.clear();
-    mAudioMixes.add(myAudioMix);
+    return addPolicyMix(myAudioMix);
+}
+
+status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(const AudioMix& mix) {
+    mAudioMixes.add(mix);
 
     // As policy mix registration may fail in some cases,
     // callers need to check the returned status.
@@ -1317,6 +1383,20 @@
     return ret;
 }
 
+status_t AudioPolicyManagerTestDynamicPolicy::removePolicyMixes(const Vector<AudioMix>& mixes) {
+    status_t ret = mManager->unregisterPolicyMixes(mixes);
+    return ret;
+}
+
+std::vector<AudioMix> AudioPolicyManagerTestDynamicPolicy::getRegisteredPolicyMixes() {
+    std::vector<AudioMix> audioMixes;
+    if (mManager != nullptr) {
+        status_t ret = mManager->getRegisteredPolicyMixes(audioMixes);
+        EXPECT_EQ(NO_ERROR, ret);
+    }
+    return audioMixes;
+}
+
 void AudioPolicyManagerTestDynamicPolicy::clearPolicyMix() {
     if (mManager != nullptr) {
         mManager->stopInput(mLoopbackInputPortId);
@@ -1470,6 +1550,142 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        RegisterInvalidMixesDoesNotImpactPriorMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    mAudioMixes.add(validAudioMix);
+    status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
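+    // The second mix is expected to be rejected (presumably because it combines include and
+    // exclude uid criteria), leaving the previously registered mix untouched.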
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.add(invalidAudioMix);
+    ret = mManager->registerPolicyMixes(mAudioMixes);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(registeredMixes.size(), remainingMixes.size());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        UnregisterInvalidMixesReturnsError,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    mAudioMixes.add(validAudioMix);
+    status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    Vector<AudioMix> mixes;
+    mixes.add(invalidAudioMix);
+    mixes.add(validAudioMix);
+    ret = removePolicyMixes(mixes);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(remainingMixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        GetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(mixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestDynamicPolicy,
+        AddPolicyMixAndVerifyGetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+                                mixMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(mixes.size(), 1);
+
+    const AudioMix& mix = mixes[0];
+    ASSERT_EQ(mix.mCriteria.size(), mixMatchCriteria.size());
+    for (uint32_t i = 0; i < mixMatchCriteria.size(); i++) {
+        EXPECT_EQ(mix.mCriteria[i].mRule, mixMatchCriteria[i].mRule);
+        EXPECT_EQ(mix.mCriteria[i].mValue.mUsage, mixMatchCriteria[i].mValue.mUsage);
+    }
+    EXPECT_EQ(mix.mDeviceType, AUDIO_DEVICE_OUT_REMOTE_SUBMIX);
+    EXPECT_EQ(mix.mRouteFlags, MIX_ROUTE_FLAG_LOOP_BACK);
+    EXPECT_EQ(mix.mMixType, MIX_TYPE_PLAYERS);
+    EXPECT_EQ(mix.mFormat.channel_mask, audioConfig.channel_mask);
+    EXPECT_EQ(mix.mFormat.format, audioConfig.format);
+    EXPECT_EQ(mix.mFormat.sample_rate, audioConfig.sample_rate);
+    EXPECT_EQ(mix.mFormat.frame_count, audioConfig.frame_count);
+}
+
 class AudioPolicyManagerTestForHdmi
         : public AudioPolicyManagerTestWithConfigurationFile,
           public testing::WithParamInterface<audio_format_t> {
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 5e71210..43e2e39 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 4efdf8a..1a299c6 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -65,6 +65,7 @@
                         samplingRates="48000"
                         channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
                 </mixPort>
+                <mixPort name="hifi_input" role="sink" />
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -111,6 +112,8 @@
                        sources="primary output,hifi_output,mmap_no_irq_out"/>
                 <route type="mix" sink="mixport_bus_input"
                     sources="BUS Device In"/>
+                <route type="mix" sink="hifi_input"
+                        sources="USB Device In" />
             </routes>
         </module>
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 4883a09..5b76bb0 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -101,7 +101,7 @@
         "android.frameworks.cameraservice.device-V2-ndk",
         "android.hardware.camera.common-V1-ndk",
         "android.hardware.camera.device-V3-ndk",
-        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.metadata-V3-ndk",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
@@ -112,6 +112,7 @@
         "libcameraservice_device_independent",
         "libdynamic_depth",
         "libprocessinfoservice_aidl",
+        "libvirtualdevicebuildflags",
         "media_permission-aidl-cpp",
     ],
 }
@@ -195,6 +196,7 @@
         "utils/SessionStatsBuilder.cpp",
         "utils/TagMonitor.cpp",
         "utils/LatencyHistogram.cpp",
+        "utils/Utils.cpp",
     ],
 
     header_libs: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 6acf0b6..2d55f39 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -123,6 +123,10 @@
     android_atomic_write(level, &gLogLevel);
 }
 
+int32_t format_as(CameraService::StatusInternal s) {
+  return fmt::underlying(s);
+}
+
 // ----------------------------------------------------------------------------
 
 static const std::string sDumpPermission("android.permission.DUMP");
@@ -131,6 +135,8 @@
 static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
 static const std::string sCameraHeadlessSystemUserPermission(
         "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
+static const std::string sCameraPrivacyAllowlistPermission(
+        "android.permission.CAMERA_PRIVACY_ALLOWLIST");
 static const std::string
         sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
 static const std::string sCameraOpenCloseListenerPermission(
@@ -510,8 +516,8 @@
     }
 
     if (newStatus == StatusInternal::NOT_PRESENT) {
-        logDeviceRemoved(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
-                newStatus));
+        logDeviceRemoved(cameraId, fmt::format("Device status changed from {} to {}",
+                oldStatus, newStatus));
 
         // Set the device status to NOT_PRESENT, clients will no longer be able to connect
         // to this device until the status changes
@@ -537,8 +543,8 @@
         removeStates(cameraId);
     } else {
         if (oldStatus == StatusInternal::NOT_PRESENT) {
-            logDeviceAdded(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
-                    newStatus));
+            logDeviceAdded(cameraId, fmt::format("Device status changed from {} to {}",
+                    oldStatus, newStatus));
         }
         updateStatus(newStatus, cameraId);
     }
@@ -578,9 +584,9 @@
     if (updated) {
         std::string idCombo = id + " : " + physicalId;
         if (newStatus == StatusInternal::PRESENT) {
-            logDeviceAdded(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
+            logDeviceAdded(idCombo, fmt::format("Device status changed to {}", newStatus));
         } else {
-            logDeviceRemoved(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
+            logDeviceRemoved(idCombo, fmt::format("Device status changed to {}", newStatus));
         }
         // Avoid calling getSystemCameraKind() with mStatusListenerLock held (b/141756275)
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
@@ -816,6 +822,14 @@
             std::string(), AppOpsManager::OP_NONE);
 }
 
+bool CameraService::hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    return checkPermission(std::string(), sCameraPrivacyAllowlistPermission, attributionSource,
+            std::string(), AppOpsManager::OP_NONE);
+}
+
 Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -902,13 +916,6 @@
                 "request for system only device %s: ", cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "createDefaultRequest");
-    }
-
     CameraMetadata metadata;
     status_t err = mCameraProviderManager->createDefaultRequest(cameraId, tempId, &metadata);
     if (err == OK) {
@@ -957,13 +964,6 @@
                 cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "isSessionConfigurationWithParametersSupported");
-    }
-
     *supported = false;
     status_t ret = mCameraProviderManager->isSessionConfigurationSupported(cameraId.c_str(),
             sessionConfiguration, /*mOverrideForPerfClass*/false, /*checkSessionParams*/true,
@@ -994,6 +994,61 @@
     return res;
 }
 
+Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
+                                                int targetSdkVersion, bool overrideToPortrait,
+                                                const SessionConfiguration& sessionConfiguration,
+                                                /*out*/ CameraMetadata* outMetadata) {
+    ATRACE_CALL();
+
+    if (!mInitialized) {
+        ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
+        return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
+    }
+
+    const std::string cameraId =
+            resolveCameraId(unresolvedCameraId, CameraThreadState::getCallingUid());
+
+    if (outMetadata == nullptr) {
+        std::string msg =
+                fmt::sprintf("Camera %s: Invalid 'outMetadata' input!", unresolvedCameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+
+    bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+            mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
+    status_t ret = mCameraProviderManager->getSessionCharacteristics(
+            cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+
+    // TODO(b/303645857): Remove fingerprintable metadata if the caller process does not have
+    //                    camera access permission.
+
+    Status res = Status::ok();
+    switch (ret) {
+        case OK:
+            // Expected, no handling needed.
+            break;
+        case INVALID_OPERATION: {
+                std::string msg = fmt::sprintf(
+                        "Camera %s: Session characteristics query not supported!",
+                        cameraId.c_str());
+                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
+                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+            }
+            break;
+        default: {
+                std::string msg = fmt::sprintf("Camera %s: Error: %s (%d)", cameraId.c_str(),
+                                               strerror(-ret), ret);
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
+    }
+
+    return res;
+}
+
 Status CameraService::parseCameraIdRemapping(
         const hardware::CameraIdRemapping& cameraIdRemapping,
         /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
@@ -2341,6 +2396,39 @@
     return ret;
 }
 
+bool CameraService::isCameraPrivacyEnabled(const String16& packageName, const std::string& cam_id,
+        int callingPid, int callingUid) {
+    if (!isAutomotiveDevice()) {
+        return mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+    }
+
+    // Automotive privileged clients (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+    // safety-critical use cases cannot be disabled and are exempt from the camera privacy policy.
+    if ((isAutomotivePrivilegedClient(callingUid) && isAutomotiveExteriorSystemCamera(cam_id))) {
+        ALOGI("Camera privacy cannot be enabled for automotive privileged client %d "
+                "using camera %s", callingUid, cam_id.c_str());
+        return false;
+    }
+
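+    // Per-package privacy wins first; otherwise the global automotive privacy state applies,
+    // and holders of CAMERA_PRIVACY_ALLOWLIST are exempt from the driver-assistance states below.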
+    if (mSensorPrivacyPolicy->isCameraPrivacyEnabled(packageName)) {
+        return true;
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
+        return false;
+    } else if ((mSensorPrivacyPolicy->getCameraPrivacyState()
+            == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS) ||
+            (mSensorPrivacyPolicy->getCameraPrivacyState()
+            == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS) ||
+            (mSensorPrivacyPolicy->getCameraPrivacyState() ==
+            SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)) {
+        if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
+            return false;
+        } else {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string CameraService::getPackageNameFromUid(int clientUid) {
     std::string packageName("");
 
@@ -2552,73 +2640,100 @@
         // Enable/disable camera service watchdog
         client->setCameraServiceWatchdog(mCameraServiceWatchdogEnabled);
 
-        // Set rotate-and-crop override behavior
-        if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
-            client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
-        } else if (overrideToPortrait && portraitRotation != 0) {
-            uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
-            switch (portraitRotation) {
-                case 90:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
-                    break;
-                case 180:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
-                    break;
-                case 270:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
-                    break;
-                default:
-                    ALOGE("Unexpected portrait rotation: %d", portraitRotation);
-                    break;
+        CameraMetadata chars;
+        bool rotateAndCropSupported = true;
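+        // Only apply the rotate-and-crop override when the HAL advertises more than one
+        // ROTATE_AND_CROP mode; a single entry (presumably just NONE) would make it a no-op.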
+        err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
+                &chars, overrideToPortrait);
+        if (err == OK) {
+            auto availableRotateCropEntry = chars.find(
+                    ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
+            if (availableRotateCropEntry.count <= 1) {
+                rotateAndCropSupported = false;
             }
-            client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
-            client->setRotateAndCropOverride(
-                mCameraServiceProxyWrapper->getRotateAndCropOverride(
-                    clientPackageName, facing, multiuser_get_user_id(clientUid)));
+            ALOGE("%s: Unable to query static metadata for camera %s: %s (%d)", __FUNCTION__,
+                    cameraId.c_str(), strerror(-err), err);
         }
 
-        // Set autoframing override behaviour
-        if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
-            client->setAutoframingOverride(mOverrideAutoframingMode);
-        } else {
-            client->setAutoframingOverride(
-                mCameraServiceProxyWrapper->getAutoframingOverride(
-                    clientPackageName));
+        if (rotateAndCropSupported) {
+            // Set rotate-and-crop override behavior
+            if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+                client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+            } else if (overrideToPortrait && portraitRotation != 0) {
+                uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+                switch (portraitRotation) {
+                    case 90:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+                        break;
+                    case 180:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+                        break;
+                    case 270:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+                        break;
+                    default:
+                        ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+                        break;
+                }
+                client->setRotateAndCropOverride(rotateAndCropMode);
+            } else {
+                client->setRotateAndCropOverride(
+                    mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                        clientPackageName, facing, multiuser_get_user_id(clientUid)));
+            }
         }
 
-        // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use
-        // cases such as rear view and surround view cannot be disabled and are exempt from camera
-        // privacy policy.
-        if ((!isAutomotivePrivilegedClient(packageUid) ||
-                !isAutomotiveExteriorSystemCamera(cameraId))) {
+        bool autoframingSupported = true;
+        auto availableAutoframingEntry = chars.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+        if ((availableAutoframingEntry.count == 1) && (availableAutoframingEntry.data.u8[0] ==
+                    ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE)) {
+            autoframingSupported = false;
+        }
+
+        if (autoframingSupported) {
+            // Set autoframing override behaviour
+            if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+                client->setAutoframingOverride(mOverrideAutoframingMode);
+            } else {
+                client->setAutoframingOverride(
+                    mCameraServiceProxyWrapper->getAutoframingOverride(
+                        clientPackageName));
+            }
+        }
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
             // Set camera muting behavior.
-            bool isCameraPrivacyEnabled =
+            isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
+                    toString16(client->getPackageName()), cameraId, packagePid, packageUid);
+        } else {
+            isCameraPrivacyEnabled =
                     mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-            if (client->supportsCameraMute()) {
-                client->setCameraMute(
-                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-            } else if (isCameraPrivacyEnabled) {
-                // no camera mute supported, but privacy is on! => disconnect
-                ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                        client->getPackageName().c_str(), cameraId.c_str());
-                // Do not hold mServiceLock while disconnecting clients, but
-                // retain the condition blocking other clients from connecting
-                // in mServiceLockWrapper if held.
-                mServiceLock.unlock();
-                // Clear caller identity temporarily so client disconnect PID
-                // checks work correctly
-                int64_t token = CameraThreadState::clearCallingIdentity();
-                // Note AppOp to trigger the "Unblock" dialog
-                client->noteAppOp();
-                client->disconnect();
-                CameraThreadState::restoreCallingIdentity(token);
-                // Reacquire mServiceLock
-                mServiceLock.lock();
+        }
 
-                return STATUS_ERROR_FMT(ERROR_DISABLED,
-                        "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
-            }
+        if (client->supportsCameraMute()) {
+            client->setCameraMute(
+                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+        } else if (isCameraPrivacyEnabled) {
+            // no camera mute supported, but privacy is on! => disconnect
+            ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                    client->getPackageName().c_str(), cameraId.c_str());
+            // Do not hold mServiceLock while disconnecting clients, but
+            // retain the condition blocking other clients from connecting
+            // in mServiceLockWrapper if held.
+            mServiceLock.unlock();
+            // Clear caller identity temporarily so client disconnect PID
+            // checks work correctly
+            int64_t token = CameraThreadState::clearCallingIdentity();
+            // Note AppOp to trigger the "Unblock" dialog
+            client->noteAppOp();
+            client->disconnect();
+            CameraThreadState::restoreCallingIdentity(token);
+            // Reacquire mServiceLock
+            mServiceLock.lock();
+
+            return STATUS_ERROR_FMT(ERROR_DISABLED,
+                    "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
         }
 
         if (shimUpdateOnly) {
@@ -4139,8 +4254,15 @@
         // return MODE_IGNORED. Do not treat such case as error.
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
                 mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+        }
         // We don't want to return EACCESS if the CameraPrivacy is enabled.
         // We prefer to successfully open the camera and perform camera muting
         // or blocking in connectHelper as handleAppOpMode can be called before the
@@ -4166,8 +4288,15 @@
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
-        mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+
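+        // With watch_foreground_changes, also receive callbacks when the op's foreground state
+        // changes, not just when its mode is explicitly changed.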
+        if (flags::watch_foreground_changes()) {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+                toString16(mClientPackageName),
+                AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+        } else {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
                 toString16(mClientPackageName), mOpsCallback);
+        }
 
         // Just check for camera access here on open - delay startOp until
         // camera frames start streaming in startCameraStreamingOps
@@ -4327,20 +4456,42 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        // Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
+        // If not visible, but still active, then we want to block instead of muting the camera.
+        int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+        bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
-                mCameraIdStr.c_str(), mClientPackageName.c_str(),
-                mUidIsTrusted, isUidActive);
-        // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
-        // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
-        // error.
+        }
+
+        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
+                " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
+                mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
+                isCameraPrivacyEnabled);
+        // If the calling Uid is trusted (a native service), or the client Uid is active / visible
+        // (WAR for b/175320666)the AppOpsManager could return MODE_IGNORED. Do not treat such
+        // cases as error.
         if (!mUidIsTrusted) {
-            if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
-                setCameraMute(true);
-            } else if (!isUidActive
-                || (isCameraPrivacyEnabled && !supportsCameraMute())) {
-                block();
+            if (flags::watch_foreground_changes()) {
+                if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else {
+                    block();
+                }
+            } else {
+                if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else if (!isUidActive
+                    || (isCameraPrivacyEnabled && !supportsCameraMute())) {
+                    block();
+                }
             }
         }
     } else if (res == AppOpsManager::MODE_ALLOWED) {
@@ -4711,7 +4862,15 @@
     }
     hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
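+    // Automotive devices additionally listen for per-sensor toggle state changes, delivered via
+    // onSensorPrivacyStateChanged.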
+    if (isAutomotiveDevice()) {
+        mSpm.addToggleSensorPrivacyListener(this);
+    }
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
+    if (flags::camera_privacy_allowlist()) {
+        mCameraPrivacyState = mSpm.getToggleSensorPrivacyState(
+                SensorPrivacyManager::TOGGLE_TYPE_SOFTWARE,
+                SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+    }
     status_t res = mSpm.linkToDeath(this);
     if (res == OK) {
         mRegistered = true;
@@ -4743,6 +4902,9 @@
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
+    if (isAutomotiveDevice()) {
+        mSpm.removeToggleSensorPrivacyListener(this);
+    }
     mSpm.unlinkToDeath(this);
     mRegistered = false;
     ALOGV("SensorPrivacyPolicy: Unregistered with SensorPrivacyManager");
@@ -4757,6 +4919,15 @@
     return mSensorPrivacyEnabled;
 }
 
+int CameraService::SensorPrivacyPolicy::getCameraPrivacyState() {
+    if (!mRegistered) {
+        registerWithSensorPrivacyManager();
+    }
+
+    Mutex::Autolock _l(mSensorPrivacyLock);
+    return mCameraPrivacyState;
+}
+
 bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled() {
     if (!hasCameraPrivacyFeature()) {
         return false;
@@ -4764,18 +4935,53 @@
     return mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
 }
 
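+// Per-package camera privacy check, used by the camera_privacy_allowlist path.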
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isCameraPrivacyEnabled(packageName);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(
-    int toggleType __unused, int sensor __unused, bool enabled) {
+    int toggleType, int sensor, bool enabled) {
+    if ((toggleType == SensorPrivacyManager::TOGGLE_TYPE_UNKNOWN)
+            && (sensor == SensorPrivacyManager::TOGGLE_SENSOR_UNKNOWN)) {
+        {
+            Mutex::Autolock _l(mSensorPrivacyLock);
+            mSensorPrivacyEnabled = enabled;
+        }
+        // if sensor privacy is enabled then block all clients from accessing the camera
+        if (enabled) {
+            sp<CameraService> service = mService.promote();
+            if (service != nullptr) {
+                service->blockAllClients();
+            }
+        }
+    }
+    return binder::Status::ok();
+}
+
+binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyStateChanged(
+    int, int sensor, int state) {
+    if (!flags::camera_privacy_allowlist()
+            || (sensor != SensorPrivacyManager::TOGGLE_SENSOR_CAMERA)) {
+        return binder::Status::ok();
+    }
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
-        mSensorPrivacyEnabled = enabled;
+        mCameraPrivacyState = state;
+    }
+    sp<CameraService> service = mService.promote();
+    if (!service) {
+        return binder::Status::ok();
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    if (enabled) {
-        sp<CameraService> service = mService.promote();
-        if (service != nullptr) {
-            service->blockAllClients();
-        }
+    if (state == SensorPrivacyManager::ENABLED) {
+        service->blockAllClients();
+    } else if ((state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)
+            || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS)
+            || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS)) {
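+        // The automotive driver assistance states only block clients whose per-package camera
+        // privacy is enabled; other clients keep running.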
+        service->blockPrivacyEnabledClients();
     }
     return binder::Status::ok();
 }
@@ -5453,8 +5659,7 @@
             for (auto& listener : mListenerList) {
                 bool isVendorListener = listener->isVendorListener();
                 if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
-                        listener->getListenerPid(), listener->getListenerUid()) ||
-                        isVendorListener) {
+                        listener->getListenerPid(), listener->getListenerUid())) {
                     ALOGV("Skipping discovery callback for system-only camera device %s",
                             cameraId.c_str());
                     continue;
@@ -5647,6 +5852,23 @@
     }
 }
 
+void CameraService::blockPrivacyEnabledClients() {
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                std::string pkgName = basicClient->getPackageName();
+                bool cameraPrivacyEnabled =
+                        mSensorPrivacyPolicy->isCameraPrivacyEnabled(toString16(pkgName));
+                if (cameraPrivacyEnabled) {
+                    basicClient->block();
+                }
+            }
+        }
+    }
+}
+
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(toString16(sManageCameraPermission), nullptr, nullptr)) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 7dc0b91..8822cd3 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -248,6 +248,11 @@
             /*out*/
             bool* supported);
 
+    virtual binder::Status getSessionCharacteristics(
+            const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
+            const SessionConfiguration& sessionConfiguration,
+            /*out*/ CameraMetadata* outMetadata);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -681,6 +686,9 @@
             int callingUid) const;
 
     bool hasCameraPermissions() const;
+
+    bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const;
+
    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -693,6 +701,8 @@
         UNKNOWN = static_cast<int32_t>(hardware::ICameraServiceListener::STATUS_UNKNOWN)
     };
 
+    friend int32_t format_as(StatusInternal s);
+
     /**
      * Container class for the state of each logical camera device, including: ID, status, and
      * dependencies on other devices.  The mapping of camera ID -> state saved in mCameraStates
@@ -866,16 +876,20 @@
             public virtual IServiceManager::LocalRegistrationCallback {
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service)
-                    : mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
+                    : mService(service), mSensorPrivacyEnabled(false),
+                    mCameraPrivacyState(SensorPrivacyManager::DISABLED), mRegistered(false) {}
 
             void registerSelf();
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
             bool isCameraPrivacyEnabled();
+            int getCameraPrivacyState();
+            bool isCameraPrivacyEnabled(const String16& packageName);
 
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
+            binder::Status onSensorPrivacyStateChanged(int toggleType, int sensor, int state);
 
             // Implementation of IServiceManager::LocalRegistrationCallback
             virtual void onServiceRegistration(const String16& name,
@@ -888,6 +902,7 @@
             wp<CameraService> mService;
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
+            int mCameraPrivacyState;
             bool mRegistered;
 
             bool hasCameraPrivacyFeature();
@@ -924,6 +939,9 @@
             const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
             /*out*/int& originalClientPid) const;
 
+    bool isCameraPrivacyEnabled(const String16& packageName, const std::string& cameraId,
+            int clientPid, int clientUid);
+
     // Handle active client evictions, and update service state.
     // Only call with with mServiceLock held.
     status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
@@ -1383,6 +1401,9 @@
     // Blocks all active clients.
     void blockAllClients();
 
+    // Blocks clients whose privacy is enabled.
+    void blockPrivacyEnabledClients();
+
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index 954cb8b..9e6a925 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -20,6 +20,7 @@
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
 #include <android-base/properties.h>
+#include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::device::implementation {
 
@@ -56,7 +57,7 @@
 AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
       mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 bool AidlCameraDeviceUser::initDevice() {
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 8cd7d1f..79dbfed 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -27,6 +27,7 @@
 #include <android/binder_manager.h>
 #include <binder/Status.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
 
@@ -79,7 +80,7 @@
 
 AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
       mCameraService(cameraService) {
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
                                                           SCameraMetadata* _aidl_return) {
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index f5d68eb..14e5fad 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -17,12 +17,13 @@
 #define LOG_TAG "AidlUtils"
 
 #include <aidl/AidlUtils.h>
+#include <aidl/ExtensionMetadataTags.h>
 #include <aidl/VndkVersionMetadataTags.h>
 #include <aidlcommonsupport/NativeHandle.h>
+#include <camera/StringUtils.h>
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
-#include <camera/StringUtils.h>
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -310,8 +311,8 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
     if (vndkVersion == __ANDROID_API_FUTURE__) {
-        // VNDK version in ro.vndk.version is a version code-name that
-        // corresponds to the current version.
+        // VNDK version derived from ro.board.api_level is a version code-name that
+        // corresponds to the current SDK version.
         return OK;
     }
     const auto &apiLevelToKeys =
@@ -333,4 +334,47 @@
     return OK;
 }
 
+bool areExtensionKeysSupported(const CameraMetadata& metadata) {
+    auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    if (requestKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    auto resultKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (resultKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_RESULT_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
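+    // Any extension tag present in the available request or result key lists means the device
+    // advertises extension support.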
+    for (const auto& extensionKey : extension_metadata_keys) {
+        if (std::find(requestKeys.data.i32, requestKeys.data.i32 + requestKeys.count, extensionKey)
+                != requestKeys.data.i32 + requestKeys.count) {
+            return true;
+        }
+
+        if (std::find(resultKeys.data.i32, resultKeys.data.i32 + resultKeys.count, extensionKey)
+                != resultKeys.data.i32 + resultKeys.count) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/) {
+    if (metadata == nullptr) {
+        return BAD_VALUE;
+    }
+
+    for (const auto& key : extension_metadata_keys) {
+        status_t res = metadata->erase(key);
+        if (res != OK) {
+            ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+            return res;
+        }
+    }
+    return OK;
+}
+
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index c89d7ff..562aa70 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,9 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
 
+bool areExtensionKeysSupported(const CameraMetadata& metadata);
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
 
 #endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
new file mode 100644
index 0000000..86af36c
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from extensions_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Extension-related metadata tags. Used to detect whether a camera device advertises
+ * extension support and to filter these keys out of capture requests when it does not.
+ */
+std::vector<camera_metadata_tag> extension_metadata_keys{
+            ANDROID_EXTENSION_STRENGTH,
+            ANDROID_EXTENSION_CURRENT_TYPE,
+            ANDROID_EFV_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_AUTO_ZOOM,
+            ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_STABILIZATION_MODE,
+            ANDROID_EFV_TRANSLATE_VIEWPORT,
+            ANDROID_EFV_ROTATE_VIEWPORT,
+            ANDROID_EFV_PADDING_REGION,
+            ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+            ANDROID_EFV_TARGET_COORDINATES,
+};
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index e403b97..7965474 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -78,6 +78,7 @@
           ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
           ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
           ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
           ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
           ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
           ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
@@ -112,6 +113,15 @@
           ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
           ANDROID_CONTROL_SETTINGS_OVERRIDE,
           ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+          ANDROID_EFV_AUTO_ZOOM,
+          ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+          ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_PADDING_REGION,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_ROTATE_VIEWPORT,
+          ANDROID_EFV_STABILIZATION_MODE,
+          ANDROID_EFV_TARGET_COORDINATES,
+          ANDROID_EFV_TRANSLATE_VIEWPORT,
           ANDROID_EXTENSION_CURRENT_TYPE,
           ANDROID_EXTENSION_STRENGTH,
           ANDROID_FLASH_STRENGTH_LEVEL,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 7443788..508d487 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -777,60 +777,6 @@
     return res;
 }
 
-binder::Status CameraDeviceClient::getSessionCharacteristics(
-        const SessionConfiguration& sessionConfiguration,
-        /*out*/
-        hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) {
-    ATRACE_CALL();
-    binder::Status res;
-    status_t ret = OK;
-    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
-
-    Mutex::Autolock icl(mBinderSerializationLock);
-
-    if (!mDevice.get()) {
-        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
-    }
-
-    auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
-            mCameraIdStr);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {
-          return mDevice->infoPhysical(id);};
-    ret = mProviderManager->getSessionCharacteristics(mCameraIdStr.c_str(),
-            sessionConfiguration, mOverrideForPerfClass, getMetadata,
-            sessionCharacteristics);
-
-    switch (ret) {
-        case OK:
-            // Expected, do nothing.
-            break;
-        case INVALID_OPERATION: {
-                std::string msg = fmt::sprintf(
-                        "Camera %s: Session characteristics query not supported!",
-                        mCameraIdStr.c_str());
-                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
-            }
-
-            break;
-        default: {
-                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", mCameraIdStr.c_str(),
-                        strerror(-ret), ret);
-                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.c_str());
-            }
-    }
-
-    return res;
-}
-
 binder::Status CameraDeviceClient::deleteStream(int streamId) {
     ATRACE_CALL();
     ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId);
@@ -1063,7 +1009,7 @@
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
                 mCameraIdStr.c_str(), streamInfo.width, streamInfo.height, streamInfo.format,
-                streamInfo.dataSpace, strerror(-err), err);
+                static_cast<int>(streamInfo.dataSpace), strerror(-err), err);
     } else {
         int i = 0;
         for (auto& binder : binders) {
@@ -1160,7 +1106,8 @@
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
-                mCameraIdStr.c_str(), width, height, format, dataSpace, strerror(-err), err);
+                mCameraIdStr.c_str(), width, height, format, static_cast<int>(dataSpace),
+                strerror(-err), err);
     } else {
         // Can not add streamId to mStreamMap here, as the surface is deferred. Add it to
         // a separate list to track. Once the deferred surface is set, this id will be
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c2f7f56..b2c9626 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -109,11 +109,6 @@
             /*out*/
             bool* streamStatus) override;
 
-    virtual binder::Status getSessionCharacteristics(
-            const SessionConfiguration& sessionConfiguration,
-            /*out*/
-            hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) override;
-
     // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 1ba3de4..15e2755 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -33,6 +33,7 @@
 #include <future>
 #include <inttypes.h>
 #include <android_companion_virtualdevice_flags.h>
+#include <android_companion_virtualdevice_build_flags.h>
 #include <android/binder_manager.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
@@ -139,7 +140,7 @@
 }
 
 std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-CameraProviderManager::AidlServiceInteractionProxyImpl::getAidlService(
+CameraProviderManager::AidlServiceInteractionProxyImpl::getService(
         const std::string& serviceName) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
@@ -147,19 +148,35 @@
     if (flags::lazy_aidl_wait_for_service()) {
         binder = AServiceManager_waitForService(serviceName.c_str());
     } else {
-        binder = AServiceManager_getService(serviceName.c_str());
+        binder = AServiceManager_checkService(serviceName.c_str());
     }
 
     if (binder == nullptr) {
-        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
-              serviceName.c_str());
+        ALOGE("%s: AIDL Camera provider HAL '%s' is not actually available, despite waiting "
+              "indefinitely?", __FUNCTION__, serviceName.c_str());
         return nullptr;
     }
     std::shared_ptr<ICameraProvider> interface =
             ICameraProvider::fromBinder(ndk::SpAIBinder(binder));
 
     return interface;
-};
+}
+
+std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+CameraProviderManager::AidlServiceInteractionProxyImpl::tryGetService(
+        const std::string& serviceName) {
+    using aidl::android::hardware::camera::provider::ICameraProvider;
+
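+    // AServiceManager_checkService is non-blocking and returns null if the provider is not
+    // already registered with servicemanager.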
+    std::shared_ptr<ICameraProvider> interface = ICameraProvider::fromBinder(
+                    ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+    if (interface == nullptr) {
+        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+              serviceName.c_str());
+        return nullptr;
+    }
+
+    return interface;
+}
 
 static std::string getFullAidlProviderName(const std::string instance) {
     std::string aidlHalServiceDescriptor =
@@ -442,19 +459,31 @@
     return OK;
 }
 
-status_t CameraProviderManager::getSessionCharacteristics(const std::string& id,
-        const SessionConfiguration &configuration, bool overrideForPerfClass,
-        metadataGetter getMetadata,
-        CameraMetadata* sessionCharacteristics /*out*/) const {
+status_t CameraProviderManager::getSessionCharacteristics(
+        const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
+        bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
     if (!flags::feature_combination_query()) {
         return INVALID_OPERATION;
     }
+
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) {
         return NAME_NOT_FOUND;
     }
 
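+    // Resolve camera characteristics on demand for the session query, applying the same
+    // portrait override that was passed to this call.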
+    metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+                                                            bool overrideForPerfClass) {
+        CameraMetadata metadata;
+        status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
+                                                            overrideToPortrait);
+        if (ret != OK) {
+            ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
+                  id.c_str());
+        }
+        return metadata;
+    };
+
     return deviceInfo->getSessionCharacteristics(configuration,
             overrideForPerfClass, getMetadata, sessionCharacteristics);
 }
@@ -1807,18 +1836,13 @@
     auto& c = mCameraCharacteristics;
 
     auto entry = c.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
-    if (entry.count != 0) {
-        ALOGE("%s: CameraCharacteristics must not contain ANDROID_SENSOR_READOUT_TIMESTAMP!",
-                __FUNCTION__);
+    if (entry.count == 0) {
+        uint8_t defaultReadoutTimestamp = readoutTimestampSupported ?
+                                          ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE :
+                                          ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
+        res = c.update(ANDROID_SENSOR_READOUT_TIMESTAMP, &defaultReadoutTimestamp, 1);
     }
 
-    uint8_t readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
-    if (readoutTimestampSupported) {
-        readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
-    }
-
-    res = c.update(ANDROID_SENSOR_READOUT_TIMESTAMP, &readoutTimestamp, 1);
-
     return res;
 }
 
@@ -2101,8 +2125,14 @@
         const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
-    std::shared_ptr<ICameraProvider> interface =
-            mAidlServiceProxy->getAidlService(providerName.c_str());
+    std::shared_ptr<ICameraProvider> interface;
+    if (flags::delay_lazy_hal_instantiation()) {
+        // Only get remote instance if already running. Lazy Providers will be
+        // woken up later.
+        interface = mAidlServiceProxy->tryGetService(providerName);
+    } else {
+        interface = mAidlServiceProxy->getService(providerName);
+    }
 
     if (interface == nullptr) {
         ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -3249,7 +3279,8 @@
 }
 
 bool CameraProviderManager::isVirtualCameraHalEnabled() {
-    return vd_flags::virtual_camera_service_discovery();
+    return vd_flags::virtual_camera_service_discovery() &&
+           vd_flags::virtual_camera_service_build_flag();
 }
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 53a2102..5ff3fcd 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -178,9 +178,15 @@
     // Proxy to inject fake services in test.
     class AidlServiceInteractionProxy {
       public:
-        // Returns the Aidl service with the given serviceName
+        // Returns the Aidl service with the given serviceName. May wait for the service
+        // to come up if it is not already running (see flags::lazy_aidl_wait_for_service).
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) = 0;
+        getService(const std::string& serviceName) = 0;
+
+        // Attempts to get an already running AIDL service of the given serviceName.
+        // Returns nullptr immediately if service is not running.
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) = 0;
 
         virtual ~AidlServiceInteractionProxy() = default;
     };
@@ -190,7 +196,10 @@
     class AidlServiceInteractionProxyImpl : public AidlServiceInteractionProxy {
       public:
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) override;
+        getService(const std::string& serviceName) override;
+
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) override;
     };
 
     /**
@@ -321,7 +330,8 @@
      */
      status_t getSessionCharacteristics(const std::string& id,
             const SessionConfiguration &configuration,
-            bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+            bool overrideForPerfClass,
+            bool overrideToPortrait,
             CameraMetadata* sessionCharacteristics /*out*/) const;
 
     /**
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index d773af3..a721d28 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -275,54 +275,58 @@
     if (mSavedInterface != nullptr) {
         return mSavedInterface;
     }
+
     if (!kEnableLazyHal) {
         ALOGE("Bad provider state! Should not be here on a non-lazy HAL!");
         return nullptr;
     }
 
     auto interface = mActiveInterface.lock();
-    if (interface == nullptr) {
-        // Try to get service without starting
-        interface =
-                    ICameraProvider::fromBinder(
-                            ndk::SpAIBinder(AServiceManager_checkService(mProviderName.c_str())));
-        if (interface == nullptr) {
-            ALOGV("Camera provider actually needs restart, calling getService(%s)",
-                  mProviderName.c_str());
-            interface = mManager->mAidlServiceProxy->getAidlService(mProviderName.c_str());
-
-            if (interface == nullptr) {
-                ALOGD("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
-                return nullptr;
-            }
-
-            // Set all devices as ENUMERATING, provider should update status
-            // to PRESENT after initializing.
-            // This avoids failing getCameraDeviceInterface_V3_x before devices
-            // are ready.
-            for (auto& device : mDevices) {
-              device->mIsDeviceAvailable = false;
-            }
-
-            interface->setCallback(mCallbacks);
-            auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
-                    this);
-            if (link != STATUS_OK) {
-                ALOGW("%s: Unable to link to provider '%s' death notifications",
-                        __FUNCTION__, mProviderName.c_str());
-                mManager->removeProvider(mProviderInstance);
-                return nullptr;
-            }
-
-            // Send current device state
-            interface->notifyDeviceStateChange(mDeviceState);
-        }
-        mActiveInterface = interface;
-    } else {
-        ALOGV("Camera provider (%s) already in use. Re-using instance.",
-              mProviderName.c_str());
+    if (interface != nullptr) {
+        ALOGV("Camera provider (%s) already in use. Re-using instance.", mProviderName.c_str());
+        return interface;
     }
 
+    // Try to get service without starting
+    interface = ICameraProvider::fromBinder(
+            ndk::SpAIBinder(AServiceManager_checkService(mProviderName.c_str())));
+    if (interface != nullptr) {
+        // Service is already running. Cache and return.
+        mActiveInterface = interface;
+        return interface;
+    }
+
+    ALOGV("Camera provider actually needs restart, calling getService(%s)", mProviderName.c_str());
+    interface = mManager->mAidlServiceProxy->getService(mProviderName);
+
+    if (interface == nullptr) {
+        ALOGE("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
+        return nullptr;
+    }
+
+    // Set all devices as ENUMERATING, provider should update status
+    // to PRESENT after initializing.
+    // This avoids failing getCameraDeviceInterface_V3_x before devices
+    // are ready.
+    for (auto& device : mDevices) {
+        device->mIsDeviceAvailable = false;
+    }
+
+    interface->setCallback(mCallbacks);
+    auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
+            this);
+    if (link != STATUS_OK) {
+        ALOGW("%s: Unable to link to provider '%s' death notifications",
+                __FUNCTION__, mProviderName.c_str());
+        mManager->removeProvider(mProviderInstance);
+        return nullptr;
+    }
+
+    // Send current device state
+    interface->notifyDeviceStateChange(mDeviceState);
+    // Cache interface to return early for future calls.
+    mActiveInterface = interface;
+
     return interface;
 }
 
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d2643c1..065f0c5 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -692,6 +692,14 @@
         mHasFlashUnit = false;
     }
 
+    if (flags::feature_combination_query()) {
+        res = addSessionConfigQueryVersionTag();
+        if (OK != res) {
+            ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+        }
+    }
+
     camera_metadata_entry entry =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
     if (entry.count == 1) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 107cc19..9792089 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -69,6 +69,7 @@
 #include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include <algorithm>
 #include <optional>
@@ -77,10 +78,11 @@
 using namespace android::camera3;
 using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+using namespace android::hardware::cameraservice::utils::conversion::aidl;
 
 namespace flags = com::android::internal::camera::flags;
 namespace android {
-namespace flags = com::android::internal::camera::flags;
+
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient):
@@ -145,17 +147,6 @@
     /** Register in-flight map to the status tracker */
     mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
 
-    if (mUseHalBufManager) {
-        res = mRequestBufferSM.initialize(mStatusTracker);
-        if (res != OK) {
-            SET_ERR_L("Unable to start request buffer state machine: %s (%d)",
-                    strerror(-res), res);
-            mInterface->close();
-            mStatusTracker.clear();
-            return res;
-        }
-    }
-
     /** Create buffer manager */
     mBufferManager = new Camera3BufferManager();
 
@@ -264,6 +255,8 @@
         return res;
     }
 
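+    // Cache whether the static metadata advertises any extension tags; used later to filter
+    // extension keys out of capture requests.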
+    mSupportsExtensionKeys = areExtensionKeysSupported(mDeviceInfo);
+
     return OK;
 }
 
@@ -1622,7 +1615,9 @@
     mStatusWaiters++;
 
     bool signalPipelineDrain = false;
-    if (!active && mUseHalBufManager) {
+    if (!active &&
+            (mUseHalBufManager ||
+                    (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() != 0))) {
         auto streamIds = mOutputStreams.getStreamIds();
         if (mStatus == STATUS_ACTIVE) {
             mRequestThread->signalPipelineDrain(streamIds);
@@ -2538,11 +2533,14 @@
     }
 
     config.streams = streams.editArray();
+    config.hal_buffer_managed_streams = mHalBufManagedStreamIds;
     config.use_hal_buf_manager = mUseHalBufManager;
 
     // Do the HAL configuration; will potentially touch stream
-    // max_buffers, usage, and priv fields, as well as data_space and format
-    // fields for IMPLEMENTATION_DEFINED formats.
+    // max_buffers, usage, and priv fields, as well as data_space and format
+    // fields for IMPLEMENTATION_DEFINED formats. It may also update
+    // hal_buffer_managed_streams and use_hal_buf_manager (when the aconfig flag
+    // session_hal_buf_manager is not enabled but the HAL supports a session-specific
+    // HAL buffer manager).
 
     int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
     const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
@@ -2562,13 +2560,19 @@
                 strerror(-res), res);
         return res;
     }
+    // It is possible that use hal buffer manager behavior was changed by the
+    // configureStreams call.
+    mUseHalBufManager = config.use_hal_buf_manager;
     if (flags::session_hal_buf_manager()) {
-        bool prevSessionHalBufManager = mUseHalBufManager;
-        // It is possible that configureStreams() changed config.use_hal_buf_manager
-        mUseHalBufManager = config.use_hal_buf_manager;
-        if (prevSessionHalBufManager && !mUseHalBufManager) {
+        bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
+        // It is possible that configureStreams() changed config.hal_buffer_managed_streams
+        mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
+
+        bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
+
+        if (prevSessionHalBufManager && !thisSessionHalBufManager) {
             mRequestBufferSM.deInit();
-        } else if (!prevSessionHalBufManager && mUseHalBufManager) {
+        } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
             res = mRequestBufferSM.initialize(mStatusTracker);
             if (res != OK) {
                 SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
@@ -2576,7 +2580,7 @@
                 return res;
             }
         }
-        mRequestThread->setHalBufferManager(mUseHalBufManager);
+        mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
     }
     // Finish all stream configuration immediately.
     // TODO: Try to relax this later back to lazy completion, which should be
@@ -2904,7 +2908,8 @@
 
     FlushInflightReqStates states {
         mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
-        listener, *this, *mInterface, *this, mSessionStatsBuilder};
+        mHalBufManagedStreamIds, listener, *this, *mInterface, *this,
+        mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 }
@@ -2969,6 +2974,11 @@
     return mBufferRecords.verifyBufferIds(streamId, bufIds);
 }
 
+bool Camera3Device::HalInterface::isHalBufferManagedStream(int32_t streamId) const {
+    return (mUseHalBufManager || (flags::session_hal_buf_manager() &&
+                                  contains(mHalBufManagedStreamIds, streamId)));
+}
+
 status_t Camera3Device::HalInterface::popInflightBuffer(
         int32_t frameNumber, int32_t streamId,
         /*out*/ buffer_handle_t **buffer) {
@@ -3061,7 +3071,7 @@
         mOverrideToPortrait(overrideToPortrait),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
-    mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -3287,8 +3297,9 @@
     mDoPauseSignal.signal();
 }
 
-void Camera3Device::RequestThread::setHalBufferManager(bool enabled) {
-    mUseHalBufManager = enabled;
+void Camera3Device::RequestThread::setHalBufferManagedStreams(
+            const std::set<int32_t> &halBufferManagedStreams) {
+    mHalBufManagedStreamIds = halBufferManagedStreams;
 }
 
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
@@ -3590,6 +3601,7 @@
     for (size_t i = 0; i < mNextRequests.size(); i++) {
         auto& nextRequest = mNextRequests.editItemAt(i);
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
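+        // Record whether the test pattern override modified this request's settings.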
+        captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
         // Do not override rotate&crop for stream configurations that include
         // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
         // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
@@ -3748,7 +3760,6 @@
         bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
         mPrevTriggers = triggerCount;
 
-        bool testPatternChanged = overrideTestPattern(captureRequest);
         bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
@@ -3757,7 +3768,7 @@
                 (mPrevRequest != captureRequest || triggersMixedIn ||
                          captureRequest->mRotateAndCropChanged ||
                          captureRequest->mAutoframingChanged ||
-                         testPatternChanged || settingsOverrideChanged ||
+                         captureRequest->mTestPatternChanged || settingsOverrideChanged ||
                          (flags::inject_session_params() && mForceNewRequestAfterReconfigure)) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
@@ -3884,13 +3895,22 @@
 
                     for (it = captureRequest->mSettingsList.begin();
                             it != captureRequest->mSettingsList.end(); it++) {
-                        res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
-                                mVndkVersion, it->metadata, false /*isStatic*/);
+                        res = filterVndkKeys(mVndkVersion, it->metadata, false /*isStatic*/);
                         if (res != OK) {
                             SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
                                     "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
                             return INVALID_OPERATION;
                         }
+
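+                        // The device does not advertise any extension keys, so strip them from
+                        // the capture settings before sending the request to the HAL.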
+                        if (!parent->mSupportsExtensionKeys) {
+                            res = filterExtensionKeys(&it->metadata);
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Failed during extension filter of capture "
+                                        "requests %d: %s (%d)", halRequest->frame_number,
+                                        strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                        }
                     }
                 }
             }
@@ -3972,11 +3992,15 @@
         nsecs_t waitDuration = kBaseGetBufferWait + parent->getExpectedInFlightDuration();
 
         SurfaceMap uniqueSurfaceIdMap;
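+        // Track whether any output stream in this request is HAL buffer managed; this feeds into
+        // the passSurfaceMap decision for registerInFlight below.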
+        bool containsHalBufferManagedStream = false;
         for (size_t j = 0; j < captureRequest->mOutputStreams.size(); j++) {
             sp<Camera3OutputStreamInterface> outputStream =
                     captureRequest->mOutputStreams.editItemAt(j);
             int streamId = outputStream->getId();
-
+            if (!containsHalBufferManagedStream) {
+                containsHalBufferManagedStream =
+                        contains(mHalBufManagedStreamIds, streamId);
+            }
             // Prepare video buffers for high speed recording on the first video request.
             if (mPrepareVideoStream && outputStream->isVideoStream()) {
                 // Only try to prepare video stream on the first video request.
@@ -4008,7 +4032,7 @@
                 uniqueSurfaceIdMap.insert({streamId, std::move(uniqueSurfaceIds)});
             }
 
-            if (mUseHalBufManager) {
+            if (parent->isHalBufferManagedStream(streamId)) {
                 if (outputStream->isAbandoned()) {
                     ALOGV("%s: stream %d is abandoned, skipping request", __FUNCTION__, streamId);
                     return TIMED_OUT;
@@ -4099,6 +4123,9 @@
                 isZslCapture = true;
             }
         }
+        bool passSurfaceMap =
+                mUseHalBufManager ||
+                        (flags::session_hal_buf_manager() && containsHalBufferManagedStream);
         auto expectedDurationInfo = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
@@ -4110,7 +4137,7 @@
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
                 mPrevCameraIdsWithZoom,
-                (mUseHalBufManager) ? uniqueSurfaceIdMap :
+                passSurfaceMap ? uniqueSurfaceIdMap :
                                       SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
                ", burstId = %" PRId32 ".",
@@ -4213,7 +4240,8 @@
 }
 
 void Camera3Device::RequestThread::signalPipelineDrain(const std::vector<int>& streamIds) {
-    if (!mUseHalBufManager) {
+    if (!mUseHalBufManager &&
+            (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() == 0)) {
         ALOGE("%s called for camera device not supporting HAL buffer management", __FUNCTION__);
         return;
     }
@@ -4365,22 +4393,28 @@
             captureRequest->mInputStream->returnInputBuffer(captureRequest->mInputBuffer);
         }
 
-        // No output buffer can be returned when using HAL buffer manager
-        if (!mUseHalBufManager) {
-            for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
-                //Buffers that failed processing could still have
-                //valid acquire fence.
-                int acquireFence = (*outputBuffers)[i].acquire_fence;
-                if (0 <= acquireFence) {
-                    close(acquireFence);
-                    outputBuffers->editItemAt(i).acquire_fence = -1;
-                }
-                outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
-                captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i],
-                        /*timestamp*/0, /*readoutTimestamp*/0,
-                        /*timestampIncreasing*/true, std::vector<size_t> (),
-                        captureRequest->mResultExtras.frameNumber);
+        for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
+            //Buffers that failed processing could still have
+            //valid acquire fence.
+            Camera3Stream *stream = Camera3Stream::cast((*outputBuffers)[i].stream);
+            int32_t streamId = stream->getId();
+            bool skipBufferForStream =
+                    mUseHalBufManager || (flags::session_hal_buf_manager() &&
+                            contains(mHalBufManagedStreamIds, streamId));
+            if (skipBufferForStream) {
+                // No output buffer can be returned when using HAL buffer manager for its stream
+                continue;
             }
+            int acquireFence = (*outputBuffers)[i].acquire_fence;
+            if (0 <= acquireFence) {
+                close(acquireFence);
+                outputBuffers->editItemAt(i).acquire_fence = -1;
+            }
+            outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
+            captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i],
+                    /*timestamp*/0, /*readoutTimestamp*/0,
+                    /*timestampIncreasing*/true, std::vector<size_t> (),
+                    captureRequest->mResultExtras.frameNumber);
         }
 
         if (sendRequestError) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 25f841a..ac4b039 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -97,6 +97,10 @@
         return mInterface->getTransportType();
     }
 
+    bool isHalBufferManagedStream(int32_t streamId) const {
+        return mInterface->isHalBufferManagedStream(streamId);
+    }
+
     /**
      * CameraDeviceBase interface
      */
@@ -476,6 +480,9 @@
 
         /////////////////////////////////////////////////////////////////////
 
+        // Check if a stream is HAL buffer managed
+        bool isHalBufferManagedStream(int32_t streamId) const;
+
         // Get a vector of (frameNumber, streamId) pair of currently inflight
         // buffers
         void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
@@ -547,7 +554,9 @@
 
         uint32_t mNextStreamConfigCounter = 1;
 
+        // TODO: This can be removed after flags::session_hal_buf_manager is removed
         bool mUseHalBufManager = false;
+        std::set<int32_t> mHalBufManagedStreamIds;
         bool mIsReconfigurationQuerySupported;
 
         const bool mSupportOfflineProcessing;
@@ -655,6 +664,8 @@
         bool                                mAutoframingAuto;
         // Indicates that the auto framing value within 'mSettingsList' was modified
         bool                                mAutoframingChanged = false;
+        // Indicates that the camera test pattern setting is modified
+        bool                                mTestPatternChanged = false;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -946,11 +957,11 @@
         void     setPaused(bool paused);
 
         /**
-         * Set Hal buffer manager behavior
-         * @param enabled Whether HAL buffer manager is enabled for the current session.
+         * Set HAL buffer managed streams
+         * @param halBufferManagedStreams The streams for which the HAL buffer manager is enabled
          *
          */
-        void setHalBufferManager(bool enabled);
+        void setHalBufferManagedStreams(const std::set<int32_t> &halBufferManagedStreams);
 
         /**
          * Wait until thread processes the capture request with settings'
@@ -1201,6 +1212,7 @@
         std::map<int32_t, std::set<std::string>> mGroupIdPhysicalCameraMap;
 
         bool               mUseHalBufManager = false;
+        std::set<int32_t> mHalBufManagedStreamIds;
         const bool         mSupportCameraMute;
         const bool         mOverrideToPortrait;
         const bool         mSupportSettingsOverride;
@@ -1391,6 +1403,7 @@
 
     // Whether HAL request buffers through requestStreamBuffers API
     bool mUseHalBufManager = false;
+    std::set<int32_t> mHalBufManagedStreamIds;
     bool mSessionHalBufManager = false;
     // Lock to ensure requestStreamBuffers() callbacks are serialized
     std::mutex mRequestBufferInterfaceLock;
@@ -1510,6 +1523,9 @@
     // AE_TARGET_FPS_RANGE
     bool mIsFixedFps = false;
 
+    // Flag indicating whether extension-related metadata should be forwarded
+    bool mSupportsExtensionKeys = false;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index c59138c..152687e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -84,10 +84,10 @@
     status_t res = getEndpointUsage(&consumerUsage);
     if (res != OK) consumerUsage = 0;
 
-    lines << fmt::sprintf("      State: %d\n", mState);
+    lines << fmt::sprintf("      State: %d\n", static_cast<int>(mState));
     lines << fmt::sprintf("      Dims: %d x %d, format 0x%x, dataspace 0x%x\n",
             camera_stream::width, camera_stream::height,
-            camera_stream::format, camera_stream::data_space);
+            camera_stream::format, static_cast<int>(camera_stream::data_space));
     lines << fmt::sprintf("      Max size: %zu\n", mMaxSize);
     lines << fmt::sprintf("      Combined usage: 0x%" PRIx64 ", max HAL buffers: %d\n",
             mUsage | consumerUsage, camera_stream::max_buffers);
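
For context on the two casts above: the stream state and the dataspace are enum-typed values printed through a "%d" conversion, and the explicit static_cast<int> makes the integral intent unambiguous to the type-checked fmt::sprintf call (and to format-string warnings). A tiny stand-alone illustration of the same idea, using std::printf and a hypothetical scoped enum rather than the actual Camera3 types:

    #include <cstdio>

    // Hypothetical enum standing in for the stream state / dataspace types.
    enum class StreamState : int { CONSTRUCTED = 0, CONFIGURED = 3 };

    int main() {
        StreamState state = StreamState::CONFIGURED;
        // A scoped enum does not convert implicitly to int, so a "%d" conversion
        // needs an explicit cast; the diff applies the same idea to fmt::sprintf.
        std::printf("State: %d\n", static_cast<int>(state));
        return 0;
    }
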
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 172b62a..1025061 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -58,6 +58,7 @@
         mTagMonitor(offlineStates.mTagMonitor),
         mVendorTagId(offlineStates.mVendorTagId),
         mUseHalBufManager(offlineStates.mUseHalBufManager),
+        mHalBufManagedStreamIds(offlineStates.mHalBufManagedStreamIds),
         mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
         mUsePartialResult(offlineStates.mUsePartialResult),
         mNumPartialResults(offlineStates.mNumPartialResults),
@@ -136,7 +137,7 @@
 
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
-        listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
+        mHalBufManagedStreamIds, listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index b5fd486..1ef3921 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -51,7 +51,8 @@
 struct Camera3OfflineStates {
     Camera3OfflineStates(
             const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
-            const bool useHalBufManager, const bool needFixupMonochromeTags,
+            const bool useHalBufManager, const std::set<int32_t> &halBufferManagedStreamIds,
+            const bool needFixupMonochromeTags,
             const bool usePartialResult, const uint32_t numPartialResults,
             const int64_t lastCompletedRegularFN, const int64_t lastCompletedReprocessFN,
             const int64_t lastCompletedZslFN, const uint32_t nextResultFN,
@@ -64,7 +65,8 @@
             const std::unordered_map<std::string, camera3::RotateAndCropMapper>&
                 rotateAndCropMappers) :
             mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
-            mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
+            mUseHalBufManager(useHalBufManager), mHalBufManagedStreamIds(halBufferManagedStreamIds),
+            mNeedFixupMonochromeTags(needFixupMonochromeTags),
             mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
             mLastCompletedRegularFrameNumber(lastCompletedRegularFN),
             mLastCompletedReprocessFrameNumber(lastCompletedReprocessFN),
@@ -85,6 +87,7 @@
     const metadata_vendor_id_t mVendorTagId;
 
     const bool mUseHalBufManager;
+    const std::set<int32_t> &mHalBufManagedStreamIds;
     const bool mNeedFixupMonochromeTags;
 
     const bool mUsePartialResult;
@@ -181,6 +184,7 @@
     const metadata_vendor_id_t mVendorTagId;
 
     const bool mUseHalBufManager;
+    const std::set<int32_t> &mHalBufManagedStreamIds;
     const bool mNeedFixupMonochromeTags;
 
     const bool mUsePartialResult;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index f98636b..2ce04e8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -235,65 +235,6 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
-    status_t res;
-
-    if ((res = getBufferPreconditionCheckLocked()) != OK) {
-        return res;
-    }
-
-    if (mUseBufferManager) {
-        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
-                __FUNCTION__, mId);
-        return INVALID_OPERATION;
-    }
-
-    sp<Surface> consumer = mConsumer;
-    /**
-     * Release the lock briefly to avoid deadlock for below scenario:
-     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
-     * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock.
-     * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
-     * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock
-     * StreamingProcessor lock.
-     * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock
-     * and try to lock bufferQueue lock.
-     * Then there is circular locking dependency.
-     */
-    mLock.unlock();
-
-    size_t numBuffersRequested = outBuffers->size();
-    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
-
-    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
-    res = consumer->dequeueBuffers(&buffers);
-    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
-
-    mLock.lock();
-
-    if (res != OK) {
-        if (shouldLogError(res, mState)) {
-            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
-                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
-        }
-        checkRetAndSetAbandonedLocked(res);
-        return res;
-    }
-    checkRemovedBuffersLocked();
-
-    /**
-     * FenceFD now owned by HAL except in case of error,
-     * in which case we reassign it to acquire_fence
-     */
-    for (size_t i = 0; i < numBuffersRequested; i++) {
-        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
-                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
-                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
-    }
-    return OK;
-}
-
 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
             ANativeWindowBuffer* buffer, int anwReleaseFence,
             const std::vector<size_t>&) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 65791a9..da0ed87 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -388,8 +388,6 @@
     virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids);
 
-    virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;
-
     virtual status_t returnBufferLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp, nsecs_t readoutTimestamp,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 450f3dd..89e08a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -45,13 +45,17 @@
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera_metadata_hidden.h>
+#include <com_android_internal_camera_flags.h>
 
 #include "device3/Camera3OutputUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 
 #include "system/camera_metadata.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace camera3 {
@@ -495,7 +499,8 @@
     states.inflightIntf.onInflightEntryRemovedLocked(duration);
 }
 
-void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx,
+        std::vector<BufferToReturn> *returnableBuffers) {
     InFlightRequestMap& inflightMap = states.inflightMap;
     const InFlightRequest &request = inflightMap.valueAt(idx);
     const uint32_t frameNumber = inflightMap.keyAt(idx);
@@ -533,11 +538,13 @@
         assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);
 
-        returnOutputBuffers(
-            states.useHalBufManager, states.listener,
+        collectReturnableOutputBuffers(
+            states.useHalBufManager, states.halBufManagedStreamIds,
+            states.listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
             /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+            /*out*/ returnableBuffers,
             /*timestampIncreasing*/true,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
@@ -632,6 +639,7 @@
     // in-flight request and they will be returned when the shutter timestamp
     // arrives. Update the in-flight status and remove the in-flight entry if
     // all result data and shutter timestamp have been received.
+    std::vector<BufferToReturn> returnableBuffers{};
     nsecs_t shutterTimestamp = 0;
     {
         std::lock_guard<std::mutex> l(states.inflightLock);
@@ -793,9 +801,11 @@
         request.pendingOutputBuffers.appendArray(result->output_buffers,
                 result->num_output_buffers);
         if (shutterTimestamp != 0) {
-            returnAndRemovePendingOutputBuffers(
-                states.useHalBufManager, states.listener,
-                request, states.sessionStatsBuilder);
+            collectAndRemovePendingOutputBuffers(
+                states.useHalBufManager, states.halBufManagedStreamIds,
+                states.listener,
+                request, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers);
         }
 
         if (result->result != NULL && !isPartialResult) {
@@ -820,9 +830,18 @@
                     request.physicalMetadatas);
             }
         }
-        removeInFlightRequestIfReadyLocked(states, idx);
+        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+        if (!flags::return_buffers_outside_locks()) {
+            finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+        }
     } // scope for states.inFlightLock
 
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
     if (result->input_buffer != NULL) {
         if (hasInputBufferInRequest) {
             Camera3Stream *stream =
@@ -843,16 +862,17 @@
     }
 }
 
-void returnOutputBuffers(
+void collectReturnableOutputBuffers(
         bool useHalBufManager,
+        const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener,
         const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
         nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
         nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
+        /*out*/ std::vector<BufferToReturn> *returnableBuffers,
         bool timestampIncreasing, const SurfaceMap& outputSurfaces,
-        const CaptureResultExtras &inResultExtras,
+        const CaptureResultExtras &resultExtras,
         ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
-
     for (size_t i = 0; i < numBuffers; i++)
     {
         Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
@@ -862,7 +882,7 @@
         if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                 errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
@@ -871,7 +891,9 @@
         }
 
         if (outputBuffers[i].buffer == nullptr) {
-            if (!useHalBufManager) {
+            if (!useHalBufManager &&
+                    !(flags::session_hal_buf_manager() &&
+                            contains(halBufferManagedStreams, streamId))) {
                 // With the HAL buffer management API, the HAL sometimes has to return buffers
                 // that do not yet have an output buffer handle filled in. This is, however,
                 // illegal if the HAL buffer management API is not being used.
@@ -885,22 +907,35 @@
         }
 
         const auto& it = outputSurfaces.find(streamId);
-        status_t res = OK;
 
         // Do not return the buffer if the buffer status is error, and the error
         // buffer strategy is CACHE.
         if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                 errorBufStrategy != ERROR_BUF_CACHE) {
             if (it != outputSurfaces.end()) {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        it->second, inResultExtras.frameNumber, transform);
+                        it->second, resultExtras,
+                        transform, requested ? requestTimeNs : 0);
             } else {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
+                        std::vector<size_t> (), resultExtras,
+                        transform, requested ? requestTimeNs : 0 );
             }
         }
+    }
+}
+
+void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+        sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder) {
+    for (auto& b : returnableBuffers) {
+        const int streamId = b.stream->getId();
+
+        status_t res = b.stream->returnBuffer(b.buffer, b.timestamp,
+                b.readoutTimestamp, b.timestampIncreasing,
+                b.surfaceIds, b.resultExtras.frameNumber, b.transform);
+
         // Note: stream may be deallocated at this point, if this buffer was
         // the last reference to it.
         bool dropped = false;
@@ -911,50 +946,55 @@
             ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
             dropped = true;
         } else {
-            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
+            if (b.buffer.status == CAMERA_BUFFER_STATUS_ERROR || b.timestamp == 0) {
                 dropped = true;
             }
         }
-        if (requested) {
+        if (b.requestTimeNs > 0) {
             nsecs_t bufferTimeNs = systemTime();
-            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+            int32_t captureLatencyMs = ns2ms(bufferTimeNs - b.requestTimeNs);
             sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
         }
 
         // Long processing consumers can cause returnBuffer timeout for shared stream
         // If that happens, cancel the buffer and send a buffer error to client
-        if (it != outputSurfaces.end() && res == TIMED_OUT &&
-                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
+        if (b.surfaceIds.size() > 0 && res == TIMED_OUT &&
+                b.buffer.status == CAMERA_BUFFER_STATUS_OK) {
             // cancel the buffer
-            camera_stream_buffer_t sb = outputBuffers[i];
+            camera_stream_buffer_t sb = b.buffer;
             sb.status = CAMERA_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
-                    timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber, transform);
+            b.stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
+                    b.timestampIncreasing, std::vector<size_t> (),
+                    b.resultExtras.frameNumber, b.transform);
 
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = b.resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                         extras);
             }
         }
+
     }
 }
 
-void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+void collectAndRemovePendingOutputBuffers(bool useHalBufManager,
+        const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener, InFlightRequest& request,
-        SessionStatsBuilder& sessionStatsBuilder) {
+        SessionStatsBuilder& sessionStatsBuilder,
+        std::vector<BufferToReturn> *returnableBuffers) {
     bool timestampIncreasing =
             !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
     nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
             request.resultExtras.readoutTimestamp : 0;
-    returnOutputBuffers(useHalBufManager, listener,
+    collectReturnableOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
             request.shutterTimestamp, readoutTimestamp,
-            /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+            /*requested*/true, request.requestTimeNs, sessionStatsBuilder,
+            /*out*/ returnableBuffers,
+            timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
 
@@ -974,6 +1014,9 @@
     ATRACE_CALL();
     ssize_t idx;
 
+    std::vector<BufferToReturn> returnableBuffers{};
+    CaptureResultExtras pendingNotificationResultExtras{};
+
     // Set timestamp for the request in the in-flight tracking
     // and get the request ID to send upstream
     {
@@ -1040,9 +1083,13 @@
                             states.lastCompletedReprocessFrameNumber;
                     r.resultExtras.lastCompletedZslFrameNumber =
                             states.lastCompletedZslFrameNumber;
-                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    if (flags::return_buffers_outside_locks()) {
+                        pendingNotificationResultExtras = r.resultExtras;
+                    } else {
+                        states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    }
                 }
-                // send pending result and buffers
+                // send pending result and buffers; this queues them up for delivery later
                 const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                         inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                 sendCaptureResult(states,
@@ -1051,16 +1098,35 @@
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
                     r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
             }
-            returnAndRemovePendingOutputBuffers(
-                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
+            collectAndRemovePendingOutputBuffers(
+                    states.useHalBufManager, states.halBufManagedStreamIds,
+                    states.listener, r, states.sessionStatsBuilder, &returnableBuffers);
 
-            removeInFlightRequestIfReadyLocked(states, idx);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
+            removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+
         }
     }
     if (idx < 0) {
         SET_ERR("Shutter notification for non-existent frame number %d",
                 msg.frame_number);
     }
+    // Call notifyShutter outside of in-flight mutex
+    if (flags::return_buffers_outside_locks() && pendingNotificationResultExtras.isValid()) {
+        states.listener->notifyShutter(pendingNotificationResultExtras, msg.timestamp);
+    }
+
+    // With no locks held, finish returning buffers to streams, which may take a while since
+    // binder calls are involved
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
 }
 
 void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
@@ -1106,6 +1172,8 @@
             break;
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+        {
+            std::vector<BufferToReturn> returnableBuffers{};
             {
                 std::lock_guard<std::mutex> l(states.inflightLock);
                 ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -1142,7 +1210,12 @@
 
                         // Check whether the buffers returned. If they returned,
                         // remove inflight request.
-                        removeInFlightRequestIfReadyLocked(states, idx);
+                        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+                        if (!flags::return_buffers_outside_locks()) {
+                            finishReturningOutputBuffers(returnableBuffers,
+                                    states.listener, states.sessionStatsBuilder);
+                        }
+
                     }
                 } else {
                     resultExtras.frameNumber = msg.frame_number;
@@ -1151,6 +1224,12 @@
                             resultExtras.frameNumber);
                 }
             }
+
+            if (flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
             resultExtras.errorStreamId = streamId;
             if (states.listener != nullptr) {
                 states.listener->notifyError(errorCode, resultExtras);
@@ -1159,6 +1238,7 @@
                         states.cameraId.c_str(), __FUNCTION__);
             }
             break;
+        }
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
             // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
             // callback to the app. Rather, use STATUS_ERROR of image buffers.
@@ -1188,17 +1268,24 @@
 
 void flushInflightRequests(FlushInflightReqStates& states) {
     ATRACE_CALL();
+    std::vector<BufferToReturn> returnableBuffers{};
     { // First return buffers cached in inFlightMap
         std::lock_guard<std::mutex> l(states.inflightLock);
         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
             const InFlightRequest &request = states.inflightMap.valueAt(idx);
-            returnOutputBuffers(
-                states.useHalBufManager, states.listener,
+            collectReturnableOutputBuffers(
+                states.useHalBufManager, states.halBufManagedStreamIds,
+                states.listener,
                 request.pendingOutputBuffers.array(),
                 request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                 /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers,
                 /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                 request.errorBufStrategy);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1209,6 +1296,10 @@
         states.inflightMap.clear();
         states.inflightIntf.onInflightMapFlushedLocked();
     }
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
 
     // Then return all inflight buffers not returned by HAL
     std::vector<std::pair<int32_t, int32_t>> inflightKeys;
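
A pattern repeated throughout this file is worth summarizing: buffers are first collected into a vector of BufferToReturn while the in-flight lock is held (no binder calls are made there), and the actual stream->returnBuffer work is performed by finishReturningOutputBuffers either still inside the lock (preserving the old behavior) or after the lock scope ends, depending on flags::return_buffers_outside_locks(). The following simplified sketch shows only the call shape, with stand-in types; it is not the Camera3 code itself.

    #include <mutex>
    #include <vector>

    struct PendingReturn { int streamId; };       // stand-in for BufferToReturn

    std::mutex gInflightLock;                     // plays the role of states.inflightLock

    // Phase 1: runs under the lock; only records what needs to be returned.
    static void collectLocked(std::vector<PendingReturn>* out) {
        out->push_back({/*streamId=*/0});
    }

    // Phase 2: meant to run with no critical locks held; this is where the
    // potentially slow returnBuffer / binder work would happen.
    static void finishReturning(const std::vector<PendingReturn>& buffers) {
        for (const auto& b : buffers) { (void)b; /* stream->returnBuffer(...) */ }
    }

    static void processResult(bool returnOutsideLocks /* ~ the aconfig flag */) {
        std::vector<PendingReturn> returnable;
        {
            std::lock_guard<std::mutex> l(gInflightLock);
            collectLocked(&returnable);
            if (!returnOutsideLocks) {
                finishReturning(returnable);      // legacy path: still inside the lock
            }
        }
        if (returnOutsideLocks) {
            finishReturning(returnable);          // new path: after the lock is dropped
        }
    }
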
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 134c037..75864d7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -44,15 +44,50 @@
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
 
-    // helper function to return the output buffers to output streams. The
-    // function also optionally calls notify(ERROR_BUFFER).
-    void returnOutputBuffers(
+    struct BufferToReturn {
+        Camera3StreamInterface *stream;
+        camera_stream_buffer_t buffer;
+        nsecs_t timestamp;
+        nsecs_t readoutTimestamp;
+        bool timestampIncreasing;
+        std::vector<size_t> surfaceIds;
+        const CaptureResultExtras resultExtras;
+        int32_t transform;
+        nsecs_t requestTimeNs;
+
+        BufferToReturn(Camera3StreamInterface *stream,
+                camera_stream_buffer_t buffer,
+                nsecs_t timestamp, nsecs_t readoutTimestamp,
+                bool timestampIncreasing, std::vector<size_t> surfaceIds,
+                const CaptureResultExtras &resultExtras,
+                int32_t transform, nsecs_t requestTimeNs):
+            stream(stream),
+            buffer(buffer),
+            timestamp(timestamp),
+            readoutTimestamp(readoutTimestamp),
+            timestampIncreasing(timestampIncreasing),
+            surfaceIds(surfaceIds),
+            resultExtras(resultExtras),
+            transform(transform),
+            requestTimeNs(requestTimeNs) {}
+    };
+
+    // Helper function to collect the output buffers that are ready to be
+    // returned to output streams. The function also optionally calls
+    // notify(ERROR_BUFFER). The list of buffers to hand back to streams is
+    // returned in returnableBuffers. Does not make any two-way binder
+    // calls, so it is suitable for use while critical locks are being
+    // held.
+    void collectReturnableOutputBuffers(
             bool useHalBufManager,
+            const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
             const camera_stream_buffer_t *outputBuffers,
             size_t numBuffers, nsecs_t timestamp,
             nsecs_t readoutTimestamp, bool requested, nsecs_t requestTimeNs,
-            SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
+            SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers,
+            bool timestampIncreasing = true,
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
             // Used to send buffer error callback when failing to return buffer
@@ -60,13 +95,24 @@
             ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN,
             int32_t transform = -1);
 
-    // helper function to return the output buffers to output streams, and
-    // remove the returned buffers from the inflight request's pending buffers
-    // vector.
-    void returnAndRemovePendingOutputBuffers(
+    // Helper function to collect the output buffers that are ready to be
+    // returned to output streams, and to remove these buffers from the
+    // inflight request's pending buffers vector. Does not make any two-way
+    // binder calls, so it is suitable for use while critical locks are
+    // being held.
+    void collectAndRemovePendingOutputBuffers(
             bool useHalBufManager,
+            const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
-            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
+            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers);
+
+    // Actually return filled output buffers to their consumers, using the list
+    // provided by collectReturnableOutputBuffers / collectAndRemovePendingOutputBuffers.
+    // Makes two-way binder calls to applications, so do not hold any critical locks when
+    // calling.
+    void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+            sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder);
 
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
@@ -87,6 +133,7 @@
         uint32_t& nextReprocResultFrameNum;
         uint32_t& nextZslResultFrameNum; // end of outputLock scope
         const bool useHalBufManager;
+        const std::set<int32_t> &halBufManagedStreamIds;
         const bool usePartialResult;
         const bool needFixupMonoChrome;
         const uint32_t numPartialResults;
@@ -118,6 +165,7 @@
         const std::string& cameraId;
         std::mutex& reqBufferLock; // lock to serialize request buffer calls
         const bool useHalBufManager;
+        const std::set<int32_t> &halBufManagedStreamIds;
         StreamSet& outputStreams;
         SessionStatsBuilder& sessionStatsBuilder;
         SetErrorInterface& setErrIntf;
@@ -128,6 +176,7 @@
     struct ReturnBufferStates {
         const std::string& cameraId;
         const bool useHalBufManager;
+        const std::set<int32_t> &halBufManagedStreamIds;
         StreamSet& outputStreams;
         SessionStatsBuilder& sessionStatsBuilder;
         BufferRecordsInterface& bufferRecordsIntf;
@@ -138,6 +187,7 @@
         std::mutex& inflightLock;
         InFlightRequestMap& inflightMap; // end of inflightLock scope
         const bool useHalBufManager;
+        const std::set<int32_t> &halBufManagedStreamIds;
         sp<NotificationListener> listener;
         InflightRequestUpdateInterface& inflightIntf;
         BufferRecordsInterface& bufferRecordsIntf;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index 3ac666b..aca7a67 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -32,13 +32,17 @@
 
 #include <camera/CameraUtils.h>
 #include <camera_metadata_hidden.h>
+#include <com_android_internal_camera_flags.h>
 
 #include "device3/Camera3OutputUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 
 #include "system/camera_metadata.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace camera3 {
@@ -207,7 +211,9 @@
 
         bool noBufferReturned = false;
         buffer_handle_t *buffer = nullptr;
-        if (states.useHalBufManager) {
+        if (states.useHalBufManager ||
+                (flags::session_hal_buf_manager() &&
+                        contains(states.halBufManagedStreamIds, bSrc.streamId))) {
             // This is suspicious most of the time but can be correct during flush where HAL
             // has to return capture result before a buffer is requested
             if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
@@ -294,13 +300,15 @@
 template <class VecStreamBufferType>
 void returnStreamBuffersT(ReturnBufferStates& states,
         const VecStreamBufferType& buffers) {
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer managerment",
-                __FUNCTION__, states.cameraId.c_str());
-        return;
-    }
 
     for (const auto& buf : buffers) {
+        if (!states.useHalBufManager &&
+            !(flags::session_hal_buf_manager() &&
+             contains(states.halBufManagedStreamIds, buf.streamId))) {
+            ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
+                  __FUNCTION__, states.cameraId.c_str(), buf.streamId);
+            return;
+        }
         if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
             ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
             continue;
@@ -337,9 +345,15 @@
             continue;
         }
         streamBuffer.stream = stream->asHalStream();
-        returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                &streamBuffer, /*size*/1, /*timestamp*/ 0, /*readoutTimestamp*/0,
-                /*requested*/false, /*requestTimeNs*/0, states.sessionStatsBuilder);
+        std::vector<BufferToReturn> returnableBuffers{};
+        collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+                /*listener*/nullptr, &streamBuffer, /*size*/1, /*timestamp*/ 0,
+                /*readoutTimestamp*/0, /*requested*/false, /*requestTimeNs*/0,
+                states.sessionStatsBuilder,
+                /*out*/&returnableBuffers);
+        finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+                states.sessionStatsBuilder);
+
     }
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 701c472..79a767a 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -969,11 +969,6 @@
     return INVALID_OPERATION;
 }
 
-status_t Camera3Stream::getBuffersLocked(std::vector<OutstandingBuffer>*) {
-    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
 status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
                                            nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
@@ -1047,92 +1042,6 @@
     mBufferFreedListener = listener;
 }
 
-status_t Camera3Stream::getBuffers(std::vector<OutstandingBuffer>* buffers,
-        nsecs_t waitBufferTimeout) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mLock);
-    status_t res = OK;
-
-    if (buffers == nullptr) {
-        ALOGI("%s: buffers must not be null!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    size_t numBuffersRequested = buffers->size();
-    if (numBuffersRequested == 0) {
-        ALOGE("%s: 0 buffers are requested!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    // This function should be only called when the stream is configured already.
-    if (mState != STATE_CONFIGURED) {
-        ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
-                __FUNCTION__, mId, mState);
-        if (mState == STATE_ABANDONED) {
-            return DEAD_OBJECT;
-        } else {
-            return INVALID_OPERATION;
-        }
-    }
-
-    size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
-    size_t numCachedBuffers = getCachedOutputBufferCountLocked();
-    size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
-    // Wait for new buffer returned back if we are running into the limit. There
-    // are 2 limits:
-    // 1. The number of HAL buffers is greater than max_buffers
-    // 2. The number of HAL buffers + cached buffers is greater than max_buffers
-    //    + maxCachedBuffers
-    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
-            numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
-            camera_stream::max_buffers + maxNumCachedBuffers) {
-        ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
-                "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
-                numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
-                maxNumCachedBuffers);
-        nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
-        if (waitBufferTimeout < kWaitForBufferDuration) {
-            waitBufferTimeout = kWaitForBufferDuration;
-        }
-        res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
-        nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-        mBufferLimitLatency.add(waitStart, waitEnd);
-        if (res != OK) {
-            if (res == TIMED_OUT) {
-                ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
-                        __FUNCTION__, waitBufferTimeout / 1000000LL,
-                        camera_stream::max_buffers);
-            }
-            return res;
-        }
-        size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
-        size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
-        if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
-                updatedNumCachedBuffers == numCachedBuffers) {
-            ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
-                    "getBuffer(s) call must not run in parallel!", __FUNCTION__,
-                    numOutstandingBuffers, updatedNumOutstandingBuffers);
-            return INVALID_OPERATION;
-        }
-        numOutstandingBuffers = updatedNumOutstandingBuffers;
-        numCachedBuffers = updatedNumCachedBuffers;
-    }
-
-    res = getBuffersLocked(buffers);
-    if (res == OK) {
-        for (auto& outstandingBuffer : *buffers) {
-            camera_stream_buffer* buffer = outstandingBuffer.outBuffer;
-            fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
-            if (buffer->buffer) {
-                Mutex::Autolock l(mOutstandingBuffersLock);
-                mOutstandingBuffers.push_back(*buffer->buffer);
-            }
-        }
-    }
-
-    return res;
-}
-
 void Camera3Stream::queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
         int64_t dynamicRangeProfile) {
     auto& mapper = GraphicBufferMapper::get();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f06ccf3..0df09cd 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -343,12 +343,6 @@
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    status_t         getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout);
-
-    /**
      * Return a buffer to the stream after use by the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -535,8 +529,6 @@
             nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
-    virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
-
     virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
 
     virtual status_t returnInputBufferLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7fa6273..26fa04f 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -395,11 +395,6 @@
          */
         std::vector<size_t> surface_ids;
     };
-    /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    virtual status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout) = 0;
 
     /**
      * Return a buffer to the stream after use by the HAL.
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index a7bd312..3626f20 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -35,6 +35,7 @@
     uint32_t operation_mode;
     bool input_is_multi_resolution;
     bool use_hal_buf_manager = false;
+    std::set<int32_t> hal_buffer_managed_streams;
 } camera_stream_configuration_t;
 
 typedef struct camera_capture_request {
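
The new hal_buffer_managed_streams field above is how the per-stream decision travels with the stream configuration: after configureStreams the device code fills it in and, presumably via the setHalBufManagedStreams() method declared earlier in Camera3Device.h, hands it to the request thread, which keeps its own mHalBufManagedStreamIds copy. A rough sketch of that hand-off with stand-in types (the real structures carry many more fields):

    #include <cstdint>
    #include <set>

    struct StreamConfigLike {                     // stand-in for camera_stream_configuration_t
        bool use_hal_buf_manager = false;
        std::set<int32_t> hal_buffer_managed_streams;
    };

    struct RequestThreadLike {                    // stand-in for Camera3Device::RequestThread
        std::set<int32_t> mHalBufManagedStreamIds;
        void setHalBufManagedStreams(const std::set<int32_t>& ids) {
            mHalBufManagedStreamIds = ids;
        }
    };

    static void propagateHalManagedStreams(const StreamConfigLike& config,
                                           RequestThreadLike& requestThread) {
        requestThread.setHalBufManagedStreams(config.hal_buffer_managed_streams);
    }
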
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 97475f0..e8ef692 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -59,6 +59,7 @@
 #include <com_android_internal_camera_flags.h>
 
 #include "utils/CameraTraces.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "mediautils/SchedulingPolicyService.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
@@ -79,6 +80,7 @@
 #include "AidlCamera3Device.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace aidl::android::hardware;
 using aidl::android::hardware::camera::metadata::SensorPixelMode;
 using aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
@@ -337,6 +339,16 @@
 
     mBatchSizeLimitEnabled = (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_2);
 
+    camera_metadata_entry readoutSupported = mDeviceInfo.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
+    if (readoutSupported.count == 0) {
+        ALOGW("%s: Could not find value corresponding to ANDROID_SENSOR_READOUT_TIMESTAMP. "
+              "Assuming true.", __FUNCTION__);
+        mSensorReadoutTimestampSupported = true;
+    } else {
+        mSensorReadoutTimestampSupported =
+                readoutSupported.data.u8[0] == ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
+    }
+
     return initializeCommonLocked();
 }
 
@@ -400,7 +412,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -442,7 +454,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -450,7 +462,7 @@
         mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
+        camera3::notify(states, msg, mSensorReadoutTimestampSupported);
     }
     return ::ndk::ScopedAStatus::ok();
 
@@ -531,7 +543,7 @@
         }
 
         // When not using HAL buf manager, only allow streams requested by app to be preserved
-        if (!mUseHalBufManager) {
+        if (!isHalBufferManagedStream(id)) {
             if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
                 SET_ERR("stream ID %d must not be switched to offline!", id);
                 return UNKNOWN_ERROR;
@@ -611,17 +623,18 @@
     // TODO: check if we need to lock before copying states
     //       though technically no other thread should be talking to Camera3Device at this point
     Camera3OfflineStates offlineStates(
-            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
-            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
-            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
-            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
-            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
-            mZoomRatioMappers, mRotateAndCropMappers);
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mHalBufManagedStreamIds,
+            mNeedFixupMonochromeTags, mUsePartialResult, mNumPartialResults,
+            mLastCompletedRegularFrameNumber, mLastCompletedReprocessFrameNumber,
+            mLastCompletedZslFrameNumber, mNextResultFrameNumber,
+            mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+            mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+            mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+            mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers);
 
     *session = new AidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
-            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+                                             std::move(bufferRecords), offlineReqs, offlineStates,
+                                             offlineSession, mSensorReadoutTimestampSupported);
 
     // Delete all streams that has been transferred to offline session
     Mutex::Autolock l(mLock);
@@ -688,8 +701,8 @@
         aidl::android::hardware::camera::device::BufferRequestStatus* status) {
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
-        mSessionStatsBuilder, *this, *(mInterface), *this};
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder, *this, *(mInterface), *this};
     camera3::requestStreamBuffers(states, bufReqs, outBuffers, status);
     return ::ndk::ScopedAStatus::ok();
 }
@@ -713,7 +726,7 @@
 ::ndk::ScopedAStatus AidlCamera3Device::returnStreamBuffers(
         const std::vector<camera::device::StreamBuffer>& buffers) {
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams,  mSessionStatsBuilder,
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams,  mSessionStatsBuilder,
         *(mInterface)};
     camera3::returnStreamBuffers(states, buffers);
     return ::ndk::ScopedAStatus::ok();
@@ -905,6 +918,12 @@
         camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        // For stream configurations with multi-res streams, the HAL buffer manager has to be used.
+        if (!flags::session_hal_buf_manager() && cam3stream->getHalStreamGroupId() != -1 &&
+                src->stream_type != CAMERA_STREAM_INPUT) {
+            mUseHalBufManager = true;
+            config->use_hal_buf_manager = true;
+        }
         cam3stream->setBufferFreedListener(this);
         int streamId = cam3stream->getId();
         StreamType streamType;
@@ -975,31 +994,38 @@
     requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
     requestedConfiguration.logId = logId;
     ndk::ScopedAStatus err = ndk::ScopedAStatus::ok();
+    int32_t interfaceVersion = 0;
     camera::device::ConfigureStreamsRet configureStreamsRet;
-    if (flags::session_hal_buf_manager()) {
-        int32_t interfaceVersion = 0;
-        err = mAidlSession->getInterfaceVersion(&interfaceVersion);
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error getting interface version: %s", __FUNCTION__,
-                    err.getMessage());
-            return AidlProviderInfo::mapToStatusT(err);
-        }
-        if (interfaceVersion >= AIDL_DEVICE_SESSION_V3 && mSupportSessionHalBufManager) {
-            err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
-            finalConfiguration = std::move(configureStreamsRet.halStreams);
-        } else {
-            err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
-        }
+    err = mAidlSession->getInterfaceVersion(&interfaceVersion);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error getting interface version: %s", __FUNCTION__,
+              err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+    if (flags::session_hal_buf_manager() && interfaceVersion >= AIDL_DEVICE_SESSION_V3
+            && mSupportSessionHalBufManager) {
+        err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
+        finalConfiguration = std::move(configureStreamsRet.halStreams);
     } else {
         err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
     }
+
     if (!err.isOk()) {
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
         return AidlProviderInfo::mapToStatusT(err);
     }
-    if (flags::session_hal_buf_manager() && mSupportSessionHalBufManager) {
-        mUseHalBufManager = configureStreamsRet.enableHalBufferManager;
-        config->use_hal_buf_manager = configureStreamsRet.enableHalBufferManager;
+
+    if (flags::session_hal_buf_manager()) {
+        std::set<int32_t> halBufferManagedStreamIds;
+        for (const auto &halStream: finalConfiguration) {
+            if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
+                    mSupportSessionHalBufManager && halStream.enableHalBufferManager)
+                    || mUseHalBufManager) {
+                halBufferManagedStreamIds.insert(halStream.id);
+            }
+        }
+        mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
+        config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
     }
     // And convert output stream configuration from AIDL
     for (size_t i = 0; i < config->num_streams; i++) {
@@ -1070,9 +1096,9 @@
             }
             dstStream->setUsage(
                     mapProducerToFrameworkUsage(src.producerUsage));
-
             if (flags::session_hal_buf_manager()) {
-                dstStream->setHalBufferManager(mUseHalBufManager);
+                dstStream->setHalBufferManager(
+                        contains(config->hal_buffer_managed_streams, streamId));
             }
         }
         dst->max_buffers = src.maxBuffers;
@@ -1396,7 +1422,7 @@
                     handlesCreated->push_back(acquireFence);
                 }
                 dst.acquireFence = camera3::dupToAidlIfNotNull(acquireFence);
-            } else if (mUseHalBufManager) {
+            } else if (isHalBufferManagedStream(streamId)) {
                 // HAL buffer management path
                 dst.bufferId = BUFFER_ID_NO_BUFFER;
                 dst.buffer = aidl::android::hardware::common::NativeHandle();
@@ -1410,7 +1436,7 @@
             dst.releaseFence = aidl::android::hardware::common::NativeHandle();
 
             // Output buffers are empty when using HAL buffer manager
-            if (!mUseHalBufManager) {
+            if (!isHalBufferManagedStream(streamId)) {
                 mBufferRecords.pushInflightBuffer(
                         captureRequest->frameNumber, streamId, src->buffer);
                 inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
@@ -1456,8 +1482,9 @@
                 bool supportCameraMute,
                 bool overrideToPortrait,
                 bool supportSettingsOverride) :
-          RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+          RequestThread(parent, statusTracker, interface, sessionParamKeys,
+                  useHalBufManager, supportCameraMute, overrideToPortrait,
+                  supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1690,7 +1717,8 @@
                 bool overrideToPortrait,
                 bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
+            useHalBufManager, supportCameraMute, overrideToPortrait,
+            supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 90e2f97..f0a5f7e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -289,6 +289,9 @@
     // capture requests.
     bool mBatchSizeLimitEnabled = false;
 
+    // Whether the HAL supports reporting sensor readout timestamp
+    bool mSensorReadoutTimestampSupported = true;
+
 }; // class AidlCamera3Device
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 01c4e88..f8308df 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -122,7 +122,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -169,7 +169,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -177,7 +177,7 @@
         /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
+        camera3::notify(states, msg, mSensorReadoutTimestampSupported);
     }
     return ::ndk::ScopedAStatus::ok();
 }
@@ -208,7 +208,8 @@
     }
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager,
+        mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
         *this, mBufferRecords, *this};
     camera3::requestStreamBuffers(states, bufReqs, buffers, status);
     return ::ndk::ScopedAStatus::ok();
@@ -241,7 +242,7 @@
     }
 
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
         mBufferRecords};
 
     camera3::returnStreamBuffers(states, buffers);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index 33b638c..f8fdeb9 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -105,19 +105,20 @@
     };
 
     // initialize by Camera3Device.
-    explicit AidlCamera3OfflineSession(const std::string& id,
-            const sp<camera3::Camera3Stream>& inputStream,
-            const camera3::StreamSet& offlineStreamSet,
-            camera3::BufferRecords&& bufferRecords,
+    explicit AidlCamera3OfflineSession(
+            const std::string& id, const sp<camera3::Camera3Stream>& inputStream,
+            const camera3::StreamSet& offlineStreamSet, camera3::BufferRecords&& bufferRecords,
             const camera3::InFlightRequestMap& offlineReqs,
             const Camera3OfflineStates& offlineStates,
             std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession>
-                    offlineSession) :
-      Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
-              offlineReqs, offlineStates),
-      mSession(offlineSession) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
-      };
+                    offlineSession,
+            bool sensorReadoutTimestampSupported)
+        : Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
+                                offlineReqs, offlineStates),
+          mSession(offlineSession),
+          mSensorReadoutTimestampSupported(sensorReadoutTimestampSupported) {
+            mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+    };
 
     /**
      * End of CameraOfflineSessionBase interface
@@ -130,6 +131,8 @@
 
     std::shared_ptr<AidlCameraDeviceCallbacks> mCallbacks;
 
+    bool mSensorReadoutTimestampSupported;
+
     virtual void closeSessionLocked() override;
 
     virtual void releaseSessionLocked() override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 74d4230..d9c8e57 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -67,7 +67,8 @@
 }
 
 void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg) {
+            const aidl::android::hardware::camera::device::NotifyMsg& msg,
+            bool hasReadoutTimestamp) {
 
     using ErrorCode = aidl::android::hardware::camera::device::ErrorCode;
     using Tag = aidl::android::hardware::camera::device::NotifyMsg::Tag;
@@ -110,8 +111,9 @@
             m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.get<Tag::shutter>().frameNumber;
             m.message.shutter.timestamp = msg.get<Tag::shutter>().timestamp;
-            m.message.shutter.readout_timestamp_valid = true;
-            m.message.shutter.readout_timestamp = msg.get<Tag::shutter>().readoutTimestamp;
+            m.message.shutter.readout_timestamp_valid = hasReadoutTimestamp;
+            m.message.shutter.readout_timestamp =
+                    hasReadoutTimestamp ? msg.get<Tag::shutter>().readoutTimestamp : 0LL;
             break;
     }
     notify(states, &m);
@@ -143,12 +145,6 @@
     std::lock_guard<std::mutex> lock(states.reqBufferLock);
     std::vector<StreamBufferRet> bufRets;
     outBuffers->clear();
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer management",
-                __FUNCTION__, states.cameraId.c_str());
-        *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
-        return;
-    }
 
     SortedVector<int32_t> streamIds;
     ssize_t sz = streamIds.setCapacity(bufReqs.size());
@@ -174,6 +170,13 @@
             *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
             return;
         }
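+        // With per-stream HAL buffer management, only reject the request if the
+        // stream is neither session-managed nor individually HAL-managed.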
+        if (!states.useHalBufManager &&
+                !contains(states.halBufManagedStreamIds, bufReq.streamId)) {
+            ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
+                  __FUNCTION__, states.cameraId.c_str(), bufReq.streamId);
+            *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+            return;
+        }
         streamIds.add(bufReq.streamId);
     }
 
@@ -316,10 +319,15 @@
                 sb.acquire_fence = -1;
                 sb.status = CAMERA_BUFFER_STATUS_ERROR;
             }
-            returnOutputBuffers(states.useHalBufManager, nullptr,
-                    streamBuffers.data(), numAllocatedBuffers, 0,
-                    0, false,
-                    0, states.sessionStatsBuilder);
+            std::vector<BufferToReturn> returnableBuffers{};
+            collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+                    /*listener*/ nullptr,
+                    streamBuffers.data(), numAllocatedBuffers, /*timestamp*/ 0,
+                    /*readoutTimestamp*/ 0, /*requested*/ false,
+                    /*requestTimeNs*/ 0, states.sessionStatsBuilder,
+                    /*out*/ &returnableBuffers);
+            finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+                    states.sessionStatsBuilder);
             for (auto buf : newBuffers) {
                 states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
             }
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
index e795624..d3a8ede 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
@@ -79,11 +79,8 @@
                     &physicalCameraMetadata);
 
     void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg,
-        bool hasReadoutTime, uint64_t readoutTime);
-
-    void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg);
+            const aidl::android::hardware::camera::device::NotifyMsg& msg,
+            bool hasReadoutTimestamp);
 
     void requestStreamBuffers(RequestBufferStates& states,
         const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4488067..f2e618f 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -47,6 +47,7 @@
 #include <utils/Timers.h>
 #include <cutils/properties.h>
 #include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
 
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
@@ -66,6 +67,7 @@
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -307,7 +309,8 @@
         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
         requestStreamBuffers_cb _hidl_cb) {
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder,
         *this, *mInterface, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -316,7 +319,8 @@
 hardware::Return<void> HidlCamera3Device::returnStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams,
+        mSessionStatsBuilder, *mInterface};
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
@@ -362,7 +366,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -425,7 +429,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -473,7 +477,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -641,14 +645,14 @@
     // TODO: check if we need to lock before copying states
     //       though technically no other thread should be talking to Camera3Device at this point
     Camera3OfflineStates offlineStates(
-            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
-            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
-            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
-            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
-            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
-            mZoomRatioMappers, mRotateAndCropMappers);
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mHalBufManagedStreamIds,
+            mNeedFixupMonochromeTags, mUsePartialResult, mNumPartialResults,
+            mLastCompletedRegularFrameNumber, mLastCompletedReprocessFrameNumber,
+            mLastCompletedZslFrameNumber, mNextResultFrameNumber,
+            mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+            mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+            mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+            mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers);
 
     *session = new HidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
             std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
@@ -716,7 +720,8 @@
                 bool overrideToPortrait,
                 bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
+                useHalBufManager, supportCameraMute, overrideToPortrait,
+                supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -909,6 +914,7 @@
     requestedConfiguration3_2.streams.resize(config->num_streams);
     requestedConfiguration3_4.streams.resize(config->num_streams);
     requestedConfiguration3_7.streams.resize(config->num_streams);
+    mHalBufManagedStreamIds.clear();
     for (size_t i = 0; i < config->num_streams; i++) {
         device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
         device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
@@ -922,6 +928,9 @@
         switch (src->stream_type) {
             case CAMERA_STREAM_OUTPUT:
                 streamType = StreamType::OUTPUT;
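+                // HIDL HALs have no per-stream opt-in, so every output stream is
+                // treated as HAL-buffer-managed when the session-wide manager is on.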
+                if (flags::session_hal_buf_manager() && mUseHalBufManager) {
+                    mHalBufManagedStreamIds.insert(streamId);
+                }
                 break;
             case CAMERA_STREAM_INPUT:
                 streamType = StreamType::INPUT;
@@ -931,6 +940,7 @@
                         __FUNCTION__, streamId, config->streams[i]->stream_type);
                 return BAD_VALUE;
         }
+
         dst3_2.id = streamId;
         dst3_2.streamType = streamType;
         dst3_2.width = src->width;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index e328ef6..aa4b762 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -103,7 +103,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -145,7 +145,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -182,7 +182,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -207,7 +207,8 @@
     }
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder,
         *this, mBufferRecords, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -224,7 +225,8 @@
     }
 
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
+        mBufferRecords};
 
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 59fc1cd..d607d10 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -25,6 +25,7 @@
 #include <hidl/Utils.h>
 #include <android/hardware/camera/device/3.2/types.h>
 #include <android-base/properties.h>
+#include <utils/Utils.h>
 
 namespace android {
 namespace frameworks {
@@ -58,7 +59,7 @@
     const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
   : mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 bool HidlCameraDeviceUser::initDevice() {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 94bf653..1a5a6b9 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -25,6 +25,8 @@
 
 #include <hidl/HidlTransportSupport.h>
 
+#include <utils/Utils.h>
+
 namespace android {
 namespace frameworks {
 namespace cameraservice {
@@ -56,8 +58,8 @@
 }
 
 HidlCameraService::HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) {
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
-};
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+}
 
 Return<void>
 HidlCameraService::getCameraCharacteristics(const hidl_string& cameraId,
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a53d26d..939126c 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -372,28 +372,22 @@
 };
 
 /**
- * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
+ * Simple test version of HidlServiceInteractionProxy, used to inject onRegistered calls to the
  * CameraProviderManager
  */
-struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy,
-                              public CameraProviderManager::AidlServiceInteractionProxy {
+struct TestHidlInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
     sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
     sp<TestICameraProvider> mTestCameraProvider;
-    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
 
-    TestInteractionProxy() {}
+    TestHidlInteractionProxy() {}
 
     void setProvider(sp<TestICameraProvider> provider) {
         mTestCameraProvider = provider;
     }
 
-    void setAidlProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
-        mTestAidlCameraProvider = provider;
-    }
-
     std::vector<std::string> mLastRequestedServiceNames;
 
-    virtual ~TestInteractionProxy() {}
+    virtual ~TestHidlInteractionProxy() {}
 
     virtual bool registerForNotifications(
             [[maybe_unused]] const std::string &serviceName,
@@ -430,9 +424,47 @@
         hardware::hidl_vec<hardware::hidl_string> ret = {"test/0"};
         return ret;
     }
+};
+
+/**
+ * Simple test version of AidlServiceInteractionProxy, used to inject onRegistered calls to the
+ * CameraProviderManager
+ */
+struct TestAidlInteractionProxy : public CameraProviderManager::AidlServiceInteractionProxy {
+    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
+
+    TestAidlInteractionProxy() {}
+
+    void setProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
+        mTestAidlCameraProvider = provider;
+    }
+
+    std::vector<std::string> mLastRequestedServiceNames;
+
+    virtual ~TestAidlInteractionProxy() {}
 
     virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-    getAidlService(const std::string&) {
+            getService(const std::string& serviceName) override {
+        if (!flags::delay_lazy_hal_instantiation()) {
+            return mTestAidlCameraProvider;
+        }
+
+        // If no provider has been given, fail; in reality, getService would
+        // block for HALs that don't start correctly, so we should never use
+        // getService when we don't have a valid HAL running
+        if (mTestAidlCameraProvider == nullptr) {
+            ADD_FAILURE() << __FUNCTION__ << " called with no valid provider;"
+                          << " would block indefinitely";
+            // Real getService would block, but that's bad in unit tests. So
+            // just record an error and return nullptr
+            return nullptr;
+        }
+        mLastRequestedServiceNames.push_back(serviceName);
+        return mTestAidlCameraProvider;
+    }
+
+    virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+    tryGetService(const std::string&) override {
         return mTestAidlCameraProvider;
     }
 };
@@ -462,7 +494,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -510,7 +542,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -560,7 +592,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
@@ -696,7 +728,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -730,7 +762,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -779,7 +811,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -821,7 +853,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -857,9 +889,11 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
@@ -868,7 +902,7 @@
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
     ndk::SpAIBinder spBinder = aidlProvider->asBinder();
     AIBinder* aiBinder = spBinder.get();
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
     providerManager->onServiceRegistration(
             String16("android.hardware.camera.provider.ICameraProvider/virtual/0"),
             AIBinder_toPlatformBinder(aiBinder));
@@ -883,15 +917,17 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
 
     std::shared_ptr<TestAidlICameraProvider> aidlProvider =
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 384f53e..11ef9b7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -623,7 +623,7 @@
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         if (dataSpace != streamInfo.dataSpace) {
             std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    logicalCameraId.c_str(), dataSpace, streamInfo.dataSpace);
+                    logicalCameraId.c_str(), static_cast<int>(dataSpace),
+                    static_cast<int>(streamInfo.dataSpace));
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 5b2ea5c..0545cea 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -179,6 +179,10 @@
 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
         metadata_vendor_id_t vendorTagId, CameraMetadata& dst);
 
+// Returns true if `container` holds `value`.
+template <typename T> bool contains(const std::set<T>& container, const T& value) {
+    return container.find(value) != container.end();
+}
+
 constexpr int32_t MAX_SURFACES_PER_STREAM = 4;
 
 constexpr int32_t ROUNDING_WIDTH_CAP = 1920;
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
new file mode 100644
index 0000000..c8f5e86
--- /dev/null
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Utils.h"
+#include <android-base/properties.h>
+#include <com_android_internal_camera_flags.h>
+
+namespace android {
+
+using namespace com::android::internal::camera::flags;
+
+constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
+constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
+constexpr int MAX_VENDOR_API_LEVEL = 1000000;
+constexpr int FIRST_VNDK_VERSION = 202404;
+
+int getVNDKVersionFromProp(int defaultVersion) {
+    if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
+        return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
+    }
+
+    int vndkVersion = base::GetIntProperty(BOARD_API_LEVEL_PROP, MAX_VENDOR_API_LEVEL);
+
+    if (vndkVersion == MAX_VENDOR_API_LEVEL) {
+        // Couldn't find property
+        return defaultVersion;
+    }
+
+    if (vndkVersion < __ANDROID_API_V__) {
+        // VNDK versions below V return the corresponding SDK version.
+        return vndkVersion;
+    }
+
+    // VNDK versions for Android V and above use the YYYYMM format, starting with 202404, and
+    // are bumped once a year. So V is 202404 and the next release will be 202504.
+    // This is the same assumption as that made in system/core/init/property_service.cpp.
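+    // For example, 202404 maps to __ANDROID_API_V__ and 202504 maps to
+    // __ANDROID_API_V__ + 1.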
+    vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
+    return __ANDROID_API_V__ + vndkVersion;
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
new file mode 100644
index 0000000..f8a107d
--- /dev/null
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_UTILS_H
+#define ANDROID_SERVERS_CAMERA_UTILS_H
+
+namespace android {
+
+/**
+ * As of Android V, ro.board.api_level returns the year and month of release (e.g. 202404)
+ * instead of the release SDK version. This function maps the year/month format back to the
+ * release SDK version.
+ *
+ * Returns defaultVersion if the property is not found.
+ */
+int getVNDKVersionFromProp(int defaultVersion);
+
+} // namespace android
+
+#endif //ANDROID_SERVERS_CAMERA_UTILS_H
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index cb4e10f..90530f6 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -11,6 +12,7 @@
         "libbinder",
         "libbinder_ndk",
         "libcamera_metadata",
+        "libexif",
         "liblog",
         "libfmq",
         "libgui",
@@ -46,7 +48,7 @@
     name: "libvirtualcamera_utils",
     srcs: [
         "util/JpegUtil.cc",
-        "util/MetadataBuilder.cc",
+        "util/MetadataUtil.cc",
         "util/Util.cc",
         "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
@@ -54,7 +56,7 @@
         "util/EglProgram.cc",
         "util/EglSurfaceTexture.cc",
         "util/EglUtil.cc",
-        "util/Permissions.cc"
+        "util/Permissions.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 66c5e52..25fca73 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -2,9 +2,7 @@
   "presubmit" : [
     {
       "name": "virtual_camera_tests"
-    }
-  ],
-  "postsubmit": [
+    },
     {
       "name": "CtsVirtualDevicesCameraTestCases",
       "options": [
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 84f721b..7636cbd 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -23,11 +23,14 @@
 #include <chrono>
 #include <cstdint>
 #include <iterator>
+#include <numeric>
 #include <optional>
 #include <string>
+#include <vector>
 
 #include "VirtualCameraSession.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
@@ -35,7 +38,7 @@
 #include "android/binder_status.h"
 #include "log/log.h"
 #include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -44,7 +47,10 @@
 
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraResourceCost;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -64,27 +70,81 @@
 // Prefix of camera name - "device@1.1/virtual/{numerical_id}"
 const char* kDevicePathPrefix = "device@1.1/virtual/";
 
-constexpr std::chrono::nanoseconds kMinFrameDuration30Fps = 1s / 30;
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
+constexpr int32_t kMinFps = 15;
+
+constexpr std::chrono::nanoseconds kMaxFrameDuration =
+    std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+
+constexpr uint8_t kPipelineMaxDepth = 2;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
-struct Resolution {
-  Resolution(const int w, const int h) : width(w), height(h) {
-  }
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+    Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+    Resolution(240, 160), Resolution(240, 180)};
 
-  bool operator<(const Resolution& other) const {
-    return width * height < other.width * other.height;
-  }
+const std::array<PixelFormat, 3> kOutputFormats{
+    PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
+    PixelFormat::BLOB};
 
-  bool operator==(const Resolution& other) const {
-    return width == other.width && height == other.height;
-  }
-
-  const int width;
-  const int height;
+// The resolutions below will be used to extend the set of supported output resolutions.
+// All resolutions with a lower pixel count and the same aspect ratio as some supported
+// input resolution will be added to the set of supported output resolutions.
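+// For example, a single 1920x1080 input also yields 640x360, 1024x576 and
+// 1280x720 output resolutions.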
+const std::array<Resolution, 10> kOutputResolutions{
+    Resolution(320, 240),   Resolution(640, 360),  Resolution(640, 480),
+    Resolution(720, 480),   Resolution(720, 576),  Resolution(800, 600),
+    Resolution(1024, 576),  Resolution(1280, 720), Resolution(1280, 960),
+    Resolution(1280, 1080),
 };
 
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  auto isSupportedByAnyInputConfig =
+      [&configs](const Resolution thumbnailResolution) {
+        return std::any_of(
+            configs.begin(), configs.end(),
+            [thumbnailResolution](const SupportedStreamConfiguration& config) {
+              return isApproximatellySameAspectRatio(
+                  thumbnailResolution, Resolution(config.width, config.height));
+            });
+      };
+
+  std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+  std::copy_if(kStandardJpegThumbnailSizes.begin(),
+               kStandardJpegThumbnailSizes.end(),
+               std::back_insert_iterator(supportedThumbnailSizes),
+               isSupportedByAnyInputConfig);
+  return supportedThumbnailSizes;
+}
+
+bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
+  return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
+         kOutputFormats.end();
+}
+
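+// Returns the AE target FPS ranges derived from the input configurations: for
+// each input, [kMinFps, maxFps] and [maxFps, maxFps]; fixed and variable 30 fps
+// ranges are added when any input supports at least 30 fps.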
+std::vector<MetadataBuilder::FpsRange> fpsRangesForInputConfig(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  std::set<MetadataBuilder::FpsRange> availableRanges;
+
+  for (const SupportedStreamConfiguration& config : configs) {
+    availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
+  }
+
+  if (std::any_of(configs.begin(), configs.end(),
+                  [](const SupportedStreamConfiguration& config) {
+                    return config.maxFps >= 30;
+                  })) {
+    availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
+    availableRanges.insert({.minFps = 30, .maxFps = 30});
+  }
+
+  return std::vector<MetadataBuilder::FpsRange>(availableRanges.begin(),
+                                                availableRanges.end());
+}
+
 std::optional<Resolution> getMaxResolution(
     const std::vector<SupportedStreamConfiguration>& configs) {
   auto itMax = std::max_element(configs.begin(), configs.end(),
@@ -103,24 +163,64 @@
   return Resolution(itMax->width, itMax->height);
 }
 
-std::set<Resolution> getUniqueResolutions(
+// Returns a map from each unique resolution to the maximum maxFps across all
+// streams with that resolution.
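+// For example, inputs {640x480@30fps, 640x480@60fps, 1280x720@30fps} produce
+// {640x480 -> 60, 1280x720 -> 30} plus downscaled entries such as 640x360 -> 30.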
+std::map<Resolution, int> getResolutionToMaxFpsMap(
     const std::vector<SupportedStreamConfiguration>& configs) {
-  std::set<Resolution> uniqueResolutions;
-  std::transform(configs.begin(), configs.end(),
-                 std::inserter(uniqueResolutions, uniqueResolutions.begin()),
-                 [](const SupportedStreamConfiguration& config) {
-                   return Resolution(config.width, config.height);
-                 });
-  return uniqueResolutions;
+  std::map<Resolution, int> resolutionToMaxFpsMap;
+
+  for (const SupportedStreamConfiguration& config : configs) {
+    Resolution resolution(config.width, config.height);
+    if (resolutionToMaxFpsMap.find(resolution) == resolutionToMaxFpsMap.end()) {
+      resolutionToMaxFpsMap[resolution] = config.maxFps;
+    } else {
+      int currentMaxFps = resolutionToMaxFpsMap[resolution];
+      resolutionToMaxFpsMap[resolution] = std::max(currentMaxFps, config.maxFps);
+    }
+  }
+
+  std::map<Resolution, int> additionalResolutionToMaxFpsMap;
+  // Add additional resolutions we can support by downscaling input streams with
+  // the same aspect ratio.
+  for (const Resolution& outputResolution : kOutputResolutions) {
+    for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
+      if (resolutionToMaxFpsMap.find(outputResolution) !=
+          resolutionToMaxFpsMap.end()) {
+        // Resolution is already in the map, skip it.
+        continue;
+      }
+
+      if (outputResolution < resolution &&
+          isApproximatellySameAspectRatio(outputResolution, resolution)) {
+        // Lower resolution with the same aspect ratio; we can achieve it by
+        // downscaling, so add it to the map.
+        ALOGD(
+            "Extending set of output resolutions with %dx%d, which has the same "
+            "aspect ratio as supported input %dx%d.",
+            outputResolution.width, outputResolution.height, resolution.width,
+            resolution.height);
+        additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
+        break;
+      }
+    }
+  }
+
+  // Add all resolutions we can achieve by downscaling to the map.
+  resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
+                               additionalResolutionToMaxFpsMap.end());
+
+  return resolutionToMaxFpsMap;
 }
 
 // TODO(b/301023410) - Populate camera characteristics according to camera configuration.
 std::optional<CameraMetadata> initCameraCharacteristics(
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig) {
+    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
+    const SensorOrientation sensorOrientation, const LensFacing lensFacing) {
   if (!std::all_of(supportedInputConfig.begin(), supportedInputConfig.end(),
                    [](const SupportedStreamConfiguration& config) {
                      return isFormatSupportedForInput(
-                         config.width, config.height, config.pixelFormat);
+                         config.width, config.height, config.pixelFormat,
+                         config.maxFps);
                    })) {
     ALOGE("%s: input configuration contains unsupported format", __func__);
     return std::nullopt;
@@ -131,26 +231,86 @@
           .setSupportedHardwareLevel(
               ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
           .setFlashAvailable(false)
-          .setLensFacing(ANDROID_LENS_FACING_EXTERNAL)
-          .setSensorOrientation(0)
+          .setLensFacing(
+              static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
+          .setAvailableFocalLengths({VirtualCameraDevice::kFocalLength})
+          .setSensorOrientation(static_cast<int32_t>(sensorOrientation))
+          .setSensorReadoutTimestamp(
+              ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED)
+          .setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
+          .setSensorPhysicalSize(36.0, 24.0)
+          .setAvailableAberrationCorrectionModes(
+              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
+          .setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
           .setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+          .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
           .setAvailableMaxDigitalZoom(1.0)
           .setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
           .setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
-          .setControlAeAvailableFpsRange(10, 30)
+          .setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
+          .setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
+          .setControlAvailableVideoStabilizationModes(
+              {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
+          .setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
+          .setControlAeAvailableFpsRanges(
+              fpsRangesForInputConfig(supportedInputConfig))
           .setControlMaxRegions(0, 0, 0)
           .setControlAfRegions({kDefaultEmptyControlRegion})
           .setControlAeRegions({kDefaultEmptyControlRegion})
           .setControlAwbRegions({kDefaultEmptyControlRegion})
-          .setControlAeCompensationRange(0, 1)
+          .setControlAeCompensationRange(0, 0)
           .setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
+          .setControlAwbLockAvailable(false)
+          .setControlAeLockAvailable(false)
+          .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
           .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
+          .setCroppingType(ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY)
+          .setJpegAvailableThumbnailSizes(
+              getSupportedJpegThumbnailSizes(supportedInputConfig))
           .setMaxJpegSize(kMaxJpegSize)
-          .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
-          .setAvailableResultKeys({ANDROID_CONTROL_AF_MODE})
+          .setMaxFaceCount(0)
+          .setMaxFrameDuration(kMaxFrameDuration)
+          .setMaxNumberOutputStreams(
+              VirtualCameraDevice::kMaxNumberOfRawStreams,
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              VirtualCameraDevice::kMaxNumberOfStallStreams)
+          .setRequestPartialResultCount(1)
+          .setPipelineMaxDepth(kPipelineMaxDepth)
+          .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
+          .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+                                    ANDROID_CONTROL_CAPTURE_INTENT,
+                                    ANDROID_CONTROL_AE_MODE,
+                                    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                    ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                    ANDROID_CONTROL_AF_TRIGGER,
+                                    ANDROID_CONTROL_AF_MODE,
+                                    ANDROID_CONTROL_AWB_MODE,
+                                    ANDROID_SCALER_CROP_REGION,
+                                    ANDROID_CONTROL_EFFECT_MODE,
+                                    ANDROID_CONTROL_MODE,
+                                    ANDROID_CONTROL_SCENE_MODE,
+                                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                    ANDROID_CONTROL_ZOOM_RATIO,
+                                    ANDROID_FLASH_MODE,
+                                    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                                    ANDROID_JPEG_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    ANDROID_NOISE_REDUCTION_MODE,
+                                    ANDROID_STATISTICS_FACE_DETECT_MODE})
+          .setAvailableResultKeys(
+              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, ANDROID_CONTROL_AE_MODE,
+               ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
+               ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_EFFECT_MODE,
+               ANDROID_CONTROL_MODE, ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
+               ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
+               ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_LENS_FOCAL_LENGTH,
+               ANDROID_SENSOR_TIMESTAMP, ANDROID_NOISE_REDUCTION_MODE})
           .setAvailableCapabilities(
-              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE})
-          .setAvailableCharacteristicKeys();
+              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
 
   // Active array size must correspond to largest supported input resolution.
   std::optional<Resolution> maxResolution =
@@ -160,56 +320,36 @@
   }
   builder.setSensorActiveArraySize(0, 0, maxResolution->width,
                                    maxResolution->height);
+  builder.setSensorPixelArraySize(maxResolution->width, maxResolution->height);
 
   std::vector<MetadataBuilder::StreamConfiguration> outputConfigurations;
 
   // TODO(b/301023410) Add also all "standard" resolutions we can rescale the
   // streams to (all standard resolutions with same aspect ratio).
 
-  // Add IMPLEMENTATION_DEFINED format for all supported input resolutions.
-  std::set<Resolution> uniqueResolutions =
-      getUniqueResolutions(supportedInputConfig);
-  std::transform(
-      uniqueResolutions.begin(), uniqueResolutions.end(),
-      std::back_inserter(outputConfigurations),
-      [](const Resolution& resolution) {
-        return MetadataBuilder::StreamConfiguration{
-            .width = resolution.width,
-            .height = resolution.height,
-            .format = ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-            .minFrameDuration = kMinFrameDuration30Fps,
-            .minStallDuration = 0s};
-      });
+  std::map<Resolution, int> resolutionToMaxFpsMap =
+      getResolutionToMaxFpsMap(supportedInputConfig);
 
-  // Add all supported configuration with explicit pixel format.
-  std::transform(supportedInputConfig.begin(), supportedInputConfig.end(),
-                 std::back_inserter(outputConfigurations),
-                 [](const SupportedStreamConfiguration& config) {
-                   return MetadataBuilder::StreamConfiguration{
-                       .width = config.width,
-                       .height = config.height,
-                       .format = static_cast<int>(config.pixelFormat),
-                       .minFrameDuration = kMinFrameDuration30Fps,
-                       .minStallDuration = 0s};
-                 });
-
-  // TODO(b/301023410) We currently don't support rescaling for still capture,
-  // so only announce BLOB support for formats exactly matching the input.
-  std::transform(uniqueResolutions.begin(), uniqueResolutions.end(),
-                 std::back_inserter(outputConfigurations),
-                 [](const Resolution& resolution) {
-                   return MetadataBuilder::StreamConfiguration{
-                       .width = resolution.width,
-                       .height = resolution.height,
-                       .format = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                       .minFrameDuration = kMinFrameDuration30Fps,
-                       .minStallDuration = 0s};
-                 });
+  // Add configurations for all unique input resolutions and output formats.
+  for (const PixelFormat format : kOutputFormats) {
+    std::transform(
+        resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
+        std::back_inserter(outputConfigurations), [format](const auto& entry) {
+          Resolution resolution = entry.first;
+          int maxFps = entry.second;
+          return MetadataBuilder::StreamConfiguration{
+              .width = resolution.width,
+              .height = resolution.height,
+              .format = static_cast<int32_t>(format),
+              .minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
+              .minStallDuration = 0s};
+        });
+  }
 
   ALOGV("Adding %zu output configurations", outputConfigurations.size());
   builder.setAvailableOutputStreamConfigurations(outputConfigurations);
 
-  auto metadata = builder.build();
+  auto metadata = builder.setAvailableCharacteristicKeys().build();
   if (metadata == nullptr) {
     ALOGE("Failed to build metadata!");
     return CameraMetadata();
@@ -221,14 +361,13 @@
 }  // namespace
 
 VirtualCameraDevice::VirtualCameraDevice(
-    const uint32_t cameraId,
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
-    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
+    const uint32_t cameraId, const VirtualCameraConfiguration& configuration)
     : mCameraId(cameraId),
-      mVirtualCameraClientCallback(virtualCameraClientCallback),
-      mSupportedInputConfigurations(supportedInputConfig) {
-  std::optional<CameraMetadata> metadata =
-      initCameraCharacteristics(mSupportedInputConfigurations);
+      mVirtualCameraClientCallback(configuration.virtualCameraCallback),
+      mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
+  std::optional<CameraMetadata> metadata = initCameraCharacteristics(
+      mSupportedInputConfigurations, configuration.sensorOrientation,
+      configuration.lensFacing);
   if (metadata.has_value()) {
     mCameraCharacteristics = *metadata;
   } else {
@@ -286,6 +425,29 @@
 
 bool VirtualCameraDevice::isStreamCombinationSupported(
     const StreamConfiguration& streamConfiguration) const {
+  if (streamConfiguration.streams.empty()) {
+    ALOGE("%s: Querying empty configuration", __func__);
+    return false;
+  }
+
+  const std::vector<Stream>& streams = streamConfiguration.streams;
+
+  Resolution firstStreamResolution(streams[0].width, streams[0].height);
+  auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
+    return isApproximatellySameAspectRatio(
+        firstStreamResolution, Resolution(stream.width, stream.height));
+  };
+  if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
+    ALOGW(
+        "%s: Requested streams do not have the same aspect ratio. Different "
+        "aspect ratios are currently not supported by virtual camera. "
+        "Stream configuration: %s",
+        __func__, streamConfiguration.toString().c_str());
+    return false;
+  }
+
+  int numberOfProcessedStreams = 0;
+  int numberOfStallStreams = 0;
   for (const Stream& stream : streamConfiguration.streams) {
     ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());
 
@@ -294,18 +456,25 @@
       return false;
     }
 
-    // TODO(b/301023410) remove hardcoded format checks, verify against configuration.
     if (stream.rotation != StreamRotation::ROTATION_0 ||
-        (stream.format != PixelFormat::IMPLEMENTATION_DEFINED &&
-         stream.format != PixelFormat::YCBCR_420_888 &&
-         stream.format != PixelFormat::BLOB)) {
+        !isSupportedOutputFormat(stream.format)) {
       ALOGV("Unsupported output stream type");
       return false;
     }
 
+    if (stream.format == PixelFormat::BLOB) {
+      numberOfStallStreams++;
+    } else {
+      numberOfProcessedStreams++;
+    }
+
+    Resolution requestedResolution(stream.width, stream.height);
     auto matchesSupportedInputConfig =
-        [&stream](const SupportedStreamConfiguration& config) {
-          return stream.width == config.width && stream.height == config.height;
+        [requestedResolution](const SupportedStreamConfiguration& config) {
+          Resolution supportedInputResolution(config.width, config.height);
+          return requestedResolution <= supportedInputResolution &&
+                 isApproximatellySameAspectRatio(requestedResolution,
+                                                 supportedInputResolution);
         };
     if (std::none_of(mSupportedInputConfigurations.begin(),
                      mSupportedInputConfigurations.end(),
@@ -314,6 +483,19 @@
       return false;
     }
   }
+
+  if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
+    ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
+          __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
+    return false;
+  }
+
+  if (numberOfStallStreams > kMaxNumberOfStallStreams) {
+    ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
+          numberOfStallStreams, kMaxNumberOfStallStreams);
+    return false;
+  }
+
   return true;
 }
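A compact sketch of the stream-counting rule introduced above, using a simplified hypothetical Stream type rather than the AIDL one: BLOB/JPEG streams count as stalling streams, everything else as processed streams, and each count is checked against the limits declared in VirtualCameraDevice.h.

#include <vector>

// Simplified stand-in for the AIDL Stream type; only what the check needs.
struct Stream { int width; int height; bool isBlob; };

constexpr int kMaxNumberOfProcessedStreams = 3;  // same limits as the header
constexpr int kMaxNumberOfStallStreams = 1;

bool isStreamCountSupported(const std::vector<Stream>& streams) {
  int processed = 0;
  int stall = 0;
  for (const Stream& stream : streams) {
    if (stream.isBlob) {
      ++stall;       // JPEG / BLOB output stalls the pipeline
    } else {
      ++processed;   // YUV / implementation-defined outputs
    }
  }
  return processed <= kMaxNumberOfProcessedStreams &&
         stall <= kMaxNumberOfStallStreams;
}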
 
@@ -368,6 +550,24 @@
   return std::string(kDevicePathPrefix) + std::to_string(mCameraId);
 }
 
+const std::vector<SupportedStreamConfiguration>&
+VirtualCameraDevice::getInputConfigs() const {
+  return mSupportedInputConfigurations;
+}
+
+Resolution VirtualCameraDevice::getMaxInputResolution() const {
+  std::optional<Resolution> maxResolution =
+      getMaxResolution(mSupportedInputConfigurations);
+  if (!maxResolution.has_value()) {
+    ALOGE(
+        "%s: Cannot determine sensor size for virtual camera - input "
+        "configurations empty?",
+        __func__);
+    return Resolution(0, 0);
+  }
+  return maxResolution.value();
+}
+
 std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
   // SharedRefBase which BnCameraDevice inherits from breaks
   // std::enable_shared_from_this. This is recommended replacement for
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 402de6c..c274dc9 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -22,7 +22,9 @@
 
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -35,12 +37,8 @@
  public:
   explicit VirtualCameraDevice(
       uint32_t cameraId,
-      const std::vector<
-          aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
-          supportedInputConfig,
-      std::shared_ptr<
-          ::aidl::android::companion::virtualcamera::IVirtualCameraCallback>
-          virtualCameraClientCallback = nullptr);
+      const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration);
 
   virtual ~VirtualCameraDevice() override = default;
 
@@ -97,6 +95,32 @@
 
   uint32_t getCameraId() const { return mCameraId; }
 
+  const std::vector<
+      aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
+  getInputConfigs() const;
+
+  // Returns largest supported input resolution.
+  Resolution getMaxInputResolution() const;
+
+  // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
+  static constexpr int32_t kMaxNumberOfRawStreams = 0;
+
+  // Maximal number of non-JPEG streams configured concurrently in a single
+  // session. This should be at least 3 and can be increased at the potential
+  // cost of more CPU/GPU load if there are many concurrent streams.
+  static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
+
+  // Maximal number of stalling streams (for virtual camera, currently only
+  // JPEG). Can be increased at the potential cost of more GPU/CPU load.
+  static constexpr int32_t kMaxNumberOfStallStreams = 1;
+
+  // Focal length for full frame sensor.
+  static constexpr float kFocalLength = 43.0;
+
+  // Default JPEG compression quality.
+  static constexpr uint8_t kDefaultJpegQuality = 80;
+
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
 
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index 25a43d6..e4a68f5 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -33,8 +33,7 @@
 namespace companion {
 namespace virtualcamera {
 
-using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
-using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::common::VendorTagSection;
@@ -155,10 +154,9 @@
 }
 
 std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
-    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback) {
-  auto camera = ndk::SharedRefBase::make<VirtualCameraDevice>(
-      sNextId++, supportedInputConfig, virtualCameraClientCallback);
+    const VirtualCameraConfiguration& configuration) {
+  auto camera =
+      ndk::SharedRefBase::make<VirtualCameraDevice>(sNextId++, configuration);
   std::shared_ptr<ICameraProviderCallback> callback;
   {
     const std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index d41a005..11d3123 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -74,14 +74,9 @@
 
   // Create new virtual camera devices
   // Returns nullptr if creation was not successful.
-  //
-  // TODO(b/301023410) - Add camera configuration.
   std::shared_ptr<VirtualCameraDevice> createCamera(
-      const std::vector<
-          aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
-          supportedInputConfig,
-      std::shared_ptr<aidl::android::companion::virtualcamera::IVirtualCameraCallback>
-          virtualCameraClientCallback = nullptr);
+      const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration);
 
   std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
 
@@ -105,4 +100,4 @@
 }  // namespace companion
 }  // namespace android
 
-#endif  // ANDROID_SERVICES_VIRTUAL_CAMERA_VIRTUALCAMERAPROVIDER_H
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERAPROVIDER_H
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 79c91ef..9b0fc07 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,21 +14,27 @@
  * limitations under the License.
  */
 
+#include "system/camera_metadata.h"
 #define LOG_TAG "VirtualCameraRenderThread"
 #include "VirtualCameraRenderThread.h"
 
 #include <chrono>
-#include <cstddef>
 #include <cstdint>
+#include <cstring>
 #include <future>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <vector>
 
+#include "Exif.h"
 #include "GLES/gl.h"
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureResult.h"
 #include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -42,7 +48,7 @@
 #include "ui/GraphicBuffer.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
@@ -53,6 +59,8 @@
 
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::CaptureResult;
 using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,16 +73,49 @@
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::android::base::ScopedLockAssertion;
 
+using ::android::hardware::camera::common::helper::ExifUtils;
+
 namespace {
 
 using namespace std::chrono_literals;
 
 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
 
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
+
 CameraMetadata createCaptureResultMetadata(
-    const std::chrono::nanoseconds timestamp) {
+    const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
+    const Resolution reportedSensorSize) {
   std::unique_ptr<CameraMetadata> metadata =
-      MetadataBuilder().setSensorTimestamp(timestamp).build();
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAePrecaptureTrigger(
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setCropRegion(0, 0, reportedSensorSize.width,
+                         reportedSensorSize.height)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .setPipelineDepth(kPipelineDepth)
+          .setSensorTimestamp(timestamp)
+          .build();
   if (metadata == nullptr) {
     ALOGE("%s: Failed to build capture result metadata", __func__);
     return CameraMetadata();
@@ -150,6 +191,34 @@
   }
 }
 
+std::vector<uint8_t> createExif(
+    Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
+  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+  exifUtils->initialize();
+  exifUtils->setImageWidth(imageSize.width);
+  exifUtils->setImageHeight(imageSize.height);
+  // TODO(b/324383963) Set Make/Model and orientation.
+
+  std::vector<uint8_t> app1Data;
+
+  size_t thumbnailDataSize = compressedThumbnail.size();
+  const void* thumbnailData =
+      thumbnailDataSize > 0
+          ? reinterpret_cast<const void*>(compressedThumbnail.data())
+          : nullptr;
+
+  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+    return app1Data;
+  }
+
+  const uint8_t* data = exifUtils->getApp1Buffer();
+  const size_t size = exifUtils->getApp1Length();
+
+  app1Data.insert(app1Data.end(), data, data + size);
+  return app1Data;
+}
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -170,12 +239,12 @@
 }
 
 VirtualCameraRenderThread::VirtualCameraRenderThread(
-    VirtualCameraSessionContext& sessionContext, const int mWidth,
-    const int mHeight,
+    VirtualCameraSessionContext& sessionContext,
+    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
     std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
     : mCameraDeviceCallback(cameraDeviceCallback),
-      mInputSurfaceWidth(mWidth),
-      mInputSurfaceHeight(mHeight),
+      mInputSurfaceSize(inputSurfaceSize),
+      mReportedSensorSize(reportedSensorSize),
       mTestMode(testMode),
       mSessionContext(sessionContext) {
 }
@@ -188,8 +257,11 @@
 }
 
 ProcessCaptureRequestTask::ProcessCaptureRequestTask(
-    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
-    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+    const RequestSettings& requestSettings)
+    : mFrameNumber(frameNumber),
+      mBuffers(requestBuffers),
+      mRequestSettings(requestSettings) {
 }
 
 int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -201,6 +273,10 @@
   return mBuffers;
 }
 
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+  return mRequestSettings;
+}
+
 void VirtualCameraRenderThread::enqueueTask(
     std::unique_ptr<ProcessCaptureRequestTask> task) {
   std::lock_guard<std::mutex> lock(mLock);
@@ -263,8 +339,8 @@
       std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
   mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
       EglTextureProgram::TextureFormat::RGBA);
-  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
-                                                           mInputSurfaceHeight);
+  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
+      mInputSurfaceSize.width, mInputSurfaceSize.height);
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
   while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
@@ -287,7 +363,8 @@
   captureResult.partialResult = 1;
   captureResult.inputBuffer.streamId = -1;
   captureResult.physicalCameraMetadata.resize(0);
-  captureResult.result = createCaptureResultMetadata(timestamp);
+  captureResult.result = createCaptureResultMetadata(
+      timestamp, request.getRequestSettings(), mReportedSensorSize);
 
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
@@ -316,9 +393,9 @@
     }
 
     auto status = streamConfig->format == PixelFormat::BLOB
-                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
-                                                   reqBuffer.getBufferId(),
-                                                   reqBuffer.getFence())
+                      ? renderIntoBlobStreamBuffer(
+                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+                            request.getRequestSettings(), reqBuffer.getFence())
                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                     reqBuffer.getBufferId(),
                                                     reqBuffer.getFence());
@@ -398,9 +475,70 @@
   }
 }
 
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+    const Resolution resolution, const int quality) {
+  if (resolution.width == 0 || resolution.height == 0) {
+    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+    return {};
+  }
+
+  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+        resolution.width, resolution.height, quality);
+  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+      mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
+  if (framebuffer == nullptr) {
+    ALOGE(
+        "Failed to allocate temporary framebuffer for JPEG thumbnail "
+        "compression");
+    return {};
+  }
+
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // doesn't correspond to the input texture aspect ratio.
+  if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
+    ALOGE(
+        "Failed to render input texture into temporary framebuffer for JPEG "
+        "thumbnail");
+    return {};
+  }
+
+  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+
+  if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+    // This should never happen since we're allocating the temporary buffer
+    // with YUV420 layout above.
+    ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+          gBuffer->getPixelFormat());
+    return {};
+  }
+
+  YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+  if (yCbCrLock.getStatus() != NO_ERROR) {
+    ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
+          __func__, yCbCrLock.getStatus());
+    return {};
+  }
+
+  std::vector<uint8_t> compressedThumbnail;
+  compressedThumbnail.resize(kJpegThumbnailBufferSize);
+  ALOGE("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
+        gBuffer->getHeight());
+  std::optional<size_t> compressedSize = compressJpeg(
+      gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
+      compressedThumbnail.size(), compressedThumbnail.data());
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+    return {};
+  }
+  compressedThumbnail.resize(compressedSize.value());
+  return compressedThumbnail;
+}
+
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
-    const int streamId, const int bufferId, sp<Fence> fence) {
-  ALOGV("%s", __func__);
+    const int streamId, const int bufferId,
+    const RequestSettings& requestSettings, sp<Fence> fence) {
   std::shared_ptr<AHardwareBuffer> hwBuffer =
       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
   if (hwBuffer == nullptr) {
@@ -415,6 +553,9 @@
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+        stream->width, stream->height, requestSettings.jpegQuality);
+
   // Let's create YUV framebuffer and render the surface into this.
   // This will take care of rescaling as well as potential format conversion.
   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
@@ -431,58 +572,62 @@
     return status;
   }
 
-  AHardwareBuffer_Planes planes_info;
-
-  int32_t rawFence = fence != nullptr ? fence->get() : -1;
-  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          rawFence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+                             fence);
+  if (planesLock.getStatus() != OK) {
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
   std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
   GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
 
-  bool compressionSuccess = true;
-  if (gBuffer != nullptr) {
-    android_ycbcr ycbcr;
-    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
-      // This should never happen since we're allocating the temporary buffer
-      // with YUV420 layout above.
-      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
-            gBuffer->getPixelFormat());
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
-
-    status_t status =
-        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-    ALOGV("Locked buffers");
-    if (status != NO_ERROR) {
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
-
-    compressionSuccess =
-        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
-                     stream->bufferSize, planes_info.planes[0].data);
-
-    status_t res = gBuffer->unlock();
-    if (res != NO_ERROR) {
-      ALOGE("Failed to unlock graphic buffer: %d", res);
-    }
-  } else {
-    compressionSuccess =
-        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
-                          planes_info.planes[0].data);
+  if (gBuffer == nullptr) {
+    ALOGE(
+        "%s: Encountered invalid temporary buffer while rendering JPEG "
+        "into BLOB stream",
+        __func__);
+    return cameraStatus(Status::INTERNAL_ERROR);
   }
-  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-  ALOGV("Unlocked buffers");
-  return compressionSuccess ? ndk::ScopedAStatus::ok()
-                            : cameraStatus(Status::INTERNAL_ERROR);
+
+  if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+    // This should never happen since we're allocating the temporary buffer
+    // with YUV420 layout above.
+    ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+          gBuffer->getPixelFormat());
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
+
+  YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+  if (yCbCrLock.getStatus() != OK) {
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
+
+  std::vector<uint8_t> app1ExifData =
+      createExif(Resolution(stream->width, stream->height),
+                 createThumbnail(requestSettings.thumbnailResolution,
+                                 requestSettings.thumbnailJpegQuality));
+  std::optional<size_t> compressedSize = compressJpeg(
+      gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
+      *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
+      (*planesLock).planes[0].data);
+
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress JPEG image", __func__);
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
+
+  CameraBlob cameraBlob{
+      .blobId = CameraBlobId::JPEG,
+      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+             (stream->bufferSize - sizeof(cameraBlob)),
+         &cameraBlob, sizeof(cameraBlob));
+
+  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+        __func__, compressedSize.value());
+
+  return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -542,8 +687,12 @@
   } else {
     const bool renderSuccess =
         isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
-            ? mEglTextureYuvProgram->draw(mEglSurfaceTexture->updateTexture())
-            : mEglTextureRgbProgram->draw(mEglSurfaceTexture->updateTexture());
+            ? mEglTextureYuvProgram->draw(
+                  mEglSurfaceTexture->getTextureId(),
+                  mEglSurfaceTexture->getTransformMatrix())
+            : mEglTextureRgbProgram->draw(
+                  mEglSurfaceTexture->getTextureId(),
+                  mEglSurfaceTexture->getTransformMatrix());
     if (!renderSuccess) {
       ALOGE("%s: Failed to render texture", __func__);
       return cameraStatus(Status::INTERNAL_ERROR);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index b3aaed8..86dad0b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,18 +17,23 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <cstdint>
 #include <deque>
 #include <future>
 #include <memory>
 #include <thread>
+#include <vector>
 
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "android/binder_auto_utils.h"
 #include "util/EglDisplayContext.h"
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -49,11 +54,18 @@
   const sp<Fence> mFence;
 };
 
+struct RequestSettings {
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+  Resolution thumbnailResolution = Resolution(0, 0);
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+};
+
 // Represents single capture request to fill set of buffers.
 class ProcessCaptureRequestTask {
  public:
   ProcessCaptureRequestTask(
-      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+      const RequestSettings& requestSettings = {});
 
   // Returns frame number corresponding to the request.
   int getFrameNumber() const;
@@ -65,9 +77,12 @@
   // so it cannot be accessed outside of its lifetime.
   const std::vector<CaptureRequestBuffer>& getBuffers() const;
 
+  const RequestSettings& getRequestSettings() const;
+
  private:
   const int mFrameNumber;
   const std::vector<CaptureRequestBuffer> mBuffers;
+  const RequestSettings mRequestSettings;
 };
 
 // Wraps dedicated rendering thread and rendering business with corresponding
@@ -77,14 +92,14 @@
   // Create VirtualCameraRenderThread instance:
   // * sessionContext - VirtualCameraSessionContext reference for shared access
   // to mapped buffers.
-  // * inputWidth - requested width of input surface ("virtual camera sensor")
-  // * inputHeight - requested height of input surface ("virtual camera sensor")
+  // * inputSurfaceSize - requested size of input surface.
+  // * reportedSensorSize - reported static sensor size of virtual camera.
   // * cameraDeviceCallback - callback for corresponding camera instance
   // * testMode - when set to true, test pattern is rendered to input surface
   // before each capture request is processed to simulate client input.
   VirtualCameraRenderThread(
-      VirtualCameraSessionContext& sessionContext, int inputWidth,
-      int inputHeight,
+      VirtualCameraSessionContext& sessionContext, Resolution inputSurfaceSize,
+      Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
           cameraDeviceCallback,
@@ -122,13 +137,19 @@
   // TODO(b/301023410) - Refactor the actual rendering logic off this class for
   // easier testability.
 
+  // Creates a thumbnail of the specified size for the current image.
+  // The compressed thumbnail size is limited to 32 KiB.
+  // Returns a vector with the compressed thumbnail if successful,
+  // an empty vector otherwise.
+  std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
   // Render current image to the BLOB buffer.
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on render thread.
-  ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
-                                                const int bufferId,
-                                                sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+      const int streamId, const int bufferId,
+      const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
 
   // Render current image to the YCbCr buffer.
   // If fence is specified, this function will block until the fence is cleared
@@ -149,8 +170,8 @@
       ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
       mCameraDeviceCallback;
 
-  const int mInputSurfaceWidth;
-  const int mInputSurfaceHeight;
+  const Resolution mInputSurfaceSize;
+  const Resolution mReportedSensorSize;
   const int mTestMode;
 
   VirtualCameraSessionContext& mSessionContext;
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 370a5a8..1144997 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -41,6 +41,8 @@
 namespace virtualcamera {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 
@@ -48,6 +50,7 @@
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 60;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
 constexpr char kShellCmdHelp[] = R"(
@@ -69,13 +72,29 @@
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
-                                   config.pixelFormat)) {
+                                   config.pixelFormat, config.maxFps)) {
       ALOGE("%s: Requested unsupported input format: %d x %d (%d)", __func__,
             config.width, config.height, static_cast<int>(config.pixelFormat));
       return ndk::ScopedAStatus::fromServiceSpecificError(
           Status::EX_ILLEGAL_ARGUMENT);
     }
   }
+
+  if (configuration.sensorOrientation != SensorOrientation::ORIENTATION_0 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_90 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_180 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_270) {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
+  if (configuration.lensFacing != LensFacing::FRONT &&
+      configuration.lensFacing != LensFacing::BACK &&
+      configuration.lensFacing != LensFacing::EXTERNAL) {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
   return ndk::ScopedAStatus::ok();
 }
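For clarity, a compact standalone sketch of the whitelist checks added above, using hypothetical stand-ins for the AIDL enums (the real ones are generated from the .aidl files added later in this change):

#include <algorithm>
#include <array>

// Hypothetical stand-ins; the values mirror the AIDL declarations below.
enum class SensorOrientation { ORIENTATION_0 = 0, ORIENTATION_90 = 90,
                               ORIENTATION_180 = 180, ORIENTATION_270 = 270 };
enum class LensFacing { FRONT = 0, BACK = 1, EXTERNAL = 2 };

bool isAllowedOrientation(SensorOrientation orientation) {
  constexpr std::array<SensorOrientation, 4> kAllowed = {
      SensorOrientation::ORIENTATION_0, SensorOrientation::ORIENTATION_90,
      SensorOrientation::ORIENTATION_180, SensorOrientation::ORIENTATION_270};
  return std::find(kAllowed.begin(), kAllowed.end(), orientation) !=
         kAllowed.end();
}

bool isAllowedLensFacing(LensFacing facing) {
  return facing == LensFacing::FRONT || facing == LensFacing::BACK ||
         facing == LensFacing::EXTERNAL;
}

Anything outside these enumerated values is rejected with EX_ILLEGAL_ARGUMENT, as in the service code above.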
 
@@ -121,10 +140,8 @@
     return ndk::ScopedAStatus::ok();
   }
 
-  // TODO(b/301023410) Validate configuration and pass it to the camera.
   std::shared_ptr<VirtualCameraDevice> camera =
-      mVirtualCameraProvider->createCamera(configuration.supportedStreamConfigs,
-                                           configuration.virtualCameraCallback);
+      mVirtualCameraProvider->createCamera(configuration);
   if (camera == nullptr) {
     ALOGE("Failed to create camera for binder token 0x%" PRIxPTR,
           reinterpret_cast<uintptr_t>(token.get()));
@@ -241,8 +258,11 @@
 
   bool ret;
   VirtualCameraConfiguration configuration;
-  configuration.supportedStreamConfigs.push_back(
-      {.width = kVgaWidth, .height = kVgaHeight, Format::YUV_420_888});
+  configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
+                                                  .height = kVgaHeight,
+                                                  .pixelFormat = Format::YUV_420_888,
+                                                  .maxFps = kMaxFps});
+  configuration.lensFacing = LensFacing::EXTERNAL;
   registerCamera(mTestCameraToken, configuration, &ret);
   if (ret) {
     dprintf(out, "Successfully registered test camera %s",
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 47780d8..2a691c1 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -18,14 +18,17 @@
 #define LOG_TAG "VirtualCameraSession"
 #include "VirtualCameraSession.h"
 
+#include <algorithm>
 #include <atomic>
 #include <chrono>
+#include <cmath>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
 #include <map>
 #include <memory>
 #include <mutex>
+#include <numeric>
 #include <optional>
 #include <tuple>
 #include <unordered_set>
@@ -37,13 +40,17 @@
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraRenderThread.h"
 #include "VirtualCameraStream.h"
+#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferCache.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
 #include "aidl/android/hardware/camera/device/HalStream.h"
 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
+#include "aidl/android/hardware/camera/device/RequestTemplate.h"
 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
+#include "aidl/android/hardware/camera/device/Stream.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
 #include "aidl/android/hardware/camera/device/StreamRotation.h"
@@ -58,7 +65,7 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
@@ -68,6 +75,7 @@
 
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferCache;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -96,36 +104,75 @@
 
 // Size of request/result metadata fast message queue.
 // Setting this to 0 always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
 
 // Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
 
-CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
-  hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;
+// Thumbnail size (0,0) corresponds to disabling the thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
 
-  camera_metadata_enum_android_control_capture_intent_t intent =
-      ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
+    const RequestTemplate type) {
   switch (type) {
     case RequestTemplate::PREVIEW:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
     case RequestTemplate::STILL_CAPTURE:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
     case RequestTemplate::VIDEO_RECORD:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
     case RequestTemplate::VIDEO_SNAPSHOT:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
     default:
-      // Leave default.
-      break;
+      // Return PREVIEW by default
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   }
+}
 
-  auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
-  return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
+int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
+  return std::transform_reduce(
+      configs.begin(), configs.end(), 0,
+      [](const int a, const int b) { return std::max(a, b); },
+      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
+}
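getMaxFps above uses std::transform_reduce: each configuration is first projected to its maxFps (the unary transform), and the projected values are then folded with std::max starting from 0 (the binary reduce). A small standalone usage sketch, with a simplified configuration type assumed for illustration:

#include <algorithm>
#include <cstdio>
#include <numeric>
#include <vector>

// Simplified stand-in keeping only the field the fold needs.
struct SupportedStreamConfiguration { int maxFps; };

int main() {
  const std::vector<SupportedStreamConfiguration> configs = {{30}, {60}, {24}};
  // Transform: config -> config.maxFps; reduce: max of the projected values.
  const int maxFps = std::transform_reduce(
      configs.begin(), configs.end(), 0,
      [](int a, int b) { return std::max(a, b); },
      [](const SupportedStreamConfiguration& c) { return c.maxFps; });
  std::printf("max fps = %d\n", maxFps);  // prints 60
  return 0;
}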
+
+CameraMetadata createDefaultRequestSettings(
+    const RequestTemplate type,
+    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
+  int maxFps = getMaxFps(inputConfigs);
+  auto metadata =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlCaptureIntent(requestTemplateToIntent(type))
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAeExposureCompensation(0)
+          .setControlAeTargetFpsRange(maxFps, maxFps)
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
+          .setControlAePrecaptureTrigger(
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailSize(0, 0)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .build();
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to construct metadata for default request type %s",
+          __func__, toString(type).c_str());
+    return CameraMetadata();
+  } else {
+    ALOGV("%s: Successfully created metadata for request type %s", __func__,
+          toString(type).c_str());
+  }
+  return *metadata;
 }
 
 HalStream getHalStream(const Stream& stream) {
@@ -150,6 +197,72 @@
   return halStream;
 }
 
+Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
+  return *(std::max_element(streams.begin(), streams.end(),
+                            [](const Stream& a, const Stream& b) {
+                              return a.width * a.height < b.width * b.height;
+                            }));
+}
+
+Resolution resolutionFromStream(const Stream& stream) {
+  return Resolution(stream.width, stream.height);
+}
+
+Resolution resolutionFromInputConfig(
+    const SupportedStreamConfiguration& inputConfig) {
+  return Resolution(inputConfig.width, inputConfig.height);
+}
+
+std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
+    const std::vector<Stream>& requestedStreams,
+    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
+  Resolution maxResolution = resolutionFromStream(maxResolutionStream);
+
+  // Find the best fitting input configuration to satisfy all requested
+  // streams: same aspect ratio and same or higher resolution than the largest
+  // requested stream, with the lowest pixel count difference.
+  auto isBetterInputConfig = [maxResolution](
+                                 const SupportedStreamConfiguration& configA,
+                                 const SupportedStreamConfiguration& configB) {
+    int maxResPixelCount = maxResolution.width * maxResolution.height;
+    int pixelCountDiffA =
+        std::abs((configA.width * configA.height) - maxResPixelCount);
+    int pixelCountDiffB =
+        std::abs((configB.width * configB.height) - maxResPixelCount);
+
+    return pixelCountDiffA < pixelCountDiffB;
+  };
+
+  std::optional<SupportedStreamConfiguration> bestConfig;
+  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
+    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
+    if (inputConfigResolution < maxResolution ||
+        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
+      // We don't want to upscale from a lower resolution or use a different
+      // aspect ratio, so skip this configuration.
+      continue;
+    }
+
+    if (!bestConfig.has_value() ||
+        isBetterInputConfig(inputConfig, bestConfig.value())) {
+      bestConfig = inputConfig;
+    }
+  }
+
+  return bestConfig;
+}
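A condensed sketch of the selection rule implemented above, written with plain width/height pairs instead of the project's Resolution and SupportedStreamConfiguration types (the names and the aspect-ratio tolerance below are illustrative assumptions): never upscale, never change aspect ratio, and among the remaining candidates prefer the one closest in pixel count to the largest requested stream.

#include <cmath>
#include <optional>
#include <utility>
#include <vector>

using Size = std::pair<int, int>;  // (width, height)

// Loose stand-in for the project's isApproximatellySameAspectRatio helper.
bool sameAspectRatio(Size a, Size b) {
  return std::fabs(double(a.first) / a.second - double(b.first) / b.second) <
         0.05;
}

std::optional<Size> pickInput(const std::vector<Size>& inputs,
                              Size largestRequested) {
  const long requestedPixels =
      long(largestRequested.first) * largestRequested.second;
  std::optional<Size> best;
  for (const Size& input : inputs) {
    const long pixels = long(input.first) * input.second;
    if (pixels < requestedPixels || !sameAspectRatio(input, largestRequested)) {
      continue;  // would require upscaling or an aspect-ratio change
    }
    if (!best || pixels < long(best->first) * best->second) {
      best = input;  // smallest input that still covers the request
    }
  }
  return best;
}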
+
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+  return RequestSettings{
+      .jpegQuality = getJpegQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .thumbnailResolution =
+          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality)};
+}
+
 }  // namespace
 
 VirtualCameraSession::VirtualCameraSession(
@@ -215,15 +328,13 @@
   halStreams.clear();
   halStreams.resize(in_requestedConfiguration.streams.size());
 
-  sp<Surface> inputSurface = nullptr;
-  int inputWidth;
-  int inputHeight;
-
   if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
     ALOGE("%s: Requested stream configuration is not supported", __func__);
     return cameraStatus(Status::ILLEGAL_ARGUMENT);
   }
 
+  sp<Surface> inputSurface = nullptr;
+  std::optional<SupportedStreamConfiguration> inputConfig;
   {
     std::lock_guard<std::mutex> lock(mLock);
     for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
@@ -233,13 +344,21 @@
       }
     }
 
-    inputWidth = streams[0].width;
-    inputHeight = streams[0].height;
+    inputConfig = pickInputConfigurationForStreams(
+        streams, virtualCamera->getInputConfigs());
+    if (!inputConfig.has_value()) {
+      ALOGE(
+          "%s: Failed to pick any input configuration for stream configuration "
+          "request: %s",
+          __func__, in_requestedConfiguration.toString().c_str());
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
     if (mRenderThread == nullptr) {
       // If there's no client callback, start camera in test mode.
       const bool testMode = mVirtualCameraClientCallback == nullptr;
       mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-          mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
+          mSessionContext, resolutionFromInputConfig(*inputConfig),
+          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
           testMode);
       mRenderThread->start();
       inputSurface = mRenderThread->getInputSurface();
@@ -252,10 +371,9 @@
     // create single texture.
     mVirtualCameraClientCallback->onStreamConfigured(
         /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
-        inputWidth, inputHeight, Format::YUV_420_888);
+        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
   }
 
-  mFirstRequest.store(true);
   return ndk::ScopedAStatus::ok();
 }
 
@@ -263,12 +381,22 @@
     RequestTemplate in_type, CameraMetadata* _aidl_return) {
   ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));
 
+  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
+  if (camera == nullptr) {
+    ALOGW(
+        "%s: constructDefaultRequestSettings called on already unregistered "
+        "camera",
+        __func__);
+    return cameraStatus(Status::CAMERA_DISCONNECTED);
+  }
+
   switch (in_type) {
     case RequestTemplate::PREVIEW:
     case RequestTemplate::STILL_CAPTURE:
     case RequestTemplate::VIDEO_RECORD:
     case RequestTemplate::VIDEO_SNAPSHOT: {
-      *_aidl_return = createDefaultRequestSettings(in_type);
+      *_aidl_return =
+          createDefaultRequestSettings(in_type, camera->getInputConfigs());
       return ndk::ScopedAStatus::ok();
     }
     case RequestTemplate::MANUAL:
@@ -387,13 +515,25 @@
     const CaptureRequest& request) {
   ALOGD("%s: request: %s", __func__, request.toString().c_str());
 
-  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
-    return cameraStatus(Status::ILLEGAL_ARGUMENT);
-  }
-
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+  RequestSettings requestSettings;
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    // If the metadata is empty, the last received metadata applies; if it's
+    // non-empty, update it.
+    if (!request.settings.metadata.empty()) {
+      mCurrentRequestMetadata = request.settings;
+    }
+
+    // We don't have any metadata for this request - this means none was
+    // received in the first request, which is an error state.
+    if (mCurrentRequestMetadata.metadata.empty()) {
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
     cameraCallback = mCameraDeviceCallback;
   }
 
@@ -427,7 +567,7 @@
       return cameraStatus(Status::INTERNAL_ERROR);
     }
     mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
-        request.frameNumber, taskBuffers));
+        request.frameNumber, taskBuffers, requestSettings));
   }
 
   if (mVirtualCameraClientCallback != nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..556314f 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "utils/Mutex.h"
 
@@ -138,7 +139,8 @@
       int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
   std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
-  std::atomic_bool mFirstRequest{true};
+  aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+      GUARDED_BY(mLock);
 
   std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
 };
diff --git a/services/camera/virtualcamera/aidl/Android.bp b/services/camera/virtualcamera/aidl/Android.bp
index 9105b09..b3fe3ad 100644
--- a/services/camera/virtualcamera/aidl/Android.bp
+++ b/services/camera/virtualcamera/aidl/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -8,9 +9,11 @@
     unstable: true,
     srcs: [
         "android/companion/virtualcamera/Format.aidl",
+        "android/companion/virtualcamera/LensFacing.aidl",
         "android/companion/virtualcamera/IVirtualCameraCallback.aidl",
         "android/companion/virtualcamera/IVirtualCameraService.aidl",
         "android/companion/virtualcamera/VirtualCameraConfiguration.aidl",
+        "android/companion/virtualcamera/SensorOrientation.aidl",
         "android/companion/virtualcamera/SupportedStreamConfiguration.aidl",
     ],
     local_include_dir: ".",
@@ -21,6 +24,9 @@
         cpp: {
             enabled: false,
         },
+        rust: {
+            enabled: false,
+        },
         ndk: {
             enabled: true,
             additional_shared_libraries: [
@@ -31,6 +37,6 @@
         java: {
             enabled: true,
             platform_apis: true,
-        }
+        },
     },
 }
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
index cbe03e9..f5a84f7 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -36,7 +36,8 @@
      * @param height - height of the surface.
      * @param pixelFormat - pixel format of the surface.
      */
-    void onStreamConfigured(int streamId, in Surface surface, int width, int height, in Format pixelFormat);
+    void onStreamConfigured(int streamId, in Surface surface, int width, int height,
+            in Format pixelFormat);
 
     /**
      * Called when framework requests capture. This can be used by the client as a hint
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl
new file mode 100644
index 0000000..8568c91
--- /dev/null
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualcamera;
+
+/**
+ * Direction that the virtual camera faces relative to the device's screen.
+ *
+ * @hide
+ */
+@Backing(type="int")
+enum LensFacing {
+    FRONT = 0,
+    BACK = 1,
+    EXTERNAL = 2,
+}
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl
new file mode 100644
index 0000000..ef91f00
--- /dev/null
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualcamera;
+
+/**
+ * Sensor orientation of a virtual camera.
+ *
+ * @hide
+ */
+@Backing(type="int")
+enum SensorOrientation {
+    ORIENTATION_0 = 0,
+    ORIENTATION_90 = 90,
+    ORIENTATION_180 = 180,
+    ORIENTATION_270 = 270,
+}
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
index 7070cbd..6f86cbe 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.companion.virtualcamera;
 
 import android.companion.virtualcamera.Format;
@@ -26,4 +27,5 @@
     int width;
     int height;
     Format pixelFormat = Format.UNKNOWN;
+    int maxFps;
 }
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
index c1a2f22..887ad26 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,9 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.companion.virtualcamera;
 
 import android.companion.virtualcamera.IVirtualCameraCallback;
+import android.companion.virtualcamera.LensFacing;
+import android.companion.virtualcamera.SensorOrientation;
 import android.companion.virtualcamera.SupportedStreamConfiguration;
 
 /**
@@ -26,4 +29,6 @@
 parcelable VirtualCameraConfiguration {
     SupportedStreamConfiguration[] supportedStreamConfigs;
     IVirtualCameraCallback virtualCameraCallback;
+    SensorOrientation sensorOrientation = SensorOrientation.ORIENTATION_0;
+    LensFacing lensFacing;
 }
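
For reference, the extended parcelable can be populated from C++ through the generated NDK backend in the same way the tests later in this change do; the following is a minimal sketch with illustrative resolution and fps values (the helper name makeVgaFrontConfiguration is hypothetical):

    // Sketch only; assumes the generated NDK backend headers for this package,
    // e.g. "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h".
    using ::aidl::android::companion::virtualcamera::Format;
    using ::aidl::android::companion::virtualcamera::LensFacing;
    using ::aidl::android::companion::virtualcamera::SensorOrientation;
    using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
    using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;

    VirtualCameraConfiguration makeVgaFrontConfiguration() {
      VirtualCameraConfiguration configuration;
      configuration.supportedStreamConfigs.push_back(SupportedStreamConfiguration{
          .width = 640,
          .height = 480,
          .pixelFormat = Format::YUV_420_888,
          .maxFps = 30});  // maxFps is the field added by this change.
      configuration.virtualCameraCallback = nullptr;  // Or a BnVirtualCameraCallback impl.
      configuration.sensorOrientation = SensorOrientation::ORIENTATION_0;  // Defaulted in AIDL.
      configuration.lensFacing = LensFacing::FRONT;  // Newly added field.
      return configuration;
    }
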
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
new file mode 100644
index 0000000..5fa53d8
--- /dev/null
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -0,0 +1,61 @@
+package {
+    default_team: "trendy_team_xr_framework",
+}
+
+soong_config_module_type {
+    name: "virtual_device_build_flags_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "vdm",
+    bool_variables: [
+        "virtual_camera_service_enabled",
+    ],
+    properties: [
+        "cflags",
+    ],
+}
+
+soong_config_bool_variable {
+    name: "virtual_camera_service_enabled",
+}
+
+virtual_device_build_flags_cc_defaults {
+    name: "virtual_device_build_flags_defaults",
+    soong_config_variables: {
+        virtual_camera_service_enabled: {
+            cflags: ["-DVIRTUAL_CAMERA_SERVICE_ENABLED=1"],
+        },
+    },
+}
+
+cc_library_static {
+    name: "libvirtualdevicebuildflags",
+    srcs: [
+        "android_companion_virtualdevice_build_flags.cc",
+    ],
+    export_include_dirs: ["."],
+    defaults: ["virtual_device_build_flags_defaults"],
+}
+
+soong_config_module_type {
+    name: "virtual_device_build_flags_java_library",
+    module_type: "java_library",
+    config_namespace: "vdm",
+    bool_variables: [
+        "virtual_camera_service_enabled",
+    ],
+    properties: [
+        "srcs",
+    ],
+}
+
+virtual_device_build_flags_java_library {
+    name: "virtual_device_build_flag_java",
+    soong_config_variables: {
+        virtual_camera_service_enabled: {
+            srcs: ["java/enabled/**/*.java"],
+            conditions_default: {
+                srcs: ["java/disabled/**/*.java"],
+            },
+        },
+    },
+}
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
new file mode 100644
index 0000000..5525bc9
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+bool virtual_camera_service_build_flag() {
+#if VIRTUAL_CAMERA_SERVICE_ENABLED
+  return true;
+#else
+  return false;
+#endif
+}
+
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
new file mode 100644
index 0000000..718ce9b
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+// Returns true if the virtual camera service is enabled
+// in the build.
+//
+// TODO(b/309090563) - Deprecate in favor of the auto-generated library to
+// query build flags once available.
+bool virtual_camera_service_build_flag();
+
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
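
A hypothetical call site (not part of this change) would branch on this helper to skip virtual-camera setup in builds where the service is compiled out; a minimal sketch, assuming the exported include directory is on the include path:

    #include "android_companion_virtualdevice_build_flags.h"

    // Hypothetical helper: only proceed when the virtual camera service is in the build.
    void maybeSetUpVirtualCamera() {
      if (!android::companion::virtualdevice::flags::virtual_camera_service_build_flag()) {
        return;  // Virtual camera service disabled at build time.
      }
      // ... proceed with virtual camera setup ...
    }
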
diff --git a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
new file mode 100644
index 0000000..128d93c
--- /dev/null
+++ b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualdevice.flags;
+
+/** This file is included only if the RELEASE_PACKAGE_VIRTUAL_CAMERA build flag isn't set. */
+public class VirtualCameraServiceBuildFlag {
+
+    public static boolean isVirtualCameraServiceBuildFlagEnabled() {
+        return false;
+    }
+}
diff --git a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
new file mode 100644
index 0000000..02816fb
--- /dev/null
+++ b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualdevice.flags;
+
+/** This file is included only if the RELEASE_PACKAGE_VIRTUAL_CAMERA build flag is set. */
+public class VirtualCameraServiceBuildFlag {
+
+    public static boolean isVirtualCameraServiceBuildFlagEnabled() {
+        return true;
+    }
+}
diff --git a/services/camera/virtualcamera/fuzzer/Android.bp b/services/camera/virtualcamera/fuzzer/Android.bp
index 71e8f50..6a72167 100644
--- a/services/camera/virtualcamera/fuzzer/Android.bp
+++ b/services/camera/virtualcamera/fuzzer/Android.bp
@@ -15,7 +15,8 @@
  * limitations under the License.
  *
  *****************************************************************************/
- package {
+package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index bc46ba0..c51b4a3 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -14,11 +15,13 @@
         "libgtest",
         "libgmock",
     ],
-    srcs: ["EglUtilTest.cc",
-           "VirtualCameraDeviceTest.cc",
-           "VirtualCameraProviderTest.cc",
-           "VirtualCameraRenderThreadTest.cc",
-           "VirtualCameraServiceTest.cc",
-           "VirtualCameraSessionTest.cc"],
+    srcs: [
+        "EglUtilTest.cc",
+        "VirtualCameraDeviceTest.cc",
+        "VirtualCameraProviderTest.cc",
+        "VirtualCameraRenderThreadTest.cc",
+        "VirtualCameraServiceTest.cc",
+        "VirtualCameraSessionTest.cc",
+    ],
     test_suites: ["device-tests"],
 }
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 140ae65..ad9d83b 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,24 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <iterator>
 #include <memory>
 
 #include "VirtualCameraDevice.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
+#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 #include "log/log_main.h"
 #include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -34,27 +40,51 @@
 namespace {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::Stream;
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
 using ::aidl::android::hardware::camera::device::StreamType;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
 using ::testing::UnorderedElementsAreArray;
 using metadata_stream_t =
     camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
 constexpr int kCameraId = 42;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int k360pWidth = 640;
+constexpr int k360pHeight = 360;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kHdWidth = 1280;
 constexpr int kHdHeight = 720;
+constexpr int kMaxFps = 30;
+
+const Stream kVgaYUV420Stream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::YCBCR_420_888,
+};
+
+const Stream kVgaJpegStream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::BLOB,
+};
 
 struct AvailableStreamConfiguration {
   const int width;
   const int height;
   const int pixelFormat;
-  const metadata_stream_t streamConfiguration;
+  const metadata_stream_t streamConfiguration =
+      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
 };
 
 bool operator==(const AvailableStreamConfiguration& a,
@@ -96,18 +126,19 @@
 }
 
 struct VirtualCameraConfigTestParam {
-  std::vector<SupportedStreamConfiguration> inputConfig;
+  VirtualCameraConfiguration inputConfig;
   std::vector<AvailableStreamConfiguration> expectedAvailableStreamConfigs;
 };
 
-class VirtualCameraDeviceTest
+class VirtualCameraDeviceCharacteristicsTest
     : public testing::TestWithParam<VirtualCameraConfigTestParam> {};
 
-TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
+TEST_P(VirtualCameraDeviceCharacteristicsTest,
+       cameraCharacteristicsForInputFormat) {
   const VirtualCameraConfigTestParam& param = GetParam();
   std::shared_ptr<VirtualCameraDevice> camera =
-      ndk::SharedRefBase::make<VirtualCameraDevice>(
-          kCameraId, param.inputConfig, /*virtualCameraClientCallback=*/nullptr);
+      ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
+                                                    param.inputConfig);
 
   CameraMetadata metadata;
   ASSERT_TRUE(camera->getCameraCharacteristics(&metadata).isOk());
@@ -132,81 +163,202 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(
-    cameraCharacteristicsForInputFormat, VirtualCameraDeviceTest,
+    cameraCharacteristicsForInputFormat, VirtualCameraDeviceCharacteristicsTest,
     testing::Values(
         VirtualCameraConfigTestParam{
-            .inputConfig = {SupportedStreamConfiguration{
-                .width = kVgaWidth,
-                .height = kVgaHeight,
-                .pixelFormat = Format::YUV_420_888}},
+            .inputConfig =
+                VirtualCameraConfiguration{
+                    .supportedStreamConfigs = {SupportedStreamConfiguration{
+                        .width = kVgaWidth,
+                        .height = kVgaHeight,
+                        .pixelFormat = Format::YUV_420_888,
+                        .maxFps = kMaxFps}},
+                    .virtualCameraCallback = nullptr,
+                    .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                    .lensFacing = LensFacing::FRONT},
             .expectedAvailableStreamConfigs =
                 {AvailableStreamConfiguration{
-                     .width = kVgaWidth,
-                     .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
                      .pixelFormat =
-                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}},
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kVgaWidth,
+                     .height = kVgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}},
         VirtualCameraConfigTestParam{
-            .inputConfig = {SupportedStreamConfiguration{
-                                .width = kVgaWidth,
-                                .height = kVgaHeight,
-                                .pixelFormat = Format::YUV_420_888},
-                            SupportedStreamConfiguration{
-                                .width = kHdWidth,
-                                .height = kHdHeight,
-                                .pixelFormat = Format::YUV_420_888}},
+            .inputConfig =
+                VirtualCameraConfiguration{
+                    .supportedStreamConfigs =
+                        {SupportedStreamConfiguration{
+                             .width = kVgaWidth,
+                             .height = kVgaHeight,
+                             .pixelFormat = Format::YUV_420_888,
+                             .maxFps = kMaxFps},
+                         SupportedStreamConfiguration{
+                             .width = kHdWidth,
+                             .height = kHdHeight,
+                             .pixelFormat = Format::YUV_420_888,
+                             .maxFps = kMaxFps}},
+                    .virtualCameraCallback = nullptr,
+                    .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                    .lensFacing = LensFacing::BACK},
             .expectedAvailableStreamConfigs = {
                 AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = k360pWidth,
+                    .height = k360pHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = k360pWidth,
+                    .height = k360pHeight,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = k360pWidth,
+                    .height = k360pHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}}));
+
+class VirtualCameraDeviceTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId, VirtualCameraConfiguration{
+                       .supportedStreamConfigs = {SupportedStreamConfiguration{
+                           .width = kVgaWidth,
+                           .height = kVgaHeight,
+                           .pixelFormat = Format::YUV_420_888,
+                           .maxFps = kMaxFps}},
+                       .virtualCameraCallback = nullptr,
+                       .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                       .lensFacing = LensFacing::FRONT});
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mCamera;
+};
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfNonStallStreamsSucceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyNonStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams + 1,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfStallStreamsSucceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams + 1, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+  CameraMetadata metadata;
+  ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+  // The camera is configured with VGA input, so we expect a 240 x 180 thumbnail
+  // size in the characteristics, since it has the same aspect ratio.
+  EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+              ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
 
 }  // namespace
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index 615a77c..ab647a4 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -33,7 +33,10 @@
 namespace {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::common::TorchModeStatus;
@@ -49,6 +52,7 @@
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 30;
 constexpr char kVirtualCameraNameRegex[] =
     "device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
 
@@ -79,10 +83,15 @@
   std::shared_ptr<VirtualCameraProvider> mCameraProvider;
   std::shared_ptr<MockCameraProviderCallback> mMockCameraProviderCallback =
       ndk::SharedRefBase::make<MockCameraProviderCallback>();
-  std::vector<SupportedStreamConfiguration> mInputConfigs = {
-      SupportedStreamConfiguration{.width = kVgaWidth,
-                                   .height = kVgaHeight,
-                                   .pixelFormat = Format::YUV_420_888}};
+  VirtualCameraConfiguration mInputConfig = VirtualCameraConfiguration{
+      .supportedStreamConfigs = {SupportedStreamConfiguration{
+          .width = kVgaWidth,
+          .height = kVgaHeight,
+          .pixelFormat = Format::YUV_420_888,
+          .maxFps = kMaxFps}},
+      .virtualCameraCallback = nullptr,
+      .sensorOrientation = SensorOrientation::ORIENTATION_0,
+      .lensFacing = LensFacing::FRONT};
 };
 
 TEST_F(VirtualCameraProviderTest, SetNullCameraCallbackFails) {
@@ -109,7 +118,7 @@
 
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig);
   EXPECT_THAT(camera, Not(IsNull()));
   EXPECT_THAT(camera->getCameraName(), MatchesRegex(kVirtualCameraNameRegex));
 
@@ -127,7 +136,7 @@
       .WillOnce(Return(ndk::ScopedAStatus::ok()));
 
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig);
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
 
   // Created camera should be in the list of cameras.
@@ -139,7 +148,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig);
 
   EXPECT_CALL(*mMockCameraProviderCallback,
               cameraDeviceStatusChange(Eq(camera->getCameraName()),
@@ -156,7 +165,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveNonExistingCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig);
 
   // Removing non-existing camera should fail.
   const std::string cameraName = "DefinitelyNoTCamera";
diff --git a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
index 5f899b8..ddcb789 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
@@ -33,6 +33,7 @@
 #include "android/binder_auto_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -62,6 +63,7 @@
 
 constexpr int kInputWidth = 640;
 constexpr int kInputHeight = 480;
+const Resolution kInputResolution(kInputWidth, kInputHeight);
 
 Matcher<StreamBuffer> IsStreamBufferWithStatus(const int streamId,
                                                const int bufferId,
@@ -102,7 +104,8 @@
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-        *mSessionContext, kInputWidth, kInputHeight, mMockCameraDeviceCallback);
+        *mSessionContext, kInputResolution,
+        /*reportedSensorSize*/ kInputResolution, mMockCameraDeviceCallback);
   }
 
  protected:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 38261fb..d4d00a2 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -39,6 +39,8 @@
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::TorchModeStatus;
@@ -56,16 +58,25 @@
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 30;
+constexpr SensorOrientation kSensorOrientation =
+    SensorOrientation::ORIENTATION_0;
+constexpr LensFacing kLensFacing = LensFacing::FRONT;
 constexpr char kCreateVirtualDevicePermissions[] =
     "android.permission.CREATE_VIRTUAL_DEVICE";
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
-                                               const Format format) {
+                                               const Format format,
+                                               const int maxFps) {
   VirtualCameraConfiguration configuration;
-  configuration.supportedStreamConfigs.push_back(
-      {.width = width, .height = height, .pixelFormat = format});
+  configuration.supportedStreamConfigs.push_back({.width = width,
+                                                  .height = height,
+                                                  .pixelFormat = format,
+                                                  .maxFps = maxFps});
+  configuration.sensorOrientation = kSensorOrientation;
+  configuration.lensFacing = kLensFacing;
   return configuration;
 }
 
@@ -150,7 +161,7 @@
   int mDevNullFd;
 
   VirtualCameraConfiguration mVgaYUV420OnlyConfiguration =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, kMaxFps);
 };
 
 TEST_F(VirtualCameraServiceTest, RegisterCameraWithYuvInputSucceeds) {
@@ -173,7 +184,7 @@
   bool aidlRet;
 
   VirtualCameraConfiguration config =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
 
   ASSERT_TRUE(mCameraService->registerCamera(ndkToken, config, &aidlRet).isOk());
 
@@ -208,7 +219,7 @@
   bool aidlRet;
 
   VirtualCameraConfiguration config =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN, kMaxFps);
 
   ASSERT_FALSE(
       mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
@@ -219,7 +230,7 @@
 TEST_F(VirtualCameraServiceTest, ConfigurationWithTooHighResFails) {
   bool aidlRet;
   VirtualCameraConfiguration config =
-      createConfiguration(1000000, 1000000, Format::YUV_420_888);
+      createConfiguration(1000000, 1000000, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
       mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
@@ -230,7 +241,7 @@
 TEST_F(VirtualCameraServiceTest, ConfigurationWithUnalignedResolutionFails) {
   bool aidlRet;
   VirtualCameraConfiguration config =
-      createConfiguration(641, 481, Format::YUV_420_888);
+      createConfiguration(641, 481, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
       mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
@@ -241,7 +252,29 @@
 TEST_F(VirtualCameraServiceTest, ConfigurationWithNegativeResolutionFails) {
   bool aidlRet;
   VirtualCameraConfiguration config =
-      createConfiguration(-1, kVgaHeight, Format::YUV_420_888);
+      createConfiguration(-1, kVgaHeight, Format::YUV_420_888, kMaxFps);
+
+  ASSERT_FALSE(
+      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, ConfigurationWithTooLowMaxFpsFails) {
+  bool aidlRet;
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 0);
+
+  ASSERT_FALSE(
+      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, ConfigurationWithTooHighMaxFpsFails) {
+  bool aidlRet;
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 90);
 
   ASSERT_FALSE(
       mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 30bd2b6..5f313a0 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
 #include "VirtualCameraSession.h"
 #include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceCallback.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
@@ -29,21 +30,30 @@
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 namespace {
 
-constexpr int kWidth = 640;
-constexpr int kHeight = 480;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int kVgaWidth = 640;
+constexpr int kVgaHeight = 480;
+constexpr int kSvgaWidth = 800;
+constexpr int kSvgaHeight = 600;
+constexpr int kMaxFps = 30;
 constexpr int kStreamId = 0;
+constexpr int kSecondStreamId = 1;
 constexpr int kCameraId = 42;
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BnCameraDeviceCallback;
 using ::aidl::android::hardware::camera::device::BufferRequest;
@@ -96,23 +106,13 @@
   MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int), (override));
 };
 
-class VirtualCameraSessionTest : public ::testing::Test {
+class VirtualCameraSessionTestBase : public ::testing::Test {
  public:
-  void SetUp() override {
+  virtual void SetUp() override {
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mMockVirtualCameraClientCallback =
         ndk::SharedRefBase::make<MockVirtualCameraCallback>();
-    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
-        kCameraId,
-        std::vector<SupportedStreamConfiguration>{
-            SupportedStreamConfiguration{.width = kWidth,
-                                         .height = kHeight,
-                                         .pixelFormat = Format::YUV_420_888}},
-        mMockVirtualCameraClientCallback);
-    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
-        mVirtualCameraDevice, mMockCameraDeviceCallback,
-        mMockVirtualCameraClientCallback);
 
     // Explicitly defining default actions below to prevent gmock from
     // default-constructing ndk::ScopedAStatus, because default-constructed
@@ -138,6 +138,35 @@
  protected:
   std::shared_ptr<MockCameraDeviceCallback> mMockCameraDeviceCallback;
   std::shared_ptr<MockVirtualCameraCallback> mMockVirtualCameraClientCallback;
+};
+
+class VirtualCameraSessionTest : public VirtualCameraSessionTestBase {
+ public:
+  void SetUp() override {
+    VirtualCameraSessionTestBase::SetUp();
+
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = {SupportedStreamConfiguration{
+                                           .width = kVgaWidth,
+                                           .height = kVgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps},
+                                       SupportedStreamConfiguration{
+                                           .width = kSvgaWidth,
+                                           .height = kSvgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps}},
+            .virtualCameraCallback = mMockVirtualCameraClientCallback,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT});
+    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
   std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
   std::shared_ptr<VirtualCameraSession> mVirtualCameraSession;
 };
@@ -146,18 +175,22 @@
   PixelFormat format = PixelFormat::YCBCR_420_888;
   StreamConfiguration streamConfiguration;
   streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, format)};
+      createStream(kStreamId, kVgaWidth, kVgaHeight, format),
+      createStream(kSecondStreamId, kSvgaWidth, kSvgaHeight, format)};
   std::vector<HalStream> halStreams;
-  EXPECT_CALL(
-      *mMockVirtualCameraClientCallback,
-      onStreamConfigured(kStreamId, _, kWidth, kHeight, Format::YUV_420_888));
+
+  // Expect the highest supported resolution to be picked for the client input.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
 
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
 
   EXPECT_THAT(halStreams, SizeIs(streamConfiguration.streams.size()));
-  EXPECT_THAT(mVirtualCameraSession->getStreamIds(), ElementsAre(0));
+  EXPECT_THAT(mVirtualCameraSession->getStreamIds(),
+              ElementsAre(kStreamId, kSecondStreamId));
 }
 
 TEST_F(VirtualCameraSessionTest, SecondConfigureDropsUnreferencedStreams) {
@@ -165,18 +198,18 @@
   StreamConfiguration streamConfiguration;
   std::vector<HalStream> halStreams;
 
-  streamConfiguration.streams = {createStream(0, kWidth, kHeight, format),
-                                 createStream(1, kWidth, kHeight, format),
-                                 createStream(2, kWidth, kHeight, format)};
+  streamConfiguration.streams = {createStream(0, kVgaWidth, kVgaHeight, format),
+                                 createStream(1, kVgaWidth, kVgaHeight, format),
+                                 createStream(2, kVgaWidth, kVgaHeight, format)};
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
 
   EXPECT_THAT(mVirtualCameraSession->getStreamIds(), ElementsAre(0, 1, 2));
 
-  streamConfiguration.streams = {createStream(0, kWidth, kHeight, format),
-                                 createStream(2, kWidth, kHeight, format),
-                                 createStream(3, kWidth, kHeight, format)};
+  streamConfiguration.streams = {createStream(0, kVgaWidth, kVgaHeight, format),
+                                 createStream(2, kVgaWidth, kVgaHeight, format),
+                                 createStream(3, kVgaWidth, kVgaHeight, format)};
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
@@ -201,8 +234,8 @@
 
 TEST_F(VirtualCameraSessionTest, onProcessCaptureRequestTriggersClientCallback) {
   StreamConfiguration streamConfiguration;
-  streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, PixelFormat::YCBCR_420_888)};
+  streamConfiguration.streams = {createStream(kStreamId, kVgaWidth, kVgaHeight,
+                                              PixelFormat::YCBCR_420_888)};
   std::vector<CaptureRequest> requests(1);
   requests[0].frameNumber = 42;
   requests[0].settings = *(
@@ -226,8 +259,8 @@
 
 TEST_F(VirtualCameraSessionTest, configureAfterCameraRelease) {
   StreamConfiguration streamConfiguration;
-  streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, PixelFormat::YCBCR_420_888)};
+  streamConfiguration.streams = {createStream(kStreamId, kVgaWidth, kVgaHeight,
+                                              PixelFormat::YCBCR_420_888)};
   std::vector<HalStream> halStreams;
 
   // Release virtual camera.
@@ -240,6 +273,108 @@
       Eq(static_cast<int32_t>(Status::CAMERA_DISCONNECTED)));
 }
 
+TEST_F(VirtualCameraSessionTest, ConfigureWithEmptyStreams) {
+  StreamConfiguration streamConfiguration;
+  std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return ILLEGAL_ARGUMENT.
+  EXPECT_THAT(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .getServiceSpecificError(),
+      Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(VirtualCameraSessionTest, ConfigureWithDifferentAspectRatioFails) {
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {
+      createStream(kStreamId, kVgaWidth, kVgaHeight, PixelFormat::YCBCR_420_888),
+      createStream(kSecondStreamId, kVgaHeight, kVgaWidth,
+                   PixelFormat::YCBCR_420_888)};
+
+  std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return ILLEGAL_ARGUMENT.
+  EXPECT_THAT(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .getServiceSpecificError(),
+      Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+class VirtualCameraSessionInputChoiceTest : public VirtualCameraSessionTestBase {
+ public:
+  std::shared_ptr<VirtualCameraSession> createSession(
+      const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId, VirtualCameraConfiguration{
+                       .supportedStreamConfigs = supportedInputConfigs,
+                       .virtualCameraCallback = mMockVirtualCameraClientCallback,
+                       .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                       .lensFacing = LensFacing::FRONT});
+    return ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
+};
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForDownsampledOutput) {
+  // Create a camera configured to support SVGA YUV input and QVGA RGBA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure VGA stream. Expect SVGA input to be chosen to downscale from.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kVgaWidth, kVgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the SVGA input to be reported to the client callback.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForMatchingResolution) {
+  // Create a camera configured to support SVGA YUV input and QVGA RGBA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure a QVGA stream. Expect the matching QVGA input to be chosen.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the QVGA input to be reported to the client callback.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+                                 Format::RGBA_8888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 510fd33..7554a67 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -68,12 +68,13 @@
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
+  uniform mat4 aTextureTransformMatrix; // Transform matrix given by surface texture.
   in vec4 aPosition;
   in vec2 aTextureCoord;
   out vec2 vTextureCoord;
   void main() {
     gl_Position = aPosition;
-    vTextureCoord = aTextureCoord;
+    vTextureCoord = (aTextureTransformMatrix * vec4(aTextureCoord, 0.0, 1.0)).xy;
   })";
 
 constexpr char kExternalYuvTextureFragmentShader[] = R"(#version 300 es
@@ -100,10 +101,12 @@
     })";
 
 constexpr int kCoordsPerVertex = 3;
-constexpr std::array<float, 12> kSquareCoords{-1.f, 1.0f, 0.0f,  // top left
-                                              -1.f, -1.f, 0.0f,  // bottom left
-                                              1.0f, -1.f, 0.0f,  // bottom right
-                                              1.0f, 1.0f, 0.0f};  // top right
+
+constexpr std::array<float, 12> kSquareCoords{
+    -1.f, -1.0f, 0.0f,   // top left
+    -1.f, 1.f,   0.0f,   // bottom left
+    1.0f, 1.f,   0.0f,   // bottom right
+    1.0f, -1.0f, 0.0f};  // top right
 
 constexpr std::array<float, 8> kTextureCoords{0.0f, 1.0f,   // top left
                                               0.0f, 0.0f,   // bottom left
@@ -265,32 +268,50 @@
   } else {
     ALOGE("External texture EGL shader program initialization failed.");
   }
+
+  // Lookup and cache handles to uniforms & attributes.
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+  mTransformMatrixHandle =
+      glGetUniformLocation(mProgram, "aTextureTransformMatrix");
+  mTextureHandle = glGetUniformLocation(mProgram, "uTexture");
+
+  // Pass vertex array to the shader.
+  glEnableVertexAttribArray(mPositionHandle);
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  // Pass texture coordinates corresponding to vertex array to the shader.
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTextureProgram::draw(GLuint textureId) {
+EglTextureProgram::~EglTextureProgram() {
+  if (mPositionHandle != -1) {
+    glDisableVertexAttribArray(mPositionHandle);
+  }
+  if (mTextureCoordHandle != -1) {
+    glDisableVertexAttribArray(mTextureCoordHandle);
+  }
+}
+
+bool EglTextureProgram::draw(GLuint textureId,
+                             const std::array<float, 16>& transformMatrix) {
   // Load compiled shader.
   glUseProgram(mProgram);
   if (checkEglError("glUseProgram")) {
     return false;
   }
 
-  // Pass vertex array to the shader.
-  int positionHandle = glGetAttribLocation(mProgram, "aPosition");
-  glEnableVertexAttribArray(positionHandle);
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
-
-  // Pass texture coordinates corresponding to vertex array to the shader.
-  int textureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
-  glEnableVertexAttribArray(textureCoordHandle);
-  glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, false,
-                        kTextureCoords.size(), kTextureCoords.data());
+  // Pass transformation matrix for the texture coordinates.
+  glUniformMatrix4fv(mTransformMatrixHandle, 1, /*transpose=*/GL_FALSE,
+                     transformMatrix.data());
 
   // Configure texture for the shader.
-  int textureHandle = glGetUniformLocation(mProgram, "uTexture");
   glActiveTexture(GL_TEXTURE0);
   glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
-  glUniform1i(textureHandle, 0);
+  glUniform1i(mTextureHandle, 0);
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index 1b5f2cd..c695cbb 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 
+#include <array>
+
 #include "GLES/gl.h"
 
 namespace android {
@@ -58,8 +60,23 @@
   enum class TextureFormat { RGBA, YUV };
 
   EglTextureProgram(TextureFormat textureFormat = TextureFormat::YUV);
+  virtual ~EglTextureProgram();
 
-  bool draw(GLuint textureId);
+  // Draws the texture over the whole viewport, applying transformMatrix to the
+  // texture coordinates.
+  //
+  // The transform matrix is a 4x4 matrix represented in column-major order and
+  // is applied to the texture coordinates (s, t, 0, 1), with s and t in the
+  // [0, 1] range, prior to sampling:
+  //
+  // textureCoord = (transformMatrix * vec4(s, t, 0, 1)).xy
+  bool draw(GLuint textureId, const std::array<float, 16>& transformMatrix);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mTransformMatrixHandle = -1;
+  int mTextureHandle = -1;
 };
 
 }  // namespace virtualcamera
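
As a worked example of the documented convention (a column-major 4x4 matrix applied to (s, t, 0, 1) before sampling), the matrix a SurfaceTexture commonly reports is a vertical flip, which maps t to 1 - t. The standalone sketch below only illustrates the math; transformTexCoord and kFlipY are hypothetical names:

    #include <array>
    #include <cstdio>

    // Apply a column-major 4x4 transform to a texture coordinate (s, t, 0, 1),
    // matching the convention documented for EglTextureProgram::draw().
    std::array<float, 2> transformTexCoord(const std::array<float, 16>& m, float s, float t) {
      // Column-major indexing: m[col * 4 + row].
      const float outS = m[0] * s + m[4] * t + m[12];
      const float outT = m[1] * s + m[5] * t + m[13];
      return {outS, outT};
    }

    int main() {
      // A typical SurfaceTexture matrix: vertical flip, i.e. t' = 1 - t, s unchanged.
      const std::array<float, 16> kFlipY = {1.f, 0.f, 0.f, 0.f,
                                            0.f, -1.f, 0.f, 0.f,
                                            0.f, 0.f, 1.f, 0.f,
                                            0.f, 1.f, 0.f, 1.f};
      const auto [s, t] = transformTexCoord(kFlipY, 0.25f, 0.f);
      std::printf("%.2f %.2f\n", s, t);  // Prints "0.25 1.00".
      return 0;
    }
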
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 5b479c0..9f26e19 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -68,6 +68,16 @@
   return mTextureId;
 }
 
+GLuint EglSurfaceTexture::getTextureId() const {
+  return mTextureId;
+}
+
+std::array<float, 16> EglSurfaceTexture::getTransformMatrix() {
+  std::array<float, 16> matrix;
+  mGlConsumer->getTransformMatrix(matrix.data());
+  return matrix;
+}
+
 uint32_t EglSurfaceTexture::getWidth() const {
   return mWidth;
 }
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index 14dc7d6..faad7c4 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -57,6 +57,17 @@
   // Returns EGL texture id of the texture.
   GLuint updateTexture();
 
+  // Returns EGL texture id of the underlying texture.
+  GLuint getTextureId() const;
+
+  // Returns the 4x4 transformation matrix, in column-major order, that should
+  // be applied to EGL texture coordinates before sampling from the texture
+  // backed by an Android native buffer, so that the corresponding region of
+  // the underlying buffer is sampled.
+  //
+  // See SurfaceTexture.getTransformMatrix for more details.
+  std::array<float, 16> getTransformMatrix();
+
  private:
   sp<IGraphicBufferProducer> mBufferProducer;
   sp<IGraphicBufferConsumer> mBufferConsumer;
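A rough caller-side sketch of how the new accessors combine with EglTextureProgram::draw; the variable names here are illustrative and not part of this change:

// Assumes eglSurfaceTexture and textureProgram are valid instances of
// EglSurfaceTexture and EglTextureProgram from this directory.
eglSurfaceTexture->updateTexture();
const GLuint textureId = eglSurfaceTexture->getTextureId();
const std::array<float, 16> transform = eglSurfaceTexture->getTransformMatrix();
if (!textureProgram->draw(textureId, transform)) {
  ALOGE("Rendering captured frame failed");
}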
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..8569eff 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -19,7 +19,7 @@
 
 #include <cstddef>
 #include <cstdint>
-#include <memory>
+#include <optional>
 #include <vector>
 
 #include "android/hardware_buffer.h"
@@ -34,11 +34,9 @@
 namespace virtualcamera {
 namespace {
 
-constexpr int kJpegQuality = 80;
-
 class LibJpegContext {
  public:
-  LibJpegContext(int width, int height, const size_t outBufferSize,
+  LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
                  void* outBuffer)
       : mWidth(width),
         mHeight(height),
@@ -76,7 +74,7 @@
     jpeg_set_defaults(&mCompressStruct);
 
     // Set quality and colorspace.
-    jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+    jpeg_set_quality(&mCompressStruct, quality, 1);
     jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
 
     // Configure RAW input mode - this lets libjpeg know we're providing raw,
@@ -94,11 +92,31 @@
     mCompressStruct.comp_info[2].v_samp_factor = 1;
   }
 
-  bool compress(const android_ycbcr& ycbr) {
+  LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+    mApp1Data = app1Data;
+    mApp1DataSize = size;
+    return *this;
+  }
+
+  std::optional<size_t> compress(const android_ycbcr& ycbr) {
+    // TODO(b/301023410) - Add support for compressing image sizes not aligned
+    // with DCT size.
+    if (mWidth % (2 * DCTSIZE) || (mHeight % (2 * DCTSIZE))) {
+      ALOGE(
+          "%s: Compressing YUV420 image with size %dx%d not aligned with 2 * "
+          "DCTSIZE (%d) is not currently supported.",
+          __func__, mWidth, mHeight, 2 * DCTSIZE);
+      return std::nullopt;
+    }
+
+    // Chroma planes have 1/2 resolution of the original image.
+    const int cHeight = mHeight / 2;
+    const int cWidth = mWidth / 2;
+
     // Prepare arrays of pointers to scanlines of each plane.
     std::vector<JSAMPROW> yLines(mHeight);
-    std::vector<JSAMPROW> cbLines(mHeight / 2);
-    std::vector<JSAMPROW> crLines(mHeight / 2);
+    std::vector<JSAMPROW> cbLines(cHeight);
+    std::vector<JSAMPROW> crLines(cHeight);
 
     uint8_t* y = static_cast<uint8_t*>(ycbr.y);
     uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,23 +125,27 @@
     // Since UV samples might be interleaved (semiplanar) we need to copy
     // them to separate planes, since libjpeg doesn't directly
     // support processing semiplanar YUV.
-    const int c_samples = (mWidth / 2) * (mHeight / 2);
-    std::vector<uint8_t> cb_plane(c_samples);
-    std::vector<uint8_t> cr_plane(c_samples);
+    const int cSamples = cWidth * cHeight;
+    std::vector<uint8_t> cb_plane(cSamples);
+    std::vector<uint8_t> cr_plane(cSamples);
 
     // TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
-    for (int i = 0; i < c_samples; ++i) {
-      cb_plane[i] = *cb;
-      cr_plane[i] = *cr;
-      cb += ycbr.chroma_step;
-      cr += ycbr.chroma_step;
+    int out_idx = 0;
+    for (int i = 0; i < cHeight; ++i) {
+      for (int j = 0; j < cWidth; ++j) {
+        cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+        cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+        out_idx++;
+      }
+      cb += ycbr.cstride;
+      cr += ycbr.cstride;
     }
 
     // Collect pointers to individual scanline of each plane.
     for (int i = 0; i < mHeight; ++i) {
       yLines[i] = y + i * ycbr.ystride;
     }
-    for (int i = 0; i < (mHeight / 2); ++i) {
+    for (int i = 0; i < cHeight; ++i) {
       cbLines[i] = cb_plane.data() + i * (mWidth / 2);
       crLines[i] = cr_plane.data() + i * (mWidth / 2);
     }
@@ -131,18 +153,6 @@
     return compress(yLines, cbLines, crLines);
   }
 
-  bool compressBlackImage() {
-    // We only really need to prepare one scanline for Y and one shared scanline
-    // for Cb & Cr.
-    std::vector<uint8_t> yLine(mWidth, 0);
-    std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
-
-    std::vector<JSAMPROW> yLines(mHeight, yLine.data());
-    std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
-
-    return compress(yLines, cLines, cLines);
-  }
-
  private:
   void setSuccess(const boolean success) {
     mSuccess = success;
@@ -165,11 +175,18 @@
   // Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
   // each vector needs to correspond to height of corresponding plane.
   //
-  // Returns true if compression is successful, false otherwise.
-  bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
-                std::vector<JSAMPROW>& crLines) {
+  // Returns size of compressed image in bytes on success, empty optional otherwise.
+  std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+                                 std::vector<JSAMPROW>& cbLines,
+                                 std::vector<JSAMPROW>& crLines) {
     jpeg_start_compress(&mCompressStruct, TRUE);
 
+    if (mApp1Data != nullptr && mApp1DataSize > 0) {
+      ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+      jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+                        static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+    }
+
     while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
       const uint32_t batchSize = DCTSIZE * 2;
       const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +198,11 @@
         ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
               __FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
               mCompressStruct.image_height);
-        return false;
+        return std::nullopt;
       }
     }
     jpeg_finish_compress(&mCompressStruct);
-    return mSuccess;
+    return mEncodedSize;
   }
 
   // === libjpeg callbacks below ===
@@ -217,6 +234,10 @@
   jpeg_error_mgr mErrorMgr;
   jpeg_destination_mgr mDestinationMgr;
 
+  // APP1 data.
+  const uint8_t* mApp1Data = nullptr;
+  size_t mApp1DataSize = 0;
+
   // Dimensions of the input image.
   int mWidth;
   int mHeight;
@@ -235,15 +256,15 @@
 
 }  // namespace
 
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
-}
-
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer)
-      .compressBlackImage();
+std::optional<size_t> compressJpeg(const int width, const int height,
+                                   const int quality, const android_ycbcr& ycbcr,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer) {
+  LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+  if (!app1ExifData.empty()) {
+    context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+  }
+  return context.compress(ycbcr);
 }
 
 }  // namespace virtualcamera
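Note on the alignment check in compress() above: libjpeg defines DCTSIZE as 8, so both dimensions must currently be multiples of 16. A tiny helper, included here only to make the arithmetic concrete (it is not part of this change):

constexpr bool isJpegInputAligned(int width, int height) {
  constexpr int kAlignment = 2 * 8;  // 2 * DCTSIZE, with DCTSIZE == 8 in libjpeg.
  return (width % kAlignment == 0) && (height % kAlignment == 0);
}
// isJpegInputAligned(640, 480) and isJpegInputAligned(1280, 720) hold,
// while isJpegInputAligned(1920, 1080) does not (1080 % 16 == 8).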
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..83ed74b 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -17,9 +17,8 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
-#include <memory>
+#include <optional>
 
-#include "android/hardware_buffer.h"
 #include "system/graphics.h"
 
 namespace android {
@@ -27,14 +26,20 @@
 namespace virtualcamera {
 
 // Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer);
-
-// Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer);
+// * width - width of the image
+// * height - height of the image
+// * quality - 0-100, higher number corresponds to higher quality.
+// * ycbcr - android_ycbcr structure describing the layout of the input YUV420 image.
+// * app1ExifData - vector containing data to be included in APP1
+//   segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+                                   const android_ycbcr& ycbcr,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer);
 
 }  // namespace virtualcamera
 }  // namespace companion
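For reference, a minimal caller-side sketch of the updated compressJpeg() signature; the buffer and EXIF variables below are placeholders, not names used by this patch:

// Assumes ycbcr describes a locked YUV420 source buffer, app1ExifData holds an
// optional EXIF APP1 payload (may be empty), and jpegBuffer is a std::vector<uint8_t>.
std::optional<size_t> encodedSize =
    compressJpeg(width, height, /*quality=*/85, ycbcr, app1ExifData,
                 jpegBuffer.size(), jpegBuffer.data());
if (!encodedSize.has_value()) {
  ALOGE("JPEG compression failed");
}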
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
deleted file mode 100644
index 92a48b9..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
-
-#include "MetadataBuilder.h"
-
-#include <algorithm>
-#include <cstdint>
-#include <iterator>
-#include <memory>
-#include <utility>
-#include <variant>
-#include <vector>
-
-#include "CameraMetadata.h"
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "log/log.h"
-#include "system/camera_metadata.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using ::android::hardware::camera::common::helper::CameraMetadata;
-
-template <typename To, typename From>
-std::vector<To> convertTo(const std::vector<From>& from) {
-  std::vector<To> to;
-  to.reserve(from.size());
-  std::transform(from.begin(), from.end(), std::back_inserter(to),
-                 [](const From& x) { return static_cast<To>(x); });
-  return to;
-}
-
-}  // namespace
-
-MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
-    camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
-  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
-      std::vector<uint8_t>({static_cast<uint8_t>(hwLevel)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
-  const uint8_t metadataVal = flashAvailable
-                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
-                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
-  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = std::vector<uint8_t>({metadataVal});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setLensFacing(
-    camera_metadata_enum_android_lens_facing lensFacing) {
-  mEntryMap[ANDROID_LENS_FACING] =
-      std::vector<uint8_t>({static_cast<uint8_t>(lensFacing)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
-  mEntryMap[ANDROID_SENSOR_ORIENTATION] =
-      std::vector<int32_t>({sensorOrientation});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorTimestamp(
-    std::chrono::nanoseconds timestamp) {
-  mEntryMap[ANDROID_SENSOR_TIMESTAMP] =
-      std::vector<int64_t>({timestamp.count()});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
-    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-        faceDetectModes) {
-  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
-      convertTo<uint8_t>(faceDetectModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfMode(
-    const camera_metadata_enum_android_control_af_mode_t mode) {
-  mEntryMap[ANDROID_CONTROL_AF_MODE] =
-      std::vector<uint8_t>({static_cast<uint8_t>(mode)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRange(
-    const int32_t minFps, const int32_t maxFps) {
-  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] =
-      std::vector<int32_t>({minFps, maxFps});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
-                                                       int32_t maxAwbRegions,
-                                                       int32_t maxAfRegions) {
-  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
-      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeRegions(
-    const std::vector<ControlRegion>& aeRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * aeRegions.size());
-  for (const ControlRegion& region : aeRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfRegions(
-    const std::vector<ControlRegion>& afRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * afRegions.size());
-  for (const ControlRegion& region : afRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAwbRegions(
-    const std::vector<ControlRegion>& awbRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * awbRegions.size());
-  for (const ControlRegion& region : awbRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
-    const camera_metadata_enum_android_control_capture_intent_t intent) {
-  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] =
-      std::vector<uint8_t>({static_cast<uint8_t>(intent)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
-  mEntryMap[ANDROID_JPEG_MAX_SIZE] = std::vector<int32_t>({size});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
-    const std::vector<StreamConfiguration>& streamConfigurations) {
-  std::vector<int32_t> metadataStreamConfigs;
-  std::vector<int64_t> metadataMinFrameDurations;
-  std::vector<int64_t> metadataStallDurations;
-  metadataStreamConfigs.reserve(streamConfigurations.size());
-  metadataMinFrameDurations.reserve(streamConfigurations.size());
-  metadataStallDurations.reserve(streamConfigurations.size());
-
-  for (const auto& config : streamConfigurations) {
-    metadataStreamConfigs.push_back(config.format);
-    metadataStreamConfigs.push_back(config.width);
-    metadataStreamConfigs.push_back(config.height);
-    metadataStreamConfigs.push_back(
-        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
-
-    metadataMinFrameDurations.push_back(config.format);
-    metadataMinFrameDurations.push_back(config.width);
-    metadataMinFrameDurations.push_back(config.height);
-    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
-
-    metadataStallDurations.push_back(config.format);
-    metadataStallDurations.push_back(config.width);
-    metadataStallDurations.push_back(config.height);
-    metadataStallDurations.push_back(config.minStallDuration.count());
-  }
-
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
-      metadataStreamConfigs;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
-      metadataMinFrameDurations;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
-      metadataMinFrameDurations;
-
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
-      std::vector<float>(maxZoom);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
-                                                           const float max) {
-  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
-                                                           int x1, int y1) {
-  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
-      std::vector<int32_t>({x0, y0, x1, y1});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
-                                                                int32_t max) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
-      std::vector<int32_t>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
-    const camera_metadata_rational step) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
-      std::vector<camera_metadata_rational>({step});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
-    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
-        capabilities) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
-      convertTo<uint8_t>(capabilities);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
-    const std::vector<camera_metadata_tag_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
-      convertTo<int32_t>(keys);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
-  std::vector<camera_metadata_tag_t> availableKeys;
-  availableKeys.reserve(mEntryMap.size());
-  for (const auto& [key, _] : mEntryMap) {
-    if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
-      availableKeys.push_back(key);
-    }
-  }
-  setAvailableCharacteristicKeys(availableKeys);
-  return *this;
-}
-
-std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
-MetadataBuilder::build() const {
-  CameraMetadata metadataHelper;
-  for (const auto& entry : mEntryMap) {
-    status_t ret = std::visit(
-        [&](auto&& arg) {
-          return metadataHelper.update(entry.first, arg.data(), arg.size());
-        },
-        entry.second);
-    if (ret != NO_ERROR) {
-      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
-            get_camera_metadata_tag_name(entry.first),
-            ::android::statusToString(ret).c_str());
-      return nullptr;
-    }
-  }
-
-  const camera_metadata_t* metadata = metadataHelper.getAndLock();
-  if (metadata == nullptr) {
-    ALOGE(
-        "Failure when constructing metadata -> CameraMetadata helper returned "
-        "nullptr");
-    return nullptr;
-  }
-
-  auto aidlMetadata =
-      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
-  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
-  aidlMetadata->metadata.assign(data_ptr,
-                                data_ptr + get_camera_metadata_size(metadata));
-  metadataHelper.unlock(metadata);
-
-  return aidlMetadata;
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
deleted file mode 100644
index d992d31..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-
-#include <chrono>
-#include <cstdint>
-#include <map>
-#include <memory>
-#include <variant>
-#include <vector>
-
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "system/camera_metadata.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Convenience builder for the
-// aidl::android::hardware::camera::device::CameraMetadata.
-//
-// Calling the same builder setter multiple will overwrite the value.
-// This class is not thread-safe.
-class MetadataBuilder {
- public:
-  struct StreamConfiguration {
-    int32_t width = 0;
-    int32_t height = 0;
-    int32_t format = 0;
-    // Minimal frame duration - corresponds to maximal FPS for given format.
-    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minFrameDuration{std::chrono::seconds(1) / 30};
-    // Minimal stall duration.
-    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minStallDuration{0};
-  };
-
-  struct ControlRegion {
-    int32_t x0 = 0;
-    int32_t y0 = 0;
-    int32_t x1 = 0;
-    int32_t y1 = 0;
-    int32_t weight = 0;
-  };
-
-  MetadataBuilder() = default;
-  ~MetadataBuilder() = default;
-
-  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
-  MetadataBuilder& setSupportedHardwareLevel(
-      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
-
-  // Whether this camera device has a flash unit
-  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
-  MetadataBuilder& setFlashAvailable(bool flashAvailable);
-
-  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
-  MetadataBuilder& setLensFacing(
-      camera_metadata_enum_android_lens_facing lensFacing);
-
-  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
-
-  // Time at start of exposure of first row of the image
-  // sensor active array, in nanoseconds.
-  //
-  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
-
-  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
-
-  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableFaceDetectModes(
-      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-          faceDetectMode);
-
-  // Sets available stream configurations along with corresponding minimal frame
-  // durations (corresponding to max fps) and stall durations.
-  //
-  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
-  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableOutputStreamConfigurations(
-      const std::vector<StreamConfiguration>& streamConfigurations);
-
-  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
-
-  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfMode(
-      const camera_metadata_enum_android_control_af_mode_t mode);
-
-  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeAvailableFpsRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlCaptureIntent(
-      camera_metadata_enum_android_control_capture_intent_t intent);
-
-  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
-                                        int32_t maxAwbRegions,
-                                        int32_t maxAfRegions);
-
-  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeRegions(
-      const std::vector<ControlRegion>& aeRegions);
-
-  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAwbRegions(
-      const std::vector<ControlRegion>& awbRegions);
-
-  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfRegions(
-      const std::vector<ControlRegion>& afRegions);
-
-  // The size of the compressed JPEG image, in bytes.
-  //
-  // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setMaxJpegSize(int32_t size);
-
-  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
-
-  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlZoomRatioRange(float min, float max);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureRequest.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureResult.
-  //
-  // See ANDROID_RESULT_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
-
-  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCapabilities(
-      const std::vector<
-          camera_metadata_enum_android_request_available_capabilities_t>&
-          capabilities);
-
-  // A list of all keys that the camera device has available to use.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCharacteristicKeys(
-      const std::vector<camera_metadata_tag_t>& keys);
-
-  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
-  // containing all previously set tags.
-  MetadataBuilder& setAvailableCharacteristicKeys();
-
-  // Build CameraMetadata instance.
-  //
-  // Returns nullptr in case something went wrong.
-  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
-  build() const;
-
- private:
-  // Maps metadata tags to vectors of values for the given tag.
-  std::map<camera_metadata_tag_t,
-           std::variant<std::vector<int64_t>, std::vector<int32_t>,
-                        std::vector<uint8_t>, std::vector<float>,
-                        std::vector<camera_metadata_rational_t>>>
-      mEntryMap;
-};
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
new file mode 100644
index 0000000..e3d9e28
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -0,0 +1,729 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MetadataUtil"
+
+#include "MetadataUtil.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#include <utility>
+#include <variant>
+#include <vector>
+
+#include "CameraMetadata.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "log/log.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+namespace {
+
+using ::android::hardware::camera::common::helper::CameraMetadata;
+
+template <typename To, typename From>
+std::vector<To> convertTo(const std::vector<From>& from) {
+  std::vector<To> to;
+  to.reserve(from.size());
+  std::transform(from.begin(), from.end(), std::back_inserter(to),
+                 [](const From& x) { return static_cast<To>(x); });
+  return to;
+}
+
+template <typename To, typename From>
+std::vector<To> asVectorOf(const From from) {
+  return std::vector<To>({static_cast<To>(from)});
+}
+
+}  // namespace
+
+MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
+    camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
+      asVectorOf<uint8_t>(hwLevel);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
+  const uint8_t metadataVal = flashAvailable
+                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
+                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = asVectorOf<uint8_t>(metadataVal);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashState(
+    const camera_metadata_enum_android_flash_state_t flashState) {
+  mEntryMap[ANDROID_FLASH_STATE] = asVectorOf<uint8_t>(flashState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashMode(
+    const camera_metadata_enum_android_flash_mode_t flashMode) {
+  mEntryMap[ANDROID_FLASH_MODE] = asVectorOf<uint8_t>(flashMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensFacing(
+    camera_metadata_enum_android_lens_facing lensFacing) {
+  mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorReadoutTimestamp(
+    const camera_metadata_enum_android_sensor_readout_timestamp_t
+        sensorReadoutTimestamp) {
+  mEntryMap[ANDROID_SENSOR_READOUT_TIMESTAMP] =
+      asVectorOf<uint8_t>(sensorReadoutTimestamp);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFocalLengths(
+    const std::vector<float>& focalLengths) {
+  mEntryMap[ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS] = focalLengths;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFocalLength(float focalLength) {
+  mEntryMap[ANDROID_LENS_FOCAL_LENGTH] = asVectorOf<float>(focalLength);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
+  mEntryMap[ANDROID_SENSOR_ORIENTATION] = asVectorOf<int32_t>(sensorOrientation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestampSource(
+    const camera_metadata_enum_android_sensor_info_timestamp_source_t
+        timestampSource) {
+  mEntryMap[ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE] =
+      asVectorOf<uint8_t>(timestampSource);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestamp(
+    std::chrono::nanoseconds timestamp) {
+  mEntryMap[ANDROID_SENSOR_TIMESTAMP] = asVectorOf<int64_t>(timestamp.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
+    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+        faceDetectModes) {
+  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
+      convertTo<uint8_t>(faceDetectModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableTestPatternModes(
+    const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+        testPatternModes) {
+  mEntryMap[ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES] =
+      convertTo<int32_t>(testPatternModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFaceDetectMode(
+    const camera_metadata_enum_android_statistics_face_detect_mode_t
+        faceDetectMode) {
+  mEntryMap[ANDROID_STATISTICS_FACE_DETECT_MODE] =
+      asVectorOf<uint8_t>(faceDetectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMode(
+    const camera_metadata_enum_android_control_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableSceneModes(
+    const std::vector<camera_metadata_enum_android_control_scene_mode>&
+        availableSceneModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_SCENE_MODES] =
+      convertTo<uint8_t>(availableSceneModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
+    const std::vector<camera_metadata_enum_android_control_effect_mode>&
+        availableEffects) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_EFFECTS] =
+      convertTo<uint8_t>(availableEffects);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlEffectMode(
+    const camera_metadata_enum_android_control_effect_mode_t effectMode) {
+  mEntryMap[ANDROID_CONTROL_EFFECT_MODE] = asVectorOf<uint8_t>(effectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableVideoStabilizationModes(
+    const std::vector<
+        camera_metadata_enum_android_control_video_stabilization_mode_t>&
+        videoStabilizationModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES] =
+      convertTo<uint8_t>(videoStabilizationModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfMode(
+    const camera_metadata_enum_android_control_af_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AF_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+// See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+MetadataBuilder& MetadataBuilder::setControlAfTrigger(
+    const camera_metadata_enum_android_control_af_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AF_TRIGGER] = asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRanges(
+    const std::vector<FpsRange>& fpsRanges) {
+  std::vector<int32_t> ranges;
+  ranges.reserve(2 * fpsRanges.size());
+  for (const FpsRange fpsRange : fpsRanges) {
+    ranges.push_back(fpsRange.minFps);
+    ranges.push_back(fpsRange.maxFps);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] = std::move(ranges);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
+    const int32_t minFps, const int32_t maxFps) {
+  mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
+      std::vector<int32_t>({minFps, maxFps});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeMode(
+    camera_metadata_enum_android_control_ae_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_MODES] = convertTo<uint8_t>(modes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAePrecaptureTrigger(
+    const camera_metadata_enum_android_control_ae_precapture_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER] =
+      asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
+                                                       int32_t maxAwbRegions,
+                                                       int32_t maxAfRegions) {
+  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
+      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableAwbModes(
+    const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes) {
+  mEntryMap[ANDROID_CONTROL_AWB_AVAILABLE_MODES] = convertTo<uint8_t>(awbModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbMode(
+    const camera_metadata_enum_android_control_awb_mode awbMode) {
+  mEntryMap[ANDROID_CONTROL_AWB_MODE] = asVectorOf<uint8_t>(awbMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
+    const bool awbLockAvailable) {
+  const uint8_t lockAvailable = awbLockAvailable
+                                    ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK_AVAILABLE] =
+      std::vector<uint8_t>({lockAvailable});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
+    const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+        antibandingModes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES] =
+      convertTo<uint8_t>(antibandingModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAntibandingMode(
+    const camera_metadata_enum_android_control_ae_antibanding_mode_t
+        antibandingMode) {
+  mEntryMap[ANDROID_CONTROL_AE_ANTIBANDING_MODE] =
+      asVectorOf<uint8_t>(antibandingMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLockAvailable(
+    const bool aeLockAvailable) {
+  const uint8_t lockAvailable = aeLockAvailable
+                                    ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AE_LOCK_AVAILABLE] =
+      asVectorOf<uint8_t>(lockAvailable);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeRegions(
+    const std::vector<ControlRegion>& aeRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * aeRegions.size());
+  for (const ControlRegion& region : aeRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfRegions(
+    const std::vector<ControlRegion>& afRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * afRegions.size());
+  for (const ControlRegion& region : afRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbRegions(
+    const std::vector<ControlRegion>& awbRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * awbRegions.size());
+  for (const ControlRegion& region : awbRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
+    const camera_metadata_enum_android_control_capture_intent_t intent) {
+  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] = asVectorOf<uint8_t>(intent);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCropRegion(const int32_t x, const int32_t y,
+                                                const int32_t width,
+                                                const int32_t height) {
+  mEntryMap[ANDROID_SCALER_CROP_REGION] =
+      std::vector<int32_t>({x, y, width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
+  mEntryMap[ANDROID_JPEG_MAX_SIZE] = asVectorOf<int32_t>(size);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFrameDuration(
+    const std::chrono::nanoseconds duration) {
+  mEntryMap[ANDROID_SENSOR_INFO_MAX_FRAME_DURATION] =
+      asVectorOf<int64_t>(duration.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegAvailableThumbnailSizes(
+    const std::vector<Resolution>& thumbnailSizes) {
+  std::vector<int32_t> sizes;
+  sizes.reserve(thumbnailSizes.size() * 2);
+  for (const Resolution& resolution : thumbnailSizes) {
+    sizes.push_back(resolution.width);
+    sizes.push_back(resolution.height);
+  }
+  mEntryMap[ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES] = std::move(sizes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+                                                       const int height) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
+    const int32_t maxRawStreams, const int32_t maxProcessedStreams,
+    const int32_t maxStallStreams) {
+  mEntryMap[ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS] = std::vector<int32_t>(
+      {maxRawStreams, maxProcessedStreams, maxStallStreams});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
+    camera_metadata_enum_android_sync_max_latency latency) {
+  mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineMaxDepth(const uint8_t maxDepth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_MAX_DEPTH] = asVectorOf<uint8_t>(maxDepth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineDepth(const uint8_t depth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_DEPTH] = asVectorOf<uint8_t>(depth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        requestCapabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(requestCapabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
+    const std::vector<StreamConfiguration>& streamConfigurations) {
+  std::vector<int32_t> metadataStreamConfigs;
+  std::vector<int64_t> metadataMinFrameDurations;
+  std::vector<int64_t> metadataStallDurations;
+  metadataStreamConfigs.reserve(streamConfigurations.size());
+  metadataMinFrameDurations.reserve(streamConfigurations.size());
+  metadataStallDurations.reserve(streamConfigurations.size());
+
+  for (const auto& config : streamConfigurations) {
+    metadataStreamConfigs.push_back(config.format);
+    metadataStreamConfigs.push_back(config.width);
+    metadataStreamConfigs.push_back(config.height);
+    metadataStreamConfigs.push_back(
+        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+
+    metadataMinFrameDurations.push_back(config.format);
+    metadataMinFrameDurations.push_back(config.width);
+    metadataMinFrameDurations.push_back(config.height);
+    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
+
+    metadataStallDurations.push_back(config.format);
+    metadataStallDurations.push_back(config.width);
+    metadataStallDurations.push_back(config.height);
+    metadataStallDurations.push_back(config.minStallDuration.count());
+  }
+
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
+      std::move(metadataStreamConfigs);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
+      std::move(metadataMinFrameDurations);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
+      std::move(metadataStallDurations);
+
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableAberrationCorrectionModes(
+    const std::vector<camera_metadata_enum_android_color_correction_aberration_mode>&
+        aberrationCorrectionModes) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES] =
+      convertTo<uint8_t>(aberrationCorrectionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAberrationCorrectionMode(
+    const camera_metadata_enum_android_color_correction_aberration_mode
+        aberrationCorrectionMode) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_ABERRATION_MODE] =
+      asVectorOf<uint8_t>(aberrationCorrectionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableNoiseReductionModes(
+    const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+        noiseReductionModes) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES] =
+      convertTo<uint8_t>(noiseReductionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
+    camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
+      asVectorOf<uint8_t>(noiseReductionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setRequestPartialResultCount(
+    const int partialResultCount) {
+  mEntryMap[ANDROID_REQUEST_PARTIAL_RESULT_COUNT] =
+      asVectorOf<int32_t>(partialResultCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCroppingType(
+    const camera_metadata_enum_android_scaler_cropping_type croppingType) {
+  mEntryMap[ANDROID_SCALER_CROPPING_TYPE] = asVectorOf<uint8_t>(croppingType);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFaceCount(const int maxFaceCount) {
+  mEntryMap[ANDROID_STATISTICS_INFO_MAX_FACE_COUNT] =
+      asVectorOf<int32_t>(maxFaceCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
+      asVectorOf<float>(maxZoom);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
+                                                           const float max) {
+  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
+                                                           int x1, int y1) {
+  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
+      std::vector<int32_t>({x0, y0, x1, y1});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPixelArraySize(int width,
+                                                          int height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE] =
+      std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPhysicalSize(float width,
+                                                        float height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PHYSICAL_SIZE] =
+      std::vector<float>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
+                                                                int32_t max) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
+      std::vector<int32_t>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
+    const camera_metadata_rational step) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
+      asVectorOf<camera_metadata_rational>(step);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeExposureCompensation(
+    const int32_t exposureCompensation) {
+  mEntryMap[ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION] =
+      asVectorOf<int32_t>(exposureCompensation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        capabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(capabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
+    const std::vector<camera_metadata_tag_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
+      convertTo<int32_t>(keys);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
+  mExtendWithAvailableCharacteristicsKeys = true;
+  return *this;
+}
+
+std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+MetadataBuilder::build() {
+  if (mExtendWithAvailableCharacteristicsKeys) {
+    std::vector<camera_metadata_tag_t> availableKeys;
+    availableKeys.reserve(mEntryMap.size());
+    for (const auto& [key, _] : mEntryMap) {
+      if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
+        availableKeys.push_back(key);
+      }
+    }
+    setAvailableCharacteristicKeys(availableKeys);
+  }
+
+  CameraMetadata metadataHelper;
+  for (const auto& entry : mEntryMap) {
+    status_t ret = std::visit(
+        [&](auto&& arg) {
+          return metadataHelper.update(entry.first, arg.data(), arg.size());
+        },
+        entry.second);
+    if (ret != NO_ERROR) {
+      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
+            get_camera_metadata_tag_name(entry.first),
+            ::android::statusToString(ret).c_str());
+      return nullptr;
+    }
+  }
+
+  const camera_metadata_t* metadata = metadataHelper.getAndLock();
+  if (metadata == nullptr) {
+    ALOGE(
+        "Failure when constructing metadata -> CameraMetadata helper returned "
+        "nullptr");
+    return nullptr;
+  }
+
+  auto aidlMetadata =
+      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
+  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
+  aidlMetadata->metadata.assign(data_ptr,
+                                data_ptr + get_camera_metadata_size(metadata));
+  metadataHelper.unlock(metadata);
+
+  return aidlMetadata;
+}
+
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+    return {};
+  }
+
+  std::vector<Resolution> thumbnailSizes;
+  thumbnailSizes.reserve(entry.count / 2);
+  for (int i = 0; i < entry.count; i += 2) {
+    thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+  }
+  return thumbnailSizes;
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
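To illustrate how the builder in this file is meant to be driven, a minimal sketch with arbitrary example values (not taken from this patch):

// Build a small characteristics blob; build() returns nullptr on failure.
std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata> metadata =
    MetadataBuilder()
        .setSupportedHardwareLevel(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
        .setFlashAvailable(false)
        .setSensorOrientation(0)
        .setControlAeTargetFpsRange(15, 30)
        .setAvailableCharacteristicKeys()
        .build();
if (metadata == nullptr) {
  ALOGE("Failed to build camera metadata");
}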
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
new file mode 100644
index 0000000..b4d60cb
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -0,0 +1,417 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+
+#include <chrono>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <variant>
+#include <vector>
+
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Convenience builder for the
+// aidl::android::hardware::camera::device::CameraMetadata.
+//
+// Calling the same builder setter multiple times will overwrite the previous value.
+// This class is not thread-safe.
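+//
+// A minimal usage sketch (illustrative only; the setters and tag values shown
+// here are examples; the full set depends on the camera being emulated):
+//
+//   std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+//       metadata = MetadataBuilder()
+//                      .setSupportedHardwareLevel(
+//                          ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+//                      .setFlashAvailable(false)
+//                      .setSensorOrientation(0)
+//                      .setAvailableCharacteristicKeys()
+//                      .build();  // Returns nullptr on failure.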
+class MetadataBuilder {
+ public:
+  struct StreamConfiguration {
+    int32_t width = 0;
+    int32_t height = 0;
+    int32_t format = 0;
+    // Minimal frame duration - corresponds to maximal FPS for given format.
+    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minFrameDuration{0};
+    // Minimal stall duration.
+    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minStallDuration{0};
+  };
+
+  struct ControlRegion {
+    int32_t x0 = 0;
+    int32_t y0 = 0;
+    int32_t x1 = 0;
+    int32_t y1 = 0;
+    int32_t weight = 0;
+  };
+
+  struct FpsRange {
+    int32_t minFps;
+    int32_t maxFps;
+
+    bool operator<(const FpsRange& other) const {
+      return maxFps == other.maxFps ? minFps < other.minFps
+                                    : maxFps < other.maxFps;
+    }
+  };
+
+  MetadataBuilder() = default;
+  ~MetadataBuilder() = default;
+
+  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
+  MetadataBuilder& setSupportedHardwareLevel(
+      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
+
+  // Whether this camera device has a flash unit
+  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setFlashAvailable(bool flashAvailable);
+
+  // See FLASH_STATE in CaptureResult.java.
+  MetadataBuilder& setFlashState(
+      camera_metadata_enum_android_flash_state_t flashState);
+
+  // See FLASH_MODE in CaptureRequest.java.
+  MetadataBuilder& setFlashMode(
+      camera_metadata_enum_android_flash_mode_t flashMode);
+
+  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensFacing(
+      camera_metadata_enum_android_lens_facing lensFacing);
+
+  // See ANDROID_SENSOR_READOUT_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorReadoutTimestamp(
+      camera_metadata_enum_android_sensor_readout_timestamp_t
+          sensorReadoutTimestamp);
+
+  // See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFocalLengths(
+      const std::vector<float>& focalLengths);
+
+  // See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
+  MetadataBuilder& setFocalLength(float focalLength);
+
+  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
+
+  // Time at start of exposure of first row of the image
+  // sensor active array, in nanoseconds.
+  //
+  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
+
+  // See SENSOR_INFO_TIMESTAMP_SOURCE in CameraCharacteristics.java.
+  MetadataBuilder& setSensorTimestampSource(
+      camera_metadata_enum_android_sensor_info_timestamp_source_t timestampSource);
+
+  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
+
+  // See ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPixelArraySize(int width, int height);
+
+  // See ANDROID_SENSOR_INFO_PHYSICAL_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPhysicalSize(float width, float height);
+
+  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFaceDetectModes(
+      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+          faceDetectMode);
+
+  // See SENSOR_AVAILABLE_TEST_PATTERN_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableTestPatternModes(
+      const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+          testPatternModes);
+
+  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setFaceDetectMode(
+      camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
+
+  // Sets available stream configurations along with corresponding minimal frame
+  // durations (corresponding to max fps) and stall durations.
+  //
+  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
+  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableOutputStreamConfigurations(
+      const std::vector<StreamConfiguration>& streamConfigurations);
+
+  // See COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableAberrationCorrectionModes(
+      const std::vector<
+          camera_metadata_enum_android_color_correction_aberration_mode>&
+          aberrationCorrectionModes);
+
+  // See COLOR_CORRECTION_ABERRATION_MODE in CaptureRequest.java.
+  MetadataBuilder& setAberrationCorrectionMode(
+      camera_metadata_enum_android_color_correction_aberration_mode
+          aberrationCorrectionMode);
+
+  // See NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableNoiseReductionModes(
+      const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+          noiseReductionModes);
+
+  // See NOISE_REDUCTION_MODE in CaptureRequest.java.
+  MetadataBuilder& setNoiseReductionMode(
+      camera_metadata_enum_android_noise_reduction_mode noiseReductionMode);
+
+  // See REQUEST_PARTIAL_RESULT_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setRequestPartialResultCount(int partialResultCount);
+
+  // See SCALER_CROPPING_TYPE in CameraCharacteristics.java.
+  MetadataBuilder& setCroppingType(
+      camera_metadata_enum_android_scaler_cropping_type croppingType);
+
+  // See STATISTICS_INFO_MAX_FACE_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFaceCount(int maxFaceCount);
+
+  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlMode(
+      camera_metadata_enum_android_control_mode_t mode);
+
+  // See ANDROID_CONTROL_AVAILABLE_SCENE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableSceneModes(
+      const std::vector<camera_metadata_enum_android_control_scene_mode>&
+          availableSceneModes);
+
+  // See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableEffects(
+      const std::vector<camera_metadata_enum_android_control_effect_mode>&
+          availableEffects);
+
+  // See CONTROL_EFFECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlEffectMode(
+      camera_metadata_enum_android_control_effect_mode_t effectMode);
+
+  // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES
+  MetadataBuilder& setControlAvailableVideoStabilizationModes(
+      const std::vector<
+          camera_metadata_enum_android_control_video_stabilization_mode_t>&
+          videoStabilizationModes);
+
+  // See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableAntibandingModes(
+      const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+          antibandingModes);
+
+  // See CONTROL_AE_ANTIBANDING_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeAntibandingMode(
+      camera_metadata_enum_android_control_ae_antibanding_mode_t antibandingMode);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
+
+  // See ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeExposureCompensation(int32_t exposureCompensation);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes);
+
+  // See ANDROID_CONTROL_AE_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeMode(
+      camera_metadata_enum_android_control_ae_mode_t mode);
+
+  // See ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER in CaptureRequest.java.
+  MetadataBuilder& setControlAePrecaptureTrigger(
+      camera_metadata_enum_android_control_ae_precapture_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfMode(
+      const camera_metadata_enum_android_control_af_mode_t mode);
+
+  // See ANDROID_CONTROL_AF_TRIGGER_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfTrigger(
+      const camera_metadata_enum_android_control_af_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeAvailableFpsRanges(
+      const std::vector<FpsRange>& fpsRanges);
+
+  // See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
+  MetadataBuilder& setControlAeTargetFpsRange(int32_t min, int32_t max);
+
+  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlCaptureIntent(
+      camera_metadata_enum_android_control_capture_intent_t intent);
+
+  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
+                                        int32_t maxAwbRegions,
+                                        int32_t maxAfRegions);
+
+  // See ANDROID_CONTROL_AWB_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableAwbModes(
+      const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes);
+
+  // See ANDROID_CONTROL_AWB_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAwbMode(
+      camera_metadata_enum_android_control_awb_mode awb);
+
+  // See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
+
+  // See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
+
+  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeRegions(
+      const std::vector<ControlRegion>& aeRegions);
+
+  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbRegions(
+      const std::vector<ControlRegion>& awbRegions);
+
+  // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
+  MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
+                                 int32_t height);
+
+  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfRegions(
+      const std::vector<ControlRegion>& afRegions);
+
+  // The size of the compressed JPEG image, in bytes.
+  //
+  // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxJpegSize(int32_t size);
+
+  // See SENSOR_INFO_MAX_FRAME_DURATION in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFrameDuration(std::chrono::nanoseconds duration);
+
+  // See JPEG_AVAILABLE_THUMBNAIL_SIZES in CameraCharacteristics.java.
+  MetadataBuilder& setJpegAvailableThumbnailSizes(
+      const std::vector<Resolution>& thumbnailSizes);
+
+  // See JPEG_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegQuality(uint8_t quality);
+
+  // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+  // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
+  // The maximum numbers of different types of output streams
+  // that can be configured and used simultaneously by a camera device.
+  //
+  // See ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxNumberOutputStreams(int32_t maxRawStreams,
+                                             int32_t maxProcessedStreams,
+                                             int32_t maxStallStreams);
+
+  // See ANDROID_SYNC_MAX_LATENCY in CameraMetadataTag.aidl.
+  MetadataBuilder& setSyncMaxLatency(
+      camera_metadata_enum_android_sync_max_latency syncMaxLatency);
+
+  // See REQUEST_PIPELINE_MAX_DEPTH in CameraCharacteristics.java.
+  MetadataBuilder& setPipelineMaxDepth(uint8_t maxDepth);
+
+  // See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+  MetadataBuilder& setPipelineDepth(uint8_t depth);
+
+  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
+
+  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlZoomRatioRange(float min, float max);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          requestCapabilities);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureRequest.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureResult.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_RESULT_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          capabilities);
+
+  // A list of all keys that the camera device has available to use.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCharacteristicKeys(
+      const std::vector<camera_metadata_tag_t>& keys);
+
+  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
+  // containing all set tags.
+  MetadataBuilder& setAvailableCharacteristicKeys();
+
+  // Build CameraMetadata instance.
+  //
+  // Returns nullptr in case something went wrong.
+  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
+  build();
+
+ private:
+  // Maps metadata tags to vectors of values for the given tag.
+  std::map<camera_metadata_tag_t,
+           std::variant<std::vector<int64_t>, std::vector<int32_t>,
+                        std::vector<uint8_t>, std::vector<float>,
+                        std::vector<camera_metadata_rational_t>>>
+      mEntryMap;
+  // Extend metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
+  bool mExtendWithAvailableCharacteristicsKeys = false;
+};
+
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or nullopt if the key
+// is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
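+
+// A minimal usage sketch (illustrative only; `requestSettings` stands for
+// request metadata received from the camera framework):
+//
+//   int32_t quality = getJpegQuality(requestSettings).value_or(95);
+//   Resolution thumbnailSize =
+//       getJpegThumbnailSize(requestSettings).value_or(Resolution(0, 0));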
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index df771b1..b2048bc 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -20,8 +20,13 @@
 
 #include <algorithm>
 #include <array>
+#include <cstdint>
+#include <memory>
 
+#include "android/hardware_buffer.h"
 #include "jpeglib.h"
+#include "ui/GraphicBuffer.h"
+#include "utils/Errors.h"
 
 namespace android {
 namespace companion {
@@ -35,10 +40,87 @@
 // TODO(b/301023410) - Query actual max texture size.
 constexpr int kMaxTextureSize = 2048;
 constexpr int kLibJpegDctSize = DCTSIZE;
+constexpr int kMaxFpsUpperLimit = 60;
 
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
                                                   Format::RGBA_8888};
 
+YCbCrLockGuard::YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                               const uint32_t usageFlags)
+    : mHwBuffer(hwBuffer) {
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+  mLockStatus = gBuffer->lockYCbCr(usageFlags, &mYCbCr);
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+YCbCrLockGuard::~YCbCrLockGuard() {
+  if (getStatus() != OK) {
+    return;
+  }
+
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    return;
+  }
+  status_t status = gBuffer->unlock();
+  if (status != NO_ERROR) {
+    ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
+  }
+}
+
+status_t YCbCrLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const android_ycbcr& YCbCrLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked YCbCrLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mYCbCr;
+}
+
+PlanesLockGuard::PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                                 const uint64_t usageFlags, sp<Fence> fence)
+    : mHwBuffer(hwBuffer) {  // Keep the buffer so the destructor can unlock it.
+  if (mHwBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+
+  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
+      mHwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+PlanesLockGuard::~PlanesLockGuard() {
+  if (getStatus() != OK || mHwBuffer == nullptr) {
+    return;
+  }
+  AHardwareBuffer_unlock(mHwBuffer.get(), /*fence=*/nullptr);
+}
+
+status_t PlanesLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const AHardwareBuffer_Planes& PlanesLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked PlanesLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mPlanes;
+}
+
 sp<Fence> importFence(const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.size() != 1) {
     return sp<Fence>::make();
@@ -54,7 +136,7 @@
 
 // Returns true if specified format is supported for virtual camera input.
 bool isFormatSupportedForInput(const int width, const int height,
-                               const Format format) {
+                               const Format format, const int maxFps) {
   if (!isPixelFormatSupportedForInput(format)) {
     return false;
   }
@@ -71,9 +153,17 @@
     return false;
   }
 
+  if (maxFps <= 0 || maxFps > kMaxFpsUpperLimit) {
+    return false;
+  }
+
   return true;
 }
 
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+  return os << resolution.width << "x" << resolution.height;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index a73c99b..faae010 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -17,18 +17,82 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 
+#include <cmath>
 #include <cstdint>
+#include <memory>
 
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "android/binder_auto_utils.h"
+#include "android/hardware_buffer.h"
+#include "system/graphics.h"
 #include "ui/Fence.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
+// RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
+// structure describing YUV plane layout.
+//
+// Access to the buffer is locked immediately after construction.
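+//
+// A minimal usage sketch (illustrative only; `hwBuffer` is assumed to be a
+// valid std::shared_ptr<AHardwareBuffer> owned by the caller):
+//
+//   YCbCrLockGuard lock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+//   if (lock.getStatus() == OK) {
+//     const android_ycbcr& ycbcr = *lock;
+//     // Read the planes via ycbcr.y, ycbcr.cb and ycbcr.cr.
+//   }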
+class YCbCrLockGuard {
+ public:
+  YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer, uint32_t usageFlags);
+  YCbCrLockGuard(YCbCrLockGuard&& other) = default;
+  ~YCbCrLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the android_ycbcr structure
+  // describing the layout.
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const android_ycbcr& operator*() const;
+
+  // Disable copy.
+  YCbCrLockGuard(const YCbCrLockGuard&) = delete;
+  YCbCrLockGuard& operator=(const YCbCrLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  android_ycbcr mYCbCr = {};
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
+// RAII utility class to safely lock AHardwareBuffer and obtain
+// AHardwareBuffer_Planes (suitable for interacting with RGBA / BLOB buffers).
+//
+// Access to the buffer is locked immediately after construction.
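+//
+// A minimal usage sketch (illustrative only; `hwBuffer` is assumed to be a
+// valid std::shared_ptr<AHardwareBuffer> owned by the caller):
+//
+//   PlanesLockGuard lock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+//   if (lock.getStatus() == OK) {
+//     const AHardwareBuffer_Planes& planes = *lock;
+//     // Access planes.planes[0].data / rowStride / pixelStride.
+//   }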
+class PlanesLockGuard {
+ public:
+  PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                  uint64_t usageFlags, sp<Fence> fence = nullptr);
+  PlanesLockGuard(PlanesLockGuard&& other) = default;
+  ~PlanesLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the AHardwareBuffer_Planes
+  // structure describing the layout.
+  //
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const AHardwareBuffer_Planes& operator*() const;
+
+  // Disable copy.
+  PlanesLockGuard(const PlanesLockGuard&) = delete;
+  PlanesLockGuard& operator=(const PlanesLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  AHardwareBuffer_Planes mPlanes;
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
 // Converts camera AIDL status to ndk::ScopedAStatus
 inline ndk::ScopedAStatus cameraStatus(
     const ::aidl::android::hardware::camera::common::Status status) {
@@ -50,7 +114,46 @@
 // Returns true if specified format is supported for virtual camera input.
 bool isFormatSupportedForInput(
     int width, int height,
-    ::aidl::android::companion::virtualcamera::Format format);
+    ::aidl::android::companion::virtualcamera::Format format, int maxFps);
+
+// Representation of resolution / size.
+struct Resolution {
+  Resolution() = default;
+  Resolution(const int w, const int h) : width(w), height(h) {
+  }
+
+  // Order by increasing pixel count, and by width for same pixel count.
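+  // For example Resolution(320, 240) < Resolution(640, 480); for equal pixel
+  // counts the smaller width orders first, so Resolution(480, 640) <
+  // Resolution(640, 480).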
+  bool operator<(const Resolution& other) const {
+    const int pixCount = width * height;
+    const int otherPixCount = other.width * other.height;
+    return pixCount == otherPixCount ? width < other.width
+                                     : pixCount < otherPixCount;
+  }
+
+  bool operator<=(const Resolution& other) const {
+    return *this == other || *this < other;
+  }
+
+  bool operator==(const Resolution& other) const {
+    return width == other.width && height == other.height;
+  }
+
+  int width = 0;
+  int height = 0;
+};
+
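+// Returns true when the two resolutions have approximately the same aspect
+// ratio, e.g. Resolution(1920, 1080) and Resolution(1280, 720) are both ~16:9.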
+inline bool isApproximatellySameAspectRatio(const Resolution r1,
+                                            const Resolution r2) {
+  static constexpr float kAspectRatioEpsilon = 0.05;
+  float aspectRatio1 =
+      static_cast<float>(r1.width) / static_cast<float>(r1.height);
+  float aspectRatio2 =
+      static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+  return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
 
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index a2f17c2..506b3bc 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -95,31 +95,6 @@
         "android.hidl.memory@1.0",
     ],
 
-    runtime_libs: [
-        "libstagefright_soft_aacdec",
-        "libstagefright_soft_aacenc",
-        "libstagefright_soft_amrdec",
-        "libstagefright_soft_amrnbenc",
-        "libstagefright_soft_amrwbenc",
-        "libstagefright_soft_avcdec",
-        "libstagefright_soft_avcenc",
-        "libstagefright_soft_flacdec",
-        "libstagefright_soft_flacenc",
-        "libstagefright_soft_g711dec",
-        "libstagefright_soft_gsmdec",
-        "libstagefright_soft_hevcdec",
-        "libstagefright_soft_mp3dec",
-        "libstagefright_soft_mpeg2dec",
-        "libstagefright_soft_mpeg4dec",
-        "libstagefright_soft_mpeg4enc",
-        "libstagefright_soft_opusdec",
-        "libstagefright_soft_rawdec",
-        "libstagefright_soft_vorbisdec",
-        "libstagefright_soft_vpxdec",
-        "libstagefright_soft_vpxenc",
-        "libstagefright_softomx_plugin",
-    ],
-
     // OMX interfaces force this to stay in 32-bit mode;
     compile_multilib: "32",
 
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
index a55c3eb..0c6aafd 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
@@ -52,6 +52,9 @@
 getdents64: 1
 ppoll: 1
 
+clock_gettime: 1
+pipe2: 1
+
 # Required by AddressSanitizer
 gettid: 1
 sched_yield: 1
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..bf90f43 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 630a436..7dc445b 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -549,7 +549,7 @@
 
     int slot = 1;
     std::stringstream ss;
-    ss << "AudioPowerUsage:\n";
+    ss << "AudioPowerUsage interval " << mIntervalHours << " hours:\n";
     for (const auto &item : mItems) {
         if (slot >= limit - 1) {
             ss << "-- AudioPowerUsage may be truncated!\n";
diff --git a/services/mediametrics/include/mediametricsservice/TimedAction.h b/services/mediametrics/include/mediametricsservice/TimedAction.h
index c7ef585..8b53ded 100644
--- a/services/mediametrics/include/mediametricsservice/TimedAction.h
+++ b/services/mediametrics/include/mediametricsservice/TimedAction.h
@@ -25,6 +25,12 @@
 namespace android::mediametrics {
 
 class TimedAction {
+    // Use system_clock instead of steady_clock to include suspend time.
+    using TimerClock = class std::chrono::system_clock;
+
+    // Define the wakeup granularity to prevent delayed events if the
+    // device is suspended.
+    static constexpr auto kWakeupInterval = std::chrono::minutes(3);
 public:
     TimedAction() : mThread{[this](){threadLoop();}} {}
 
@@ -35,7 +41,7 @@
     // TODO: return a handle for cancelling the action?
     template <typename T> // T is in units of std::chrono::duration.
     void postIn(const T& time, std::function<void()> f) {
-        postAt(std::chrono::steady_clock::now() + time, f);
+        postAt(TimerClock::now() + time, f);
     }
 
     template <typename T> // T is in units of std::chrono::time_point
@@ -75,16 +81,21 @@
     void threadLoop() NO_THREAD_SAFETY_ANALYSIS { // thread safety doesn't cover unique_lock
         std::unique_lock l(mLock);
         while (!mQuit) {
-            auto sleepUntilTime = std::chrono::time_point<std::chrono::steady_clock>::max();
+            auto sleepUntilTime = std::chrono::time_point<TimerClock>::max();
             if (!mMap.empty()) {
                 sleepUntilTime = mMap.begin()->first;
-                if (sleepUntilTime <= std::chrono::steady_clock::now()) {
+                const auto now = TimerClock::now();
+                if (sleepUntilTime <= now) {
                     auto node = mMap.extract(mMap.begin()); // removes from mMap.
                     l.unlock();
                     node.mapped()();
                     l.lock();
                     continue;
                 }
+                // Bionic uses CLOCK_MONOTONIC for its pthread_mutex regardless
+                // of the REALTIME specification, so use kWakeupInterval to ensure
+                // a minimum wakeup granularity if the device is suspended.
+                sleepUntilTime = std::min(sleepUntilTime, now + kWakeupInterval);
             }
             mCondition.wait_until(l, sleepUntilTime);
         }
@@ -93,7 +104,7 @@
     mutable std::mutex mLock;
     std::condition_variable mCondition GUARDED_BY(mLock);
     bool mQuit GUARDED_BY(mLock) = false;
-    std::multimap<std::chrono::time_point<std::chrono::steady_clock>, std::function<void()>>
+    std::multimap<std::chrono::time_point<TimerClock>, std::function<void()>>
             mMap GUARDED_BY(mLock); // multiple functions could execute at the same time.
 
     // needs to be initialized after the variables above, done in constructor initializer list.
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 73a96e9..7f66859 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -74,10 +74,15 @@
     name: "libresourcemanagerservice",
 
     srcs: [
+        "ClientImportanceReclaimPolicy.cpp",
+        "DefaultResourceModel.cpp",
+        "ProcessPriorityReclaimPolicy.cpp",
         "ResourceManagerMetrics.cpp",
         "ResourceManagerService.cpp",
+        "ResourceManagerServiceNew.cpp",
         "ResourceObserverService.cpp",
         "ResourceManagerServiceUtils.cpp",
+        "ResourceTracker.cpp",
         "ServiceLog.cpp",
         "UidObserver.cpp",
 
@@ -97,6 +102,7 @@
         "libstatssocket",
         "libprotobuf-cpp-lite",
         "libactivitymanager_aidl",
+        "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
diff --git a/services/mediaresourcemanager/ClientImportanceReclaimPolicy.cpp b/services/mediaresourcemanager/ClientImportanceReclaimPolicy.cpp
new file mode 100644
index 0000000..a81b32f
--- /dev/null
+++ b/services/mediaresourcemanager/ClientImportanceReclaimPolicy.cpp
@@ -0,0 +1,88 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ClientImportanceReclaimPolicy"
+#include <utils/Log.h>
+
+#include "ResourceTracker.h"
+#include "ResourceManagerService.h"
+#include "ClientImportanceReclaimPolicy.h"
+
+namespace android {
+
+using aidl::android::media::IResourceManagerClient;
+
+ClientImportanceReclaimPolicy::ClientImportanceReclaimPolicy(
+        const std::shared_ptr<ResourceTracker>& resourceTracker)
+    : mResourceTracker(resourceTracker) {
+}
+
+ClientImportanceReclaimPolicy::~ClientImportanceReclaimPolicy() {
+}
+
+// Find the biggest client, with lower importance than that of the requesting
+// client, from the same process.
+bool ClientImportanceReclaimPolicy::getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                                              const std::vector<ClientInfo>& clients,
+                                              std::vector<ClientInfo>& targetClients) {
+    pid_t callingPid = reclaimRequestInfo.mCallingPid;
+    int32_t callingImportance = reclaimRequestInfo.mCallingClientImportance;
+    MediaResource::Type type = reclaimRequestInfo.mResources[0].type;
+    MediaResource::SubType subType = reclaimRequestInfo.mResources[0].subType;
+    ClientInfo targetClient;
+    // Look to find the biggest client with lowest importance from the same process that
+    // has the other resources and with the given primary type.
+    bool found = false;
+    MediaResource::SubType primarySubType = subType;
+    for (size_t index = 1; !found && (index < reclaimRequestInfo.mResources.size()); index++) {
+        MediaResource::Type type = reclaimRequestInfo.mResources[index].type;
+        MediaResource::SubType subType = reclaimRequestInfo.mResources[index].subType;
+        found = mResourceTracker->getLeastImportantBiggestClient(
+            callingPid, callingImportance,
+            type, subType, primarySubType,
+            clients, targetClient);
+    }
+    // If no success, then select the biggest client of primary type with lowest importance
+    // from the same process.
+    if (!found) {
+        found = mResourceTracker->getLeastImportantBiggestClient(
+            callingPid, callingImportance,
+            type, subType, MediaResource::SubType::kUnspecifiedSubType,
+            clients, targetClient);
+    }
+    // If we haven't found a client yet, then select the biggest client of different type
+    // with lowest importance from the same process.
+    // This is applicable for codec type only.
+    if (!found) {
+        if (type != MediaResource::Type::kSecureCodec &&
+            type != MediaResource::Type::kNonSecureCodec) {
+            return false;
+        }
+        MediaResourceType otherType = (type == MediaResource::Type::kSecureCodec) ?
+            MediaResource::Type::kNonSecureCodec : MediaResource::Type::kSecureCodec;
+        if (!mResourceTracker->getLeastImportantBiggestClient(
+            callingPid, callingImportance,
+            otherType, subType, MediaResource::SubType::kUnspecifiedSubType,
+            clients, targetClient)) {
+            return false;
+        }
+    }
+    targetClients.emplace_back(targetClient);
+    return true;
+}
+} // namespace android
diff --git a/services/mediaresourcemanager/ClientImportanceReclaimPolicy.h b/services/mediaresourcemanager/ClientImportanceReclaimPolicy.h
new file mode 100644
index 0000000..1a54c7d
--- /dev/null
+++ b/services/mediaresourcemanager/ClientImportanceReclaimPolicy.h
@@ -0,0 +1,64 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_CLIENTIMPORTANCERECLAIMPOLICY_H_
+#define ANDROID_MEDIA_CLIENTIMPORTANCERECLAIMPOLICY_H_
+
+#include <media/MediaResource.h>
+#include "IReclaimPolicy.h"
+
+namespace android {
+
+class ResourceTracker;
+struct ClientInfo;
+
+/*
+ * Implementation of Reclaim Policy based on the client's importance.
+ *
+ * Find the least important client (other than the requesting client) within
+ * the process that is requesting the resource.
+ * If multiple clients share the lowest importance, then pick the biggest
+ * client among them.
+ *
+ */
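+// A minimal usage sketch (illustrative only; `resourceTracker`,
+// `reclaimRequestInfo` and `candidateClients` are assumed to be provided by
+// the resource manager service):
+//
+//   ClientImportanceReclaimPolicy policy(resourceTracker);
+//   std::vector<ClientInfo> targets;
+//   if (policy.getClients(reclaimRequestInfo, candidateClients, targets)) {
+//     // `targets` holds the selected client(s) to reclaim from.
+//   }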
+class ClientImportanceReclaimPolicy : public IReclaimPolicy {
+public:
+    explicit ClientImportanceReclaimPolicy(const std::shared_ptr<ResourceTracker>& resourceTracker);
+
+    virtual ~ClientImportanceReclaimPolicy();
+
+    /*
+     * Based on the client importance, identify and return the least important client of
+     * the requesting process from the list of given clients that satisfy the resource requested.
+     *
+     * @param[in]  reclaimRequestInfo Information about the resource request
+     * @param[in]  clients List of clients to select from.
+     * @param[out] targetClients Upon success, this will have the list of identified client(s).
+     *
+     * @return true on success, false otherwise
+     */
+    bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                    const std::vector<ClientInfo>& clients,
+                    std::vector<ClientInfo>& targetClients) override;
+
+private:
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_CLIENTIMPORTANCERECLAIMPOLICY_H_
diff --git a/services/mediaresourcemanager/DefaultResourceModel.cpp b/services/mediaresourcemanager/DefaultResourceModel.cpp
new file mode 100644
index 0000000..990df82
--- /dev/null
+++ b/services/mediaresourcemanager/DefaultResourceModel.cpp
@@ -0,0 +1,149 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DefaultResourceModel"
+#include <utils/Log.h>
+
+#include "ResourceManagerServiceUtils.h"
+#include "DefaultResourceModel.h"
+#include "ResourceTracker.h"
+
+namespace android {
+
+DefaultResourceModel::DefaultResourceModel(
+        const std::shared_ptr<ResourceTracker>& resourceTracker,
+        bool supportsMultipleSecureCodecs,
+        bool supportsSecureWithNonSecureCodec)
+    : mSupportsMultipleSecureCodecs(supportsMultipleSecureCodecs),
+      mSupportsSecureWithNonSecureCodec(supportsSecureWithNonSecureCodec),
+      mResourceTracker(resourceTracker) {
+}
+
+DefaultResourceModel::~DefaultResourceModel() {
+}
+
+bool DefaultResourceModel::getAllClients(
+        const ReclaimRequestInfo& reclimRequestInfo,
+        std::vector<ClientInfo>& clients) {
+
+    clients.clear();
+    MediaResourceParcel mediaResource{.type = reclimRequestInfo.mResources[0].type,
+                                      .subType = reclimRequestInfo.mResources[0].subType};
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+                                            reclimRequestInfo.mClientId,
+                                            &mediaResource};
+
+    // Resolve the secure-unsecure codec conflicts if there is any.
+    switch (reclimRequestInfo.mResources[0].type) {
+    case MediaResource::Type::kSecureCodec:
+        // Looking to start a secure codec.
+        // #1. Make sure if multiple secure codecs can coexist
+        if (!mSupportsMultipleSecureCodecs) {
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        // #2. Make sure a secure codec can coexist if there is an instance
+        // of non-secure codec running already.
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kNonSecureCodec;
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a non-secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        break;
+    case MediaResource::Type::kNonSecureCodec:
+        // Looking to start a non-secure codec.
+        // Make sure a non-secure codec can coexist if there is an instance
+        // of secure codec running already.
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kSecureCodec;
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        break;
+    default:
+        break;
+    }
+
+    if (!clients.empty()) {
+        // There is secure/unsecure codec co-existence conflict
+        // and we have only found processes with lower priority holding the
+        // resources. So, all of these need to be reclaimed.
+        return false;
+    }
+
+    // No more resource conflicts.
+    switch (reclimRequestInfo.mResources[0].type) {
+    case MediaResource::Type::kSecureCodec:
+    case MediaResource::Type::kNonSecureCodec:
+        // Handling Codec resource reclaim
+        return getCodecClients(reclimRequestInfo, clients);
+    case MediaResource::Type::kGraphicMemory:
+    case MediaResource::Type::kDrmSession:
+        // Handling DRM and GraphicMemory resource reclaim
+        mediaResource.id = reclimRequestInfo.mResources[0].id;
+        mediaResource.value = reclimRequestInfo.mResources[0].value;
+        return mResourceTracker->getAllClients(resourceRequestInfo, clients);
+    default:
+        break;
+    }
+
+    return !clients.empty();
+}
+
+bool DefaultResourceModel::getCodecClients(
+        const ReclaimRequestInfo& reclimRequestInfo,
+        std::vector<ClientInfo>& clients) {
+    MediaResourceParcel mediaResource;
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+                                            reclimRequestInfo.mClientId,
+                                            &mediaResource};
+
+    // 1. Look to find the client(s) with the other resources, for the given
+    // primary type.
+    MediaResource::SubType primarySubType = reclimRequestInfo.mResources[0].subType;
+    for (size_t index = 1; index < reclimRequestInfo.mResources.size(); index++) {
+        mediaResource.type = reclimRequestInfo.mResources[index].type;
+        mediaResource.subType = reclimRequestInfo.mResources[index].subType;
+        mResourceTracker->getAllClients(resourceRequestInfo, clients, primarySubType);
+    }
+
+    // 2. Get all clients of the same type.
+    mediaResource.type = reclimRequestInfo.mResources[0].type;
+    mediaResource.subType = reclimRequestInfo.mResources[0].subType;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    // 3. Get all clients of the other type.
+    MediaResourceType otherType =
+        (reclimRequestInfo.mResources[0].type == MediaResource::Type::kSecureCodec) ?
+        MediaResource::Type::kNonSecureCodec : MediaResource::Type::kSecureCodec;
+    mediaResource.type = otherType;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    return !clients.empty();
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/DefaultResourceModel.h b/services/mediaresourcemanager/DefaultResourceModel.h
new file mode 100644
index 0000000..1891eda
--- /dev/null
+++ b/services/mediaresourcemanager/DefaultResourceModel.h
@@ -0,0 +1,73 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
+#define ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
+
+#include "IResourceModel.h"
+
+namespace android {
+
+class ResourceTracker;
+
+/*
+ * Implements the Default Resource Model that handles:
+ *   - coexistence of a secure codec with other secure/non-secure codecs
+ *   - sharing resources among other codecs
+ */
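+// A minimal usage sketch (illustrative only; `resourceTracker` and
+// `reclaimRequestInfo` are assumed to be provided by the resource manager
+// service):
+//
+//   DefaultResourceModel model(resourceTracker,
+//                              /*supportsMultipleSecureCodecs=*/true,
+//                              /*supportsSecureWithNonSecureCodec=*/true);
+//   std::vector<ClientInfo> clients;
+//   bool noConflicts = model.getAllClients(reclaimRequestInfo, clients);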
+class DefaultResourceModel : public IResourceModel {
+public:
+    DefaultResourceModel(const std::shared_ptr<ResourceTracker>& resourceTracker,
+                         bool supportsMultipleSecureCodecs = true,
+                         bool supportsSecureWithNonSecureCodec = true);
+    virtual ~DefaultResourceModel();
+
+    /*
+     * Set the codec co-existence properties
+     */
+    void config(bool supportsMultipleSecureCodecs, bool supportsSecureWithNonSecureCodec) {
+        mSupportsMultipleSecureCodecs = supportsMultipleSecureCodecs;
+        mSupportsSecureWithNonSecureCodec = supportsSecureWithNonSecureCodec;
+    }
+
+    /*
+     * Get a list of all clients that hold the requested resources.
+     * This implementation uses the ResourceModel to select the clients.
+     *
+     * @param[in]  reclaimRequestInfo Information about the Reclaim request
+     * @param[out] clients The list of clients that hold the resources in question.
+     *
+     * @return true if there aren't any resource conflicts and false otherwise.
+     */
+    bool getAllClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                       std::vector<ClientInfo>& clients) override;
+
+protected:
+    bool getCodecClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                         std::vector<ClientInfo>& clients);
+
+protected:
+    // Keeping these protected to allow extending this implementation
+    // by other resource models.
+    bool mSupportsMultipleSecureCodecs;
+    bool mSupportsSecureWithNonSecureCodec;
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
diff --git a/services/mediaresourcemanager/IReclaimPolicy.h b/services/mediaresourcemanager/IReclaimPolicy.h
new file mode 100644
index 0000000..dfbfc12
--- /dev/null
+++ b/services/mediaresourcemanager/IReclaimPolicy.h
@@ -0,0 +1,58 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_IRECLAIMPOLICY_H_
+#define ANDROID_MEDIA_IRECLAIMPOLICY_H_
+
+#include <memory>
+#include <aidl/android/media/IResourceManagerClient.h>
+
+namespace android {
+
+struct ClientInfo;
+struct ReclaimRequestInfo;
+
+/*
+ * Interface that defines Reclaim Policy.
+ *
+ * This provides an interface to select/identify a client based on a specific
+ * Reclaim policy.
+ */
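+// A minimal sketch of a custom policy (hypothetical, shown only to illustrate
+// the getClients() contract):
+//
+//   class FirstCandidateReclaimPolicy : public IReclaimPolicy {
+//   public:
+//     bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+//                     const std::vector<ClientInfo>& clients,
+//                     std::vector<ClientInfo>& targetClients) override {
+//       if (clients.empty()) return false;
+//       targetClients.push_back(clients.front());
+//       return true;
+//     }
+//   };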
+class IReclaimPolicy {
+public:
+    IReclaimPolicy() {}
+
+    virtual ~IReclaimPolicy() {}
+
+    /*
+     * Based on the Reclaim policy, identify and return a client from the list
+     * of given clients that satisfy the resource requested.
+     *
+     * @param[in]  reclaimRequestInfo Information about the resource request
+     * @param[in]  clients List of clients to select from.
+     * @param[out] targetClients Upon success, this will have the list of identified client(s).
+     *
+     * @return true on success, false otherwise
+     */
+    virtual bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                            const std::vector<ClientInfo>& clients,
+                            std::vector<ClientInfo>& targetClients) = 0;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_IRECLAIMPOLICY_H_
diff --git a/services/mediaresourcemanager/IResourceModel.h b/services/mediaresourcemanager/IResourceModel.h
new file mode 100644
index 0000000..f865f54
--- /dev/null
+++ b/services/mediaresourcemanager/IResourceModel.h
@@ -0,0 +1,67 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_IRESOURCEMODEL_H_
+#define ANDROID_MEDIA_IRESOURCEMODEL_H_
+
+#include <memory>
+#include <vector>
+
+#include <aidl/android/media/IResourceManagerClient.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+namespace android {
+
+struct ClientInfo;
+struct ReclaimRequestInfo;
+
+/*
+ * Interface that defines Resource Model.
+ *
+ * This provides an interface that manages the resource model.
+ * The primary functionality of the implementation of this resource model is to:
+ *  1. Define a resource model for a device (or family of devices)
+ *    For example (and not limited to):
+ *      - Can a secure codec coexist with another secure or unsecured codec?
+ *      - How many codecs can coexist?
+ *      - Can one type of codec (for example avc) coexist with another type of codec
+ *        (for example hevc) independently, or do they share a common
+ *        resource pool?
+ *  2. Provide a list of clients that hold the requested resources.
+ */
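+// A minimal usage sketch (illustrative only; the concrete model and its inputs
+// are assumed to be owned by the resource manager service):
+//
+//   std::unique_ptr<IResourceModel> model =
+//       std::make_unique<DefaultResourceModel>(resourceTracker);
+//   std::vector<ClientInfo> clients;
+//   if (!model->getAllClients(reclaimRequestInfo, clients)) {
+//     // A resource conflict was detected; `clients` may list the holders.
+//   }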
+class IResourceModel {
+public:
+    IResourceModel() {}
+
+    virtual ~IResourceModel() {}
+
+    /*
+     * Get a list of all clients that hold the requested resources.
+     * This implementation uses the ResourceModel to select the clients.
+     *
+     * @param[in]  reclaimRequestInfo Information about the Reclaim request
+     * @param[out] clients The list of clients that hold the resources in question.
+     *
+     * @return true if there aren't any resource conflicts and false otherwise.
+     */
+    virtual bool getAllClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                               std::vector<ClientInfo>& clients) = 0;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_IRESOURCEMODEL_H_
diff --git a/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp
new file mode 100644
index 0000000..5b776a6
--- /dev/null
+++ b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp
@@ -0,0 +1,135 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ProcessPriorityReclaimPolicy"
+#include <utils/Log.h>
+
+#include "ResourceTracker.h"
+#include "ResourceManagerService.h"
+#include "ProcessPriorityReclaimPolicy.h"
+
+namespace android {
+
+using aidl::android::media::IResourceManagerClient;
+
+ProcessPriorityReclaimPolicy::ProcessPriorityReclaimPolicy(
+        const std::shared_ptr<ResourceTracker>& resourceTracker)
+    : mResourceTracker(resourceTracker) {
+}
+
+ProcessPriorityReclaimPolicy::~ProcessPriorityReclaimPolicy() {
+}
+
+// Process priority (oom score) based reclaim:
+//   - Find a process with lower priority than that of the calling process.
+//   - Find the biggest client (with the required resources) from that process.
+bool ProcessPriorityReclaimPolicy::getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                                              const std::vector<ClientInfo>& clients,
+                                              std::vector<ClientInfo>& targetClients) {
+    // NOTE: This is the behavior of the existing reclaim policy.
+    // We can alter it to select more than one client to reclaim from, depending
+    // on the reclaim policy.
+
+    MediaResource::Type type = reclaimRequestInfo.mResources[0].type;
+    MediaResource::SubType subType = reclaimRequestInfo.mResources[0].subType;
+    // Find one client to reclaim the needed resources from.
+    // 1. Get the priority of the (reclaim) requesting process.
+    int callingPid = reclaimRequestInfo.mCallingPid;
+    int callingPriority = -1;
+    if (!mResourceTracker->getPriority(callingPid, &callingPriority)) {
+        ALOGE("%s: can't get process priority for pid %d", __func__, callingPid);
+        return false;
+    }
+
+    ClientInfo clientInfo;
+    // 2. Look to find the biggest client from the lowest priority process that
+    // has the other resources and with the given primary type.
+    bool found = false;
+    int lowestPriority = -1;
+    MediaResource::SubType primarySubType = subType;
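+    // Index 0 holds the primary resource; scan the remaining (secondary)
+    // resources while matching against the primary sub type.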
+    for (size_t index = 1; !found && (index < reclaimRequestInfo.mResources.size()); index++) {
+        MediaResource::Type type = reclaimRequestInfo.mResources[index].type;
+        MediaResource::SubType subType = reclaimRequestInfo.mResources[index].subType;
+        found = getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                   type, subType, primarySubType,
+                                                   clients, clientInfo, lowestPriority);
+    }
+    // 3. If we haven't found a client yet, then select the biggest client of the primary type.
+    if (!found) {
+        found = getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                   type, subType,
+                                                   MediaResource::SubType::kUnspecifiedSubType,
+                                                   clients, clientInfo, lowestPriority);
+    }
+    // 4. If we haven't found a client yet, then select the biggest client of a different type.
+    // This is applicable to codec types only.
+    if (!found) {
+        if (type != MediaResource::Type::kSecureCodec &&
+            type != MediaResource::Type::kNonSecureCodec) {
+            return false;
+        }
+        MediaResourceType otherType = (type == MediaResource::Type::kSecureCodec) ?
+            MediaResource::Type::kNonSecureCodec : MediaResource::Type::kSecureCodec;
+        if (!getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                otherType, subType,
+                                                MediaResource::SubType::kUnspecifiedSubType,
+                                                clients, clientInfo, lowestPriority)) {
+            return false;
+        }
+    }
+
+    targetClients.emplace_back(clientInfo);
+    ALOGI("%s: CallingProcess(%d:%d) will reclaim from the lowestPriorityProcess(%d:%d)",
+          __func__, callingPid, callingPriority, clientInfo.mPid, lowestPriority);
+
+    return true;
+}
+
+bool ProcessPriorityReclaimPolicy::getBiggestClientFromLowestPriority(
+        pid_t callingPid,
+        int callingPriority,
+        MediaResource::Type type, MediaResource::SubType subType,
+        MediaResource::SubType primarySubType,
+        const std::vector<ClientInfo>& clients,
+        ClientInfo& targetClient,
+        int& lowestPriority) {
+    // 1. Find the lowest priority process among all the clients with the
+    // requested resource type.
+    int lowestPriorityPid = -1;
+    lowestPriority = -1;
+    if (!mResourceTracker->getLowestPriorityPid(type, subType, primarySubType, clients,
+                                                lowestPriorityPid, lowestPriority)) {
+        ALOGD("%s: can't find a process with lower priority than that of the process[%d:%d]",
+              __func__, callingPid, callingPriority);
+        return false;
+    }
+
+    // 2. Make sure that the priority of the target process is lower than
+    // that of the requesting process.
+    if (lowestPriority <= callingPriority) {
+        ALOGD("%s: lowest priority %d vs caller priority %d",
+              __func__, lowestPriority, callingPriority);
+        return false;
+    }
+
+    // 3. Look to find the biggest client from that process for the given resources
+    return mResourceTracker->getBiggestClient(lowestPriorityPid, type, subType,
+                                              clients, targetClient, primarySubType);
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h
new file mode 100644
index 0000000..77bf7e1
--- /dev/null
+++ b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h
@@ -0,0 +1,89 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
+#define ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
+
+#include <media/MediaResource.h>
+#include "IReclaimPolicy.h"
+
+namespace android {
+
+class ResourceTracker;
+struct ClientInfo;
+
+/*
+ * Implementation of the Reclaim Policy based on the process priority.
+ *
+ * Find the lowest priority process (lower than the calling/requesting process's priority)
+ * that has the required resources.
+ * From that process, find the biggest client and return it for reclaiming.
+ * If there is a codec co-existence policy, it is addressed as below:
+ *   - If there are any conflicting codecs, reclaim all of those conflicting clients.
+ * If there are no conflicting codecs, the reclaim policy selects a client in the order of:
+ *   - Select the biggest client from the lowest priority process that
+ *     holds the other resources along with the given primary type.
+ *   - Select the biggest client from the lowest priority process that
+ *     holds the primary type.
+ *   - If it's a codec reclaim request, then:
+ *      - select the biggest client from the lowest priority process that
+ *        holds the other codec type (for example, secure for non-secure and vice versa).
+ */
+class ProcessPriorityReclaimPolicy : public IReclaimPolicy {
+public:
+    ProcessPriorityReclaimPolicy(const std::shared_ptr<ResourceTracker>& resourceTracker);
+
+    virtual ~ProcessPriorityReclaimPolicy();
+
+    /*
+     * Based on the process priority, identify and return a client from the list
+     * of given clients that satisfy the resource requested.
+     *
+     * @param[in]  reclaimRequestInfo Information about the resource request
+     * @param[in]  clients List of clients to select from.
+     * @param[out] targetClients Upon success, this will have the list of identified client(s).
+     *
+     * @return true on success, false otherwise
+     */
+    bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                    const std::vector<ClientInfo>& clients,
+                    std::vector<ClientInfo>& targetClients) override;
+
+private:
+
+    // Get the biggest client with the given resources from the given list of clients.
+    // The client should belong to the lowest priority process, whose priority must be
+    // lower than that of the calling/requesting process.
+    // Returns true on success, false otherwise.
+    //
+    bool getBiggestClientFromLowestPriority(
+        pid_t callingPid,
+        int callingPriority,
+        MediaResource::Type type,
+        MediaResource::SubType subType,
+        MediaResource::SubType primarySubType,
+        const std::vector<ClientInfo>& clients,
+        ClientInfo& targetClient,
+        int& lowestPriority);
+
+private:
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index af85772..cd00937 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -43,6 +43,28 @@
     MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
 using stats::media_metrics::\
     MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+
+// Map MediaResourceSubType to stats::media_metrics::CodecType
+inline int32_t getMetricsCodecType(MediaResourceSubType codecType) {
+    switch (codecType) {
+        case MediaResourceSubType::kHwAudioCodec:
+        case MediaResourceSubType::kSwAudioCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+        case MediaResourceSubType::kHwVideoCodec:
+        case MediaResourceSubType::kSwVideoCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+        case MediaResourceSubType::kHwImageCodec:
+        case MediaResourceSubType::kSwImageCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+        case MediaResourceSubType::kUnspecifiedSubType:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+    }
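+    // The switch above is exhaustive; this fallback only satisfies compilers
+    // that still require a return on every path.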
+    return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+}
 
 inline const char* getCodecType(MediaResourceSubType codecType) {
     switch (codecType) {
@@ -229,7 +251,7 @@
          clientConfig.clientInfo.uid,
          clientConfig.id,
          clientConfig.clientInfo.name.c_str(),
-         static_cast<int32_t>(clientConfig.codecType),
+         getMetricsCodecType(clientConfig.codecType),
          clientConfig.isEncoder,
          isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
@@ -311,7 +333,7 @@
          clientConfig.clientInfo.uid,
          clientConfig.id,
          clientConfig.clientInfo.name.c_str(),
-         static_cast<int32_t>(clientConfig.codecType),
+         getMetricsCodecType(clientConfig.codecType),
          clientConfig.isEncoder,
          isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
@@ -425,6 +447,42 @@
 #endif
 }
 
+inline void pushReclaimStats(int32_t callingPid,
+                             int32_t requesterUid,
+                             int requesterPriority,
+                             const std::string& clientName,
+                             int32_t noOfConcurrentCodecs,
+                             int32_t reclaimStatus,
+                             int32_t noOfCodecsReclaimed = 0,
+                             int32_t targetIndex = -1,
+                             int32_t targetClientPid = -1,
+                             int32_t targetClientUid = -1,
+                             int32_t targetPriority = -1) {
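+    // Note: callingPid and targetClientPid are only used in the log below;
+    // they are not part of the statsd atom.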
+    // Post the pushed atom
+    int result = stats_write(
+        MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+        requesterUid,
+        requesterPriority,
+        clientName.c_str(),
+        noOfConcurrentCodecs,
+        reclaimStatus,
+        noOfCodecsReclaimed,
+        targetIndex,
+        targetClientUid,
+        targetPriority);
+    ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+          "Requester[pid(%d): uid(%d): priority(%d)] "
+          "Codec: [%s] "
+          "No of concurrent codecs: %d "
+          "Reclaim Status: %d "
+          "No of codecs reclaimed: %d "
+          "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+          __func__, callingPid, requesterUid, requesterPriority,
+              clientName.c_str(), noOfConcurrentCodecs,
+          reclaimStatus, noOfCodecsReclaimed,
+          targetIndex, targetClientPid, targetClientUid, targetPriority, result);
+}
+
 void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
                                              const std::vector<int>& priorities,
                                              const std::vector<ClientInfo>& targetClients,
@@ -463,33 +521,34 @@
             MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
       }
     }
+
+    if (targetClients.empty()) {
+        // Push the reclaim atom to stats.
+        pushReclaimStats(callingPid,
+                         requesterUid,
+                         requesterPriority,
+                         clientName,
+                         noOfConcurrentCodecs,
+                         reclaimStatus);
+        return;
+    }
+
     int32_t noOfCodecsReclaimed = targetClients.size();
     int32_t targetIndex = 1;
     for (const ClientInfo& targetClient : targetClients) {
         int targetPriority = priorities[targetIndex];
-        // Post the pushed atom
-        int result = stats_write(
-            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
-            requesterUid,
-            requesterPriority,
-            clientName.c_str(),
-            noOfConcurrentCodecs,
-            reclaimStatus,
-            noOfCodecsReclaimed,
-            targetIndex,
-            targetClient.mUid,
-            targetPriority);
-        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
-              "Requester[pid(%d): uid(%d): priority(%d)] "
-              "Codec: [%s] "
-              "No of concurrent codecs: %d "
-              "Reclaim Status: %d "
-              "No of codecs reclaimed: %d "
-              "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
-              __func__, callingPid, requesterUid, requesterPriority,
-              clientName.c_str(), noOfConcurrentCodecs,
-              reclaimStatus, noOfCodecsReclaimed,
-              targetIndex, targetClient.mPid, targetClient.mUid, targetPriority, result);
+        // Push the reclaim atom to stats.
+        pushReclaimStats(callingPid,
+                         requesterUid,
+                         requesterPriority,
+                         clientName,
+                         noOfConcurrentCodecs,
+                         reclaimStatus,
+                         noOfCodecsReclaimed,
+                         targetIndex,
+                         targetClient.mPid,
+                         targetClient.mUid,
+                         targetPriority);
         targetIndex++;
     }
 }
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index a9bc34b..7a5a89f 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -96,7 +96,32 @@
 };
 
 //
-// ResourceManagerMetrics class that maintaines concurrent codec count based:
+//  Resource Manager Metrics is designed to answer questions such as:
+//    - What apps are causing reclaims and what apps are targeted (reclaimed from) in the process?
+//    - Which apps use the most codecs and the most codec memory?
+//    - What is the % of total successful reclaims?
+//
+//  Though not in the scope of this class, the metrics should also answer:
+//    - What % of codec errors are due to the codec being reclaimed?
+//    - What % of successful codec creations (starts) require codec reclaims?
+//    - How often does a codec start fail even after a successful reclaim?
+//
+//  The metrics are collected to analyze and understand the codec resource usage
+//  and use that information to help:
+//    - minimize the number of reclaims
+//    - reduce codec start delays by minimizing the number of times we try to reclaim
+//    - minimize the reclaim errors in codec records
+//
+//  Success metrics for the Resource Manager Service could be defined as:
+//   - an increase in successful codec creations for foreground apps
+//   - a reduction in the number of codec reclaims
+//   - a reduction in the time to create a codec
+//
+//  We would like to use this data to come up with better resource management that would:
+//   - increase successful codec creations (for all kinds of apps)
+//   - decrease codec errors due to resources
+//
+// This class maintains concurrent codec counts based on:
 //
 //  1. # of concurrent active codecs (initialized, but aren't released yet) of given
 //     implementation (by codec name) across the system.
@@ -111,7 +136,7 @@
 //  This should help with understanding the (video) memory usage per
 //  application.
 //
-//
+
 class ResourceManagerMetrics {
 public:
     ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 1953237..d37d893 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -29,37 +29,60 @@
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/ProcessInfo.h>
 #include <mediautils/SchedulingPolicyService.h>
+#include <com_android_media_codec_flags.h>
 
-#include "IMediaResourceMonitor.h"
 #include "ResourceManagerMetrics.h"
+#include "ResourceManagerServiceNew.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
+namespace CodecFeatureFlags = com::android::media::codec::flags;
+
 namespace android {
 
-static void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
-    static const char* const kServiceName = "media_resource_monitor";
-    sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
-    if (binder != NULL) {
-        sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
-        for (size_t i = 0; i < resources.size(); ++i) {
-            switch (resources[i].subType) {
-                case MediaResource::SubType::kHwAudioCodec:
-                case MediaResource::SubType::kSwAudioCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
-                    break;
-                case MediaResource::SubType::kHwVideoCodec:
-                case MediaResource::SubType::kSwVideoCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
-                    break;
-                case MediaResource::SubType::kHwImageCodec:
-                case MediaResource::SubType::kSwImageCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
-                    break;
-                case MediaResource::SubType::kUnspecifiedSubType:
-                    break;
-            }
+void ResourceManagerService::getResourceDump(std::string& resourceLog) const {
+    PidResourceInfosMap mapCopy;
+    std::map<int, int> overridePidMapCopy;
+    {
+        std::scoped_lock lock{mLock};
+        mapCopy = mMap;  // Shadow copy, real copy will happen on write.
+        overridePidMapCopy = mOverridePidMap;
+    }
+
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    resourceLog.append("  Processes:\n");
+    for (const auto& [pid, infos] : mapCopy) {
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        resourceLog.append(buffer);
+        int priority = 0;
+        if (getPriority_l(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
+        } else {
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
         }
+        resourceLog.append(buffer);
+
+        for (const auto& [infoKey, info] : infos) {
+            resourceLog.append("      Client:\n");
+            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
+            resourceLog.append(buffer);
+
+            std::string clientName = info.name;
+            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
+            resourceLog.append(buffer);
+
+            const ResourceList& resources = info.resources;
+            resourceLog.append("        Resources:\n");
+            resourceLog.append(resources.toString());
+        }
+    }
+
+    resourceLog.append("  Process Pid override:\n");
+    for (auto it = overridePidMapCopy.begin(); it != overridePidMapCopy.end(); ++it) {
+        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n",
+            it->first, it->second);
+        resourceLog.append(buffer);
     }
 }
 
@@ -75,20 +98,20 @@
         return PERMISSION_DENIED;
     }
 
-    PidResourceInfosMap mapCopy;
     bool supportsMultipleSecureCodecs;
     bool supportsSecureWithNonSecureCodec;
-    std::map<int, int> overridePidMapCopy;
     String8 serviceLog;
     {
         std::scoped_lock lock{mLock};
-        mapCopy = mMap;  // Shadow copy, real copy will happen on write.
         supportsMultipleSecureCodecs = mSupportsMultipleSecureCodecs;
         supportsSecureWithNonSecureCodec = mSupportsSecureWithNonSecureCodec;
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
-        overridePidMapCopy = mOverridePidMap;
     }
 
+    // Get all the resource (and override pid) logs
+    std::string resourceLog;
+    getResourceDump(resourceLog);
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -100,41 +123,8 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
-    result.append("  Processes:\n");
-    for (const auto& [pid, infos] : mapCopy) {
-        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
-        result.append(buffer);
-        int priority = 0;
-        if (getPriority_l(pid, &priority)) {
-            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
-        } else {
-            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
-        }
-        result.append(buffer);
+    result.append(resourceLog.c_str());
 
-        for (const auto& [infoKey, info] : infos) {
-            result.append("      Client:\n");
-            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
-            result.append(buffer);
-
-            std::string clientName = info.name;
-            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
-            result.append(buffer);
-
-            const ResourceList& resources = info.resources;
-            result.append("        Resources:\n");
-            for (auto it = resources.begin(); it != resources.end(); it++) {
-                snprintf(buffer, SIZE, "          %s\n", toString(it->second).c_str());
-                result.append(buffer);
-            }
-        }
-    }
-    result.append("  Process Pid override:\n");
-    for (auto it = overridePidMapCopy.begin(); it != overridePidMapCopy.end(); ++it) {
-        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n",
-            it->first, it->second);
-        result.append(buffer);
-    }
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
@@ -212,9 +202,35 @@
 std::shared_ptr<ResourceManagerService> ResourceManagerService::Create(
         const sp<ProcessInfoInterface>& processInfo,
         const sp<SystemCallbackInterface>& systemResource) {
-    return ::ndk::SharedRefBase::make<ResourceManagerService>(processInfo, systemResource);
+    std::shared_ptr<ResourceManagerService> service = nullptr;
+    // If codec importance feature is on, create the refactored implementation.
+    if (CodecFeatureFlags::codec_importance()) {
+        service = ::ndk::SharedRefBase::make<ResourceManagerServiceNew>(processInfo,
+                                                                        systemResource);
+    } else {
+        service = ::ndk::SharedRefBase::make<ResourceManagerService>(processInfo,
+                                                                     systemResource);
+    }
+
+    if (service != nullptr) {
+        service->init();
+    }
+
+    return service;
 }
 
+// TEST only function.
+std::shared_ptr<ResourceManagerService> ResourceManagerService::CreateNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource) {
+    std::shared_ptr<ResourceManagerService> service =
+        ::ndk::SharedRefBase::make<ResourceManagerServiceNew>(processInfo, systemResource);
+    service->init();
+    return service;
+}
+
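+// Base class init() is intentionally a no-op; the refactored implementation
+// (ResourceManagerServiceNew) overrides it to set up its resource tracker,
+// resource model and reclaim policies.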
+void ResourceManagerService::init() {}
+
 ResourceManagerService::~ResourceManagerService() {}
 
 void ResourceManagerService::setObserverService(
@@ -296,31 +312,21 @@
 
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
-        const auto resType = std::tuple(res.type, res.subType, res.id);
 
         if (res.value < 0 && res.type != MediaResource::Type::kDrmSession) {
             ALOGW("Ignoring request to remove negative value of non-drm resource");
             continue;
         }
-        if (info.resources.find(resType) == info.resources.end()) {
-            if (res.value <= 0) {
-                // We can't init a new entry with negative value, although it's allowed
-                // to merge in negative values after the initial add.
-                ALOGW("Ignoring request to add new resource entry with value <= 0");
-                continue;
-            }
+        bool isNewEntry = false;
+        if (!info.resources.add(res, &isNewEntry)) {
+            continue;
+        }
+        if (isNewEntry) {
             onFirstAdded(res, info.uid);
-            info.resources[resType] = res;
-        } else {
-            mergeResources(info.resources[resType], res);
         }
+
         // Add it to the list of added resources for observers.
-        auto it = resourceAdded.find(resType);
-        if (it == resourceAdded.end()) {
-            resourceAdded[resType] = res;
-        } else {
-            mergeResources(it->second, res);
-        }
+        resourceAdded.add(res);
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
         info.deathNotifier = DeathNotifier::Create(
@@ -367,31 +373,22 @@
     ResourceList resourceRemoved;
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
-        const auto resType = std::tuple(res.type, res.subType, res.id);
 
         if (res.value < 0) {
             ALOGW("Ignoring request to remove negative value of resource");
             continue;
         }
-        // ignore if we don't have it
-        if (info.resources.find(resType) != info.resources.end()) {
-            MediaResourceParcel &resource = info.resources[resType];
+
+        long removedEntryValue = -1;
+        if (info.resources.remove(res, &removedEntryValue)) {
             MediaResourceParcel actualRemoved = res;
-            if (resource.value > res.value) {
-                resource.value -= res.value;
-            } else {
+            if (removedEntryValue != -1) {
                 onLastRemoved(res, info.uid);
-                actualRemoved.value = resource.value;
-                info.resources.erase(resType);
+                actualRemoved.value = removedEntryValue;
             }
 
             // Add it to the list of removed resources for observers.
-            auto it = resourceRemoved.find(resType);
-            if (it == resourceRemoved.end()) {
-                resourceRemoved[resType] = actualRemoved;
-            } else {
-                mergeResources(it->second, actualRemoved);
-            }
+            resourceRemoved.add(actualRemoved);
         }
     }
     if (mObserverService != nullptr && !resourceRemoved.empty()) {
@@ -434,8 +431,8 @@
     }
 
     const ResourceInfo& info = foundClient->second;
-    for (auto it = info.resources.begin(); it != info.resources.end(); it++) {
-        onLastRemoved(it->second, info.uid);
+    for (const MediaResourceParcel& res : info.resources.getResources()) {
+        onLastRemoved(res, info.uid);
     }
 
     // Since this client has been removed, update the metrics collector.
@@ -472,117 +469,137 @@
     }
 }
 
-Status ResourceManagerService::reclaimResource(const ClientInfoParcel& clientInfo,
-        const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
+bool ResourceManagerService::getTargetClients(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) {
     int32_t callingPid = clientInfo.pid;
-    std::string clientName = clientInfo.name;
-    String8 log = String8::format("reclaimResource(callingPid %d, uid %d resources %s)",
-            callingPid, clientInfo.uid, getString(resources).c_str());
-    mServiceLog->add(log);
-    *_aidl_return = false;
-
-    std::vector<ClientInfo> targetClients;
-    {
-        std::scoped_lock lock{mLock};
-        if (!mProcessInfo->isPidTrusted(callingPid)) {
-            pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
-            ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
-                    callingPid, actualCallingPid);
-            callingPid = actualCallingPid;
+    int64_t clientId = clientInfo.id;
+    std::scoped_lock lock{mLock};
+    if (!mProcessInfo->isPidTrusted(callingPid)) {
+        pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+                callingPid, actualCallingPid);
+        callingPid = actualCallingPid;
+    }
+    const MediaResourceParcel *secureCodec = NULL;
+    const MediaResourceParcel *nonSecureCodec = NULL;
+    const MediaResourceParcel *graphicMemory = NULL;
+    const MediaResourceParcel *drmSession = NULL;
+    for (size_t i = 0; i < resources.size(); ++i) {
+        switch (resources[i].type) {
+            case MediaResource::Type::kSecureCodec:
+                secureCodec = &resources[i];
+                break;
+            case MediaResource::Type::kNonSecureCodec:
+                nonSecureCodec = &resources[i];
+                break;
+            case MediaResource::Type::kGraphicMemory:
+                graphicMemory = &resources[i];
+                break;
+            case MediaResource::Type::kDrmSession:
+                drmSession = &resources[i];
+                break;
+            default:
+                break;
         }
-        const MediaResourceParcel *secureCodec = NULL;
-        const MediaResourceParcel *nonSecureCodec = NULL;
-        const MediaResourceParcel *graphicMemory = NULL;
-        const MediaResourceParcel *drmSession = NULL;
-        for (size_t i = 0; i < resources.size(); ++i) {
-            switch (resources[i].type) {
-                case MediaResource::Type::kSecureCodec:
-                    secureCodec = &resources[i];
-                    break;
-                case MediaResource::Type::kNonSecureCodec:
-                    nonSecureCodec = &resources[i];
-                    break;
-                case MediaResource::Type::kGraphicMemory:
-                    graphicMemory = &resources[i];
-                    break;
-                case MediaResource::Type::kDrmSession:
-                    drmSession = &resources[i];
-                    break;
-                default:
-                    break;
+    }
+
+    // first pass to handle secure/non-secure codec conflict
+    if (secureCodec != NULL) {
+        MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
+                                          .subType = secureCodec->subType};
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
+        if (!mSupportsMultipleSecureCodecs) {
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
             }
         }
-
-        // first pass to handle secure/non-secure codec conflict
-        if (secureCodec != NULL) {
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kNonSecureCodec;
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
+            }
+        }
+    }
+    if (nonSecureCodec != NULL) {
+        if (!mSupportsSecureWithNonSecureCodec) {
             MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
-                                              .subType = secureCodec->subType};
-            ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
-            if (!mSupportsMultipleSecureCodecs) {
-                if (!getAllClients_l(resourceRequestInfo, targetClients)) {
-                    return Status::ok();
-                }
-            }
-            if (!mSupportsSecureWithNonSecureCodec) {
-                mediaResource.type = MediaResource::Type::kNonSecureCodec;
-                if (!getAllClients_l(resourceRequestInfo, targetClients)) {
-                    return Status::ok();
-                }
-            }
-        }
-        if (nonSecureCodec != NULL) {
-            if (!mSupportsSecureWithNonSecureCodec) {
-                MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
-                                                  .subType = nonSecureCodec->subType};
-                ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
-                if (!getAllClients_l(resourceRequestInfo, targetClients)) {
-                    return Status::ok();
-                }
-            }
-        }
-
-        if (drmSession != NULL) {
-            ResourceRequestInfo resourceRequestInfo{callingPid, drmSession};
-            getClientForResource_l(resourceRequestInfo, targetClients);
-            if (targetClients.size() == 0) {
-                return Status::ok();
-            }
-        }
-
-        if (targetClients.size() == 0 && graphicMemory != nullptr) {
-            // if no secure/non-secure codec conflict, run second pass to handle other resources.
-            ResourceRequestInfo resourceRequestInfo{callingPid, graphicMemory};
-            getClientForResource_l(resourceRequestInfo, targetClients);
-        }
-
-        if (targetClients.size() == 0) {
-            // if we are here, run the third pass to free one codec with the same type.
-            if (secureCodec != nullptr) {
-                ResourceRequestInfo resourceRequestInfo{callingPid, secureCodec};
-                getClientForResource_l(resourceRequestInfo, targetClients);
-            }
-            if (nonSecureCodec != nullptr) {
-                ResourceRequestInfo resourceRequestInfo{callingPid, nonSecureCodec};
-                getClientForResource_l(resourceRequestInfo, targetClients);
-            }
-        }
-
-        if (targetClients.size() == 0) {
-            // if we are here, run the fourth pass to free one codec with the different type.
-            if (secureCodec != nullptr) {
-                MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
-                ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
-                getClientForResource_l(resourceRequestInfo, targetClients);
-            }
-            if (nonSecureCodec != nullptr) {
-                MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
-                ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
-                getClientForResource_l(resourceRequestInfo, targetClients);
+                                              .subType = nonSecureCodec->subType};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
             }
         }
     }
 
-    *_aidl_return = reclaimUnconditionallyFrom(targetClients);
+    if (drmSession != NULL) {
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, drmSession};
+        getClientForResource_l(resourceRequestInfo, targetClients);
+        if (targetClients.size() == 0) {
+            return false;
+        }
+    }
+
+    if (targetClients.size() == 0 && graphicMemory != nullptr) {
+        // if no secure/non-secure codec conflict, run second pass to handle other resources.
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, graphicMemory};
+        getClientForResource_l(resourceRequestInfo, targetClients);
+    }
+
+    if (targetClients.size() == 0) {
+        // if we are here, run the third pass to free one codec with the same type.
+        if (secureCodec != nullptr) {
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, secureCodec};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+        if (nonSecureCodec != nullptr) {
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, nonSecureCodec};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+    }
+
+    if (targetClients.size() == 0) {
+        // if we are here, run the fourth pass to free one codec with the different type.
+        if (secureCodec != nullptr) {
+            MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+        if (nonSecureCodec != nullptr) {
+            MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+    }
+
+    return !targetClients.empty();
+}
+
+Status ResourceManagerService::reclaimResource(const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
+    std::string clientName = clientInfo.name;
+    String8 log = String8::format("reclaimResource(callingPid %d, uid %d resources %s)",
+            clientInfo.pid, clientInfo.uid, getString(resources).c_str());
+    mServiceLog->add(log);
+    *_aidl_return = false;
+
+    // Check if there are any resources to be reclaimed before processing.
+    if (resources.empty()) {
+        // Invalid reclaim request. So no need to log.
+        return Status::ok();
+    }
+
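+    // Identify the clients to reclaim from, based on the requested resources
+    // and the relative priorities of the processes that hold them.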
+    std::vector<ClientInfo> targetClients;
+    if (getTargetClients(clientInfo, resources, targetClients)) {
+        // Reclaim all the target clients.
+        *_aidl_return = reclaimUnconditionallyFrom(targetClients);
+    } else {
+        // No clients to reclaim from.
+        ALOGI("%s: There aren't any clients to reclaim from", __func__);
+        // We need to log this failed reclaim as "no clients to reclaim from".
+        targetClients.clear();
+    }
 
     // Log Reclaim Pushed Atom to statsd
     pushReclaimAtom(clientInfo, targetClients, *_aidl_return);
@@ -607,7 +624,7 @@
     mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, targetClients, reclaimed);
 }
 
-std::shared_ptr<IResourceManagerClient> ResourceManagerService::getClient(
+std::shared_ptr<IResourceManagerClient> ResourceManagerService::getClient_l(
         int pid, const int64_t& clientId) const {
     std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
     if (found == mMap.end()) {
@@ -625,7 +642,7 @@
     return foundClient->second.client;
 }
 
-bool ResourceManagerService::removeClient(int pid, const int64_t& clientId) {
+bool ResourceManagerService::removeClient_l(int pid, const int64_t& clientId) {
     std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
     if (found == mMap.end()) {
         ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
@@ -652,8 +669,11 @@
     int64_t failedClientId = -1;
     int32_t failedClientPid = -1;
     for (const ClientInfo& targetClient : targetClients) {
-        std::shared_ptr<IResourceManagerClient> client = getClient(
-            targetClient.mPid, targetClient.mClientId);
+        std::shared_ptr<IResourceManagerClient> client = nullptr;
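+        // Look up the client while holding the lock; the actual reclaim call
+        // below is made without holding mLock.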
+        {
+            std::scoped_lock lock{mLock};
+            client = getClient_l(targetClient.mPid, targetClient.mClientId);
+        }
         if (client == nullptr) {
             // skip already released clients.
             continue;
@@ -675,7 +695,7 @@
 
     {
         std::scoped_lock lock{mLock};
-        bool found = removeClient(failedClientPid, failedClientId);
+        bool found = removeClient_l(failedClientPid, failedClientId);
         if (found) {
             ALOGW("Failed to reclaim resources from client with pid %d", failedClientPid);
         } else {
@@ -686,6 +706,16 @@
     return false;
 }
 
+bool ResourceManagerService::overridePid_l(int32_t originalPid, int32_t newPid) {
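+    // Clear any existing override for |originalPid| and install the new one only
+    // when |newPid| is valid (!= -1). Returns true only when a new override was added.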
+    mOverridePidMap.erase(originalPid);
+    if (newPid != -1) {
+        mOverridePidMap.emplace(originalPid, newPid);
+        return true;
+    }
+
+    return false;
+}
+
 Status ResourceManagerService::overridePid(int originalPid, int newPid) {
     String8 log = String8::format("overridePid(originalPid %d, newPid %d)",
             originalPid, newPid);
@@ -705,9 +735,7 @@
 
     {
         std::scoped_lock lock{mLock};
-        mOverridePidMap.erase(originalPid);
-        if (newPid != -1) {
-            mOverridePidMap.emplace(originalPid, newPid);
+        if (overridePid_l(originalPid, newPid)) {
             mResourceManagerMetrics->addPid(newPid);
         }
     }
@@ -715,6 +743,29 @@
     return Status::ok();
 }
 
+bool ResourceManagerService::overrideProcessInfo_l(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    removeProcessInfoOverride_l(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return false;
+    }
+
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
+    auto deathNotifier = DeathNotifier::Create(
+        client, ref<ResourceManagerService>(), clientInfo, true);
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
+    return true;
+}
+
 Status ResourceManagerService::overrideProcessInfo(
         const std::shared_ptr<IResourceManagerClient>& client, int pid, int procState,
         int oomScore) {
@@ -735,23 +786,12 @@
     }
 
     std::scoped_lock lock{mLock};
-    removeProcessInfoOverride_l(pid);
-
-    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+    if (!overrideProcessInfo_l(client, pid, procState, oomScore)) {
         // Override value is rejected by ProcessInfo.
         return Status::fromServiceSpecificError(BAD_VALUE);
     }
-
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
-                                .uid = 0,
-                                .id = 0,
-                                .name = "<unknown client>"};
-    auto deathNotifier = DeathNotifier::Create(
-        client, ref<ResourceManagerService>(), clientInfo, true);
-
-    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
-
     return Status::ok();
 }
 
 void ResourceManagerService::removeProcessInfoOverride(int pid) {
@@ -857,11 +897,12 @@
     return Status::ok();
 }
 
-bool ResourceManagerService::getPriority_l(int pid, int* priority) {
+bool ResourceManagerService::getPriority_l(int pid, int* priority) const {
     int newPid = pid;
 
-    if (mOverridePidMap.find(pid) != mOverridePidMap.end()) {
-        newPid = mOverridePidMap[pid];
+    std::map<int, int>::const_iterator found = mOverridePidMap.find(pid);
+    if (found != mOverridePidMap.end()) {
+        newPid = found->second;
         ALOGD("getPriority_l: use override pid %d instead original pid %d",
                 newPid, pid);
     }
@@ -877,6 +918,11 @@
 
     for (auto& [pid, infos] : mMap) {
         for (const auto& [id, info] : infos) {
+            if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+                ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+                      __func__, id);
+                continue;
+            }
             if (hasResourceType(type, subType, info.resources)) {
                 if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, pid)) {
                     // some higher/equal priority process owns the resource,
@@ -1000,8 +1046,7 @@
         if (pendingRemovalOnly && !info.pendingRemoval) {
             continue;
         }
-        for (auto it = resources.begin(); it != resources.end(); it++) {
-            const MediaResourceParcel &resource = it->second;
+        for (const MediaResourceParcel& resource : resources.getResources()) {
             if (hasResourceType(type, subType, resource)) {
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
@@ -1053,4 +1098,8 @@
     return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
 }
 
+void ResourceManagerService::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+    mResourceManagerMetrics->notifyClientReleased(clientInfo);
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index e22a6b3..dc1600a 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -73,7 +73,8 @@
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
 
-    void setObserverService(const std::shared_ptr<ResourceObserverService>& observerService);
+    virtual void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
@@ -103,8 +104,6 @@
 
     Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
 
-    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
-
     Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
 
     Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
@@ -113,88 +112,146 @@
 
     Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
 
+protected:
+    // To get notifications when a resource is added for the first time.
+    void onFirstAdded(const MediaResourceParcel& res, uid_t uid);
+    // To get notifications when a resource has been removed at last.
+    void onLastRemoved(const MediaResourceParcel& res, uid_t uid);
+
+    // Reclaims resources from |clients|. Returns true if reclaim succeeded
+    // for all clients.
+    bool reclaimUnconditionallyFrom(const std::vector<ClientInfo>& targetClients);
+
+    // A helper function that returns true if the callingPid has higher priority than pid.
+    // Returns false otherwise.
+    bool isCallingPriorityHigher_l(int callingPid, int pid);
+
+    // To notify the metrics about client being released.
+    void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+    virtual Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
+
 private:
     friend class ResourceManagerServiceTest;
     friend class ResourceManagerServiceTestBase;
     friend class DeathNotifier;
     friend class OverrideProcessInfoDeathNotifier;
 
-    // Reclaims resources from |clients|. Returns true if reclaim succeeded
-    // for all clients.
-    bool reclaimUnconditionallyFrom(
-        const std::vector<ClientInfo>& targetClients);
-
-    // Gets the list of all the clients who own the specified resource type.
-    // Returns false if any client belongs to a process with higher priority than the
-    // calling process. The clients will remain unchanged if returns false.
-    bool getAllClients_l(const ResourceRequestInfo& resourceRequestInfo,
-                         std::vector<ClientInfo>& clientsInfo);
-
-    // Gets the client who owns specified resource type from lowest possible priority process.
-    // Returns false if the calling process priority is not higher than the lowest process
-    // priority. The client will remain unchanged if returns false.
-    bool getLowestPriorityBiggestClient_l(
-        const ResourceRequestInfo& resourceRequestInfo,
-        ClientInfo& clientInfo);
-
     // Gets the client who owns biggest piece of specified resource type from pid.
     // Returns false with no change to client if there are no clients holding resources of this
     // type.
-    bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
+    bool getBiggestClient_l(int pid, MediaResource::Type type,
+                            MediaResource::SubType subType,
                             ClientInfo& clientsInfo,
                             bool pendingRemovalOnly = false);
-    // Same method as above, but with pendingRemovalOnly as true.
+
+    // A helper function that gets the biggest client of the process |pid| that
+    // is marked as pending removal and holds the needed resources.
     bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
                                           MediaResource::SubType subType,
                                           ClientInfo& clientsInfo);
 
-    // A helper function that returns true if the callingPid has higher priority than pid.
-    // Returns false otherwise.
-    bool isCallingPriorityHigher_l(int callingPid, int pid);
-
-    // A helper function basically calls getLowestPriorityBiggestClient_l and adds
-    // the result client to the given Vector.
+    // From the list of clients, pick/select client(s) based on the reclaim policy.
     void getClientForResource_l(const ResourceRequestInfo& resourceRequestInfo,
                                 std::vector<ClientInfo>& clientsInfo);
-
-    void onFirstAdded(const MediaResourceParcel& res, uid_t uid);
-    void onLastRemoved(const MediaResourceParcel& res, uid_t uid);
-
-    // Get priority from process's pid
-    bool getPriority_l(int pid, int* priority);
-
-    void removeProcessInfoOverride(int pid);
-
-    void removeProcessInfoOverride_l(int pid);
-
+    // A helper function that pushes Reclaim Atom (for metric collection).
     void pushReclaimAtom(const ClientInfoParcel& clientInfo,
                          const std::vector<ClientInfo>& targetClients,
                          bool reclaimed);
 
-    // Get the client for given pid and the clientId from the map
-    std::shared_ptr<IResourceManagerClient> getClient(int pid, const int64_t& clientId) const;
+    // Remove the override info for the given process
+    void removeProcessInfoOverride_l(int pid);
 
-    // Remove the client for given pid and the clientId from the map
-    bool removeClient(int pid, const int64_t& clientId);
+    // Eventually we want to phase out this implementation of IResourceManagerService
+    // (ResourceManagerService) and replace that with the newer implementation
+    // (ResourceManagerServiceNew).
+    // So, marking the following methods as private virtual, to be overridden by the newer
+    // implementation, is the easiest way to maintain both implementations.
 
-    // The following utility functions are used only for testing by ResourceManagerServiceTest
+    // Initializes the internal state of the ResourceManagerService
+    virtual void init();
+
+    // Gets the list of all the clients who own the list of specified resource type
+    // and satisfy the resource model and the reclaim policy.
+    virtual bool getTargetClients(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients);
+
+    // Gets the list of all the clients who own the specified resource type.
+    // Returns false if any client belongs to a process with higher priority than the
+    // calling process. The clients will remain unchanged if returns false.
+    virtual bool getAllClients_l(const ResourceRequestInfo& resourceRequestInfo,
+                                 std::vector<ClientInfo>& clientsInfo);
+
+    // Gets the client who owns specified resource type from lowest possible priority process.
+    // Returns false if the calling process priority is not higher than the lowest process
+    // priority. The client will remain unchanged if returns false.
+    virtual bool getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo);
+
+    // override the pid of given process
+    virtual bool overridePid_l(int32_t originalPid, int32_t newPid);
+
+    // override the process info of given process
+    virtual bool overrideProcessInfo_l(const std::shared_ptr<IResourceManagerClient>& client,
+                                       int pid, int procState, int oomScore);
+
+    // Get priority from process's pid
+    virtual bool getPriority_l(int pid, int* priority) const;
+
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
-    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType,
-                                int* lowestPriorityPid, int* lowestPriority);
+    virtual bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType,
+                                        int* lowestPriorityPid, int* lowestPriority);
+
+    // Removes the pid from the override map.
+    virtual void removeProcessInfoOverride(int pid);
+
+    // Get the client for given pid and the clientId from the map
+    virtual std::shared_ptr<IResourceManagerClient> getClient_l(
+        int pid, const int64_t& clientId) const;
+
+    // Remove the client for given pid and the clientId from the map
+    virtual bool removeClient_l(int pid, const int64_t& clientId);
+
+    // Get all the resource status for dump
+    virtual void getResourceDump(std::string& resourceLog) const;
+
+    // The following utility functions are used only for testing by ResourceManagerServiceTest
+    // START: TEST only functions
     // Get the peak concurrent pixel count (associated with the video codecs) for the process.
     long getPeakConcurrentPixelCount(int pid) const;
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
+    // To create an object of type ResourceManagerServiceNew
+    static std::shared_ptr<ResourceManagerService> CreateNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource);
+    // Returns an unmodifiable reference to the internal resource state as a map
+    virtual const std::map<int, ResourceInfos>& getResourceMap() const {
+        return mMap;
+    }
+    // enable/disable process priority based reclaim and client importance based reclaim
+    virtual void setReclaimPolicy(bool processPriority, bool clientImportance) {
+        // Implemented by the refactored/new RMService
+        (void)processPriority;
+        (void)clientImportance;
+    }
+    // END: TEST only functions
 
+protected:
     mutable std::mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
     sp<ServiceLog> mServiceLog;
-    PidResourceInfosMap mMap;
     bool mSupportsMultipleSecureCodecs;
     bool mSupportsSecureWithNonSecureCodec;
     int32_t mCpuBoostCount;
+
+private:
+    PidResourceInfosMap mMap;
     struct ProcessInfoOverride {
         std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
         std::shared_ptr<IResourceManagerClient> client;
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
new file mode 100644
index 0000000..0a0a8f4
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
@@ -0,0 +1,388 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerServiceNew"
+#include <utils/Log.h>
+#include <binder/IPCThreadState.h>
+#include <mediautils/ProcessInfo.h>
+
+#include "DefaultResourceModel.h"
+#include "ClientImportanceReclaimPolicy.h"
+#include "ProcessPriorityReclaimPolicy.h"
+#include "ResourceManagerServiceNew.h"
+#include "ResourceTracker.h"
+#include "ServiceLog.h"
+
+namespace android {
+
+ResourceManagerServiceNew::ResourceManagerServiceNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource) :
+  ResourceManagerService(processInfo, systemResource) {}
+
+ResourceManagerServiceNew::~ResourceManagerServiceNew() {}
+
+void ResourceManagerServiceNew::init() {
+    // Create the Resource Tracker
+    mResourceTracker = std::make_shared<ResourceTracker>(ref<ResourceManagerServiceNew>(),
+                                                         mProcessInfo);
+    setUpResourceModels();
+    setUpReclaimPolicies();
+}
+
+void ResourceManagerServiceNew::setUpResourceModels() {
+    std::scoped_lock lock{mLock};
+    // Create/Configure the default resource model.
+    if (mDefaultResourceModel == nullptr) {
+        mDefaultResourceModel = std::make_unique<DefaultResourceModel>(
+                mResourceTracker,
+                mSupportsMultipleSecureCodecs,
+                mSupportsSecureWithNonSecureCodec);
+    } else {
+        DefaultResourceModel* resourceModel =
+            static_cast<DefaultResourceModel*>(mDefaultResourceModel.get());
+        resourceModel->config(mSupportsMultipleSecureCodecs, mSupportsSecureWithNonSecureCodec);
+    }
+}
+
+void ResourceManagerServiceNew::setUpReclaimPolicies() {
+    mReclaimPolicies.clear();
+    // Add Reclaim policies based on:
+    // - the Process priority (oom score)
+    // - the client/codec importance.
+    setReclaimPolicy(true /* processPriority */, true /* clientImportance */);
+}
+
+Status ResourceManagerServiceNew::config(const std::vector<MediaResourcePolicyParcel>& policies) {
+    Status status = ResourceManagerService::config(policies);
+    // Change in the config dictates update to the resource model.
+    setUpResourceModels();
+    return status;
+}
+
+void ResourceManagerServiceNew::setObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    ResourceManagerService::setObserverService(observerService);
+    mResourceTracker->setResourceObserverService(observerService);
+}
+
+Status ResourceManagerServiceNew::addResource(
+        const ClientInfoParcel& clientInfo,
+        const std::shared_ptr<IResourceManagerClient>& client,
+        const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("addResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).c_str());
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->addResource(clientInfo, client, resources);
+    notifyResourceGranted(pid, resources);
+
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::removeResource(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).c_str());
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->removeResource(clientInfo, resources);
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::removeClient(const ClientInfoParcel& clientInfo) {
+    removeResource(clientInfo, true /*checkValid*/);
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::removeResource(const ClientInfoParcel& clientInfo,
+                                                 bool checkValid) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld)",
+            pid, uid, (long long) clientId);
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    if (mResourceTracker->removeResource(clientInfo, checkValid)) {
+        notifyClientReleased(clientInfo);
+    }
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::reclaimResource(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        bool* _aidl_return) {
+    return ResourceManagerService::reclaimResource(clientInfo, resources, _aidl_return);
+}
+
+bool ResourceManagerServiceNew::overridePid_l(int32_t originalPid, int32_t newPid) {
+    return mResourceTracker->overridePid(originalPid, newPid);
+}
+
+Status ResourceManagerServiceNew::overridePid(int originalPid, int newPid) {
+    return ResourceManagerService::overridePid(originalPid, newPid);
+}
+
+bool ResourceManagerServiceNew::overrideProcessInfo_l(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    return mResourceTracker->overrideProcessInfo(client, pid, procState, oomScore);
+}
+
+Status ResourceManagerServiceNew::overrideProcessInfo(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    return ResourceManagerService::overrideProcessInfo(client, pid, procState, oomScore);
+}
+
+void ResourceManagerServiceNew::removeProcessInfoOverride(int pid) {
+    std::scoped_lock lock{mLock};
+
+    mResourceTracker->removeProcessInfoOverride(pid);
+}
+
+Status ResourceManagerServiceNew::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format(
+            "markClientForPendingRemoval(pid %d, clientId %lld)",
+            pid, (long long) clientId);
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->markClientForPendingRemoval(clientInfo);
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::reclaimResourcesFromClientsPendingRemoval(int32_t pid) {
+    String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
+    mServiceLog->add(log);
+
+    std::vector<ClientInfo> targetClients;
+    {
+        std::scoped_lock lock{mLock};
+        mResourceTracker->getClientsMarkedPendingRemoval(pid, targetClients);
+    }
+
+    if (!targetClients.empty()) {
+        reclaimUnconditionallyFrom(targetClients);
+    }
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    return ResourceManagerService::notifyClientCreated(clientInfo);
+}
+
+Status ResourceManagerServiceNew::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    return ResourceManagerService::notifyClientStarted(clientConfig);
+}
+
+Status ResourceManagerServiceNew::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    return ResourceManagerService::notifyClientStopped(clientConfig);
+}
+
+Status ResourceManagerServiceNew::notifyClientConfigChanged(
+        const ClientConfigParcel& clientConfig) {
+    {
+        // Update the ResourceTracker about the change in the configuration.
+        std::scoped_lock lock{mLock};
+        mResourceTracker->updateResource(clientConfig.clientInfo);
+    }
+    return ResourceManagerService::notifyClientConfigChanged(clientConfig);
+}
+
+void ResourceManagerServiceNew::getResourceDump(std::string& resourceLog) const {
+    std::scoped_lock lock{mLock};
+    mResourceTracker->dump(resourceLog);
+}
+
+binder_status_t ResourceManagerServiceNew::dump(int fd, const char** args, uint32_t numArgs) {
+    return ResourceManagerService::dump(fd, args, numArgs);
+}
+
+bool ResourceManagerServiceNew::getTargetClients(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) {
+    int32_t callingPid = clientInfo.pid;
+    std::scoped_lock lock{mLock};
+    if (!mProcessInfo->isPidTrusted(callingPid)) {
+        pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+                callingPid, actualCallingPid);
+        callingPid = actualCallingPid;
+    }
+
+    // Use the Resource Model to get a list of all the clients that hold the
+    // needed/requested resources.
+    uint32_t callingImportance = std::max(0, clientInfo.importance);
+    ReclaimRequestInfo reclaimRequestInfo{callingPid, clientInfo.id, callingImportance, resources};
+    std::vector<ClientInfo> clients;
+    if (!mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients)) {
+        if (clients.empty()) {
+            ALOGI("%s: There aren't any clients with given resources. Nothing to reclaim",
+                  __func__);
+            return false;
+        }
+        // Since there was a conflict, we need to reclaim all clients.
+        targetClients = std::move(clients);
+    } else {
+        // Select a client among those that have the needed resources.
+        getClientForResource_l(reclaimRequestInfo, clients, targetClients);
+    }
+    return !targetClients.empty();
+}
+
+void ResourceManagerServiceNew::getClientForResource_l(
+        const ReclaimRequestInfo& reclaimRequestInfo,
+        const std::vector<ClientInfo>& clients,
+        std::vector<ClientInfo>& targetClients) {
+    int callingPid = reclaimRequestInfo.mCallingPid;
+
+    // Before looking into other processes, check if we have clients marked for
+    // pending removal in the same process.
+    ClientInfo targetClient;
+    for (const MediaResourceParcel& resource : reclaimRequestInfo.mResources) {
+        if (mResourceTracker->getBiggestClientPendingRemoval(callingPid, resource.type,
+                                                             resource.subType, targetClient)) {
+            targetClients.emplace_back(targetClient);
+            return;
+        }
+    }
+
+    // Run through all the reclaim policies until a client to reclaim from is identified.
+    for (std::unique_ptr<IReclaimPolicy>& reclaimPolicy : mReclaimPolicies) {
+        if (reclaimPolicy->getClients(reclaimRequestInfo, clients, targetClients)) {
+            return;
+        }
+    }
+}
+
+bool ResourceManagerServiceNew::getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    if (resourceRequestInfo.mResource == nullptr) {
+        return false;
+    }
+
+    // Use the DefaultResourceModel to get all the clients with the resources requested.
+    std::vector<MediaResourceParcel> resources{*resourceRequestInfo.mResource};
+    ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid,
+                                          resourceRequestInfo.mClientId,
+                                          0, // default importance
+                                          resources};
+    std::vector<ClientInfo> clients;
+    mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients);
+
+    // Use the ProcessPriorityReclaimPolicy to select a client to reclaim from.
+    std::unique_ptr<IReclaimPolicy> reclaimPolicy
+        = std::make_unique<ProcessPriorityReclaimPolicy>(mResourceTracker);
+    std::vector<ClientInfo> targetClients;
+    if (reclaimPolicy->getClients(reclaimRequestInfo, clients, targetClients)) {
+        if (!targetClients.empty()) {
+            clientInfo = targetClients[0];
+            return true;
+        }
+    }
+
+    return false;
+}
+
+bool ResourceManagerServiceNew::getPriority_l(int pid, int* priority) const {
+    return mResourceTracker->getPriority(pid, priority);
+}
+
+bool ResourceManagerServiceNew::getLowestPriorityPid_l(
+        MediaResource::Type type, MediaResource::SubType subType,
+        int* lowestPriorityPid, int* lowestPriority) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    return mResourceTracker->getLowestPriorityPid(type, subType,
+                                                  *lowestPriorityPid,
+                                                  *lowestPriority);
+}
+
+bool ResourceManagerServiceNew::getAllClients_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        std::vector<ClientInfo>& clientsInfo) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    // Get the list of all clients that hold the requested resources.
+    std::vector<ClientInfo> clients;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    // Check whether any higher (or equal) priority process is already holding the resources.
+    for (const ClientInfo& info : clients) {
+        if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, info.mPid)) {
+            // some higher/equal priority process owns the resource,
+            // this request can't be fulfilled.
+            ALOGE("%s: can't reclaim resource %s from pid %d", __func__, asString(type), info.mPid);
+            return false;
+        }
+        clientsInfo.emplace_back(info);
+    }
+    if (clientsInfo.size() == 0) {
+        ALOGV("%s: didn't find any resource %s", __func__, asString(type));
+    }
+    return true;
+}
+
+std::shared_ptr<IResourceManagerClient> ResourceManagerServiceNew::getClient_l(
+        int pid, const int64_t& clientId) const {
+    return mResourceTracker->getClient(pid, clientId);
+}
+
+bool ResourceManagerServiceNew::removeClient_l(int pid, const int64_t& clientId) {
+    return mResourceTracker->removeClient(pid, clientId);
+}
+
+const std::map<int, ResourceInfos>& ResourceManagerServiceNew::getResourceMap() const {
+    return mResourceTracker->getResourceMap();
+}
+
+void ResourceManagerServiceNew::setReclaimPolicy(bool processPriority, bool clientImportance) {
+    mReclaimPolicies.clear();
+    if (processPriority) {
+        // Process priority (oom score) as the Default reclaim policy.
+        mReclaimPolicies.push_back(std::make_unique<ProcessPriorityReclaimPolicy>(
+            mResourceTracker));
+    }
+    if (clientImportance) {
+        mReclaimPolicies.push_back(std::make_unique<ClientImportanceReclaimPolicy>(
+            mResourceTracker));
+    }
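+    // Note: getClientForResource_l() consults mReclaimPolicies in insertion order,
+    // so when both policies are enabled the process-priority policy is tried
+    // before the client-importance policy.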
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.h b/services/mediaresourcemanager/ResourceManagerServiceNew.h
new file mode 100644
index 0000000..0599936
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.h
@@ -0,0 +1,174 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
+#define ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+class IReclaimPolicy;
+class IResourceModel;
+class ResourceTracker;
+
+//
+// A newer implementation of IResourceManagerService, which
+// eventually will replace the older implementation in ResourceManagerService.
+//
+// To make the transition easier, this implementation overrides the
+// private virtual methods from ResourceManagerService.
+//
+// This implementation is devised to abstract and integrate:
+//   - resources into an independent abstraction
+//   - resource model as a separate interface (and implementation)
+//   - reclaim policy as a separate interface (and implementation)
+//
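+//
+// A rough sketch of how a reclaim request flows through this implementation
+// (local variable names below are illustrative only):
+//
+//   std::vector<ClientInfo> clients, targetClients;
+//   ReclaimRequestInfo request{callingPid, clientId, importance, resources};
+//   // The resource model collects every client holding the requested resources...
+//   mDefaultResourceModel->getAllClients(request, clients);
+//   // ...and the reclaim policies pick the actual reclaim targets, in order.
+//   for (std::unique_ptr<IReclaimPolicy>& policy : mReclaimPolicies) {
+//       if (policy->getClients(request, clients, targetClients)) {
+//           break;
+//       }
+//   }
+//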
+class ResourceManagerServiceNew : public ResourceManagerService {
+public:
+
+    explicit ResourceManagerServiceNew(const sp<ProcessInfoInterface>& processInfo,
+                                       const sp<SystemCallbackInterface>& systemResource);
+    virtual ~ResourceManagerServiceNew();
+
+    // IResourceManagerService interface
+    Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
+
+    Status addResource(const ClientInfoParcel& clientInfo,
+                       const std::shared_ptr<IResourceManagerClient>& client,
+                       const std::vector<MediaResourceParcel>& resources) override;
+
+    Status removeResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
+
+    Status removeClient(const ClientInfoParcel& clientInfo) override;
+
+    Status reclaimResource(const ClientInfoParcel& clientInfo,
+                           const std::vector<MediaResourceParcel>& resources,
+                           bool* _aidl_return) override;
+
+    Status overridePid(int32_t originalPid, int32_t newPid) override;
+
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                               int32_t pid, int32_t procState, int32_t oomScore) override;
+
+    Status markClientForPendingRemoval(const ClientInfoParcel& clientInfo) override;
+
+    Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
+
+    Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+    Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
+
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+    friend class ResourceTracker;
+
+private:
+
+    // Set up the Resource models.
+    void setUpResourceModels();
+
+    // Set up the Reclaim Policies.
+    void setUpReclaimPolicies();
+
+    // From the list of clients, pick/select client(s) based on the reclaim policy.
+    void getClientForResource_l(
+        const ReclaimRequestInfo& reclaimRequestInfo,
+        const std::vector<ClientInfo>& clients,
+        std::vector<ClientInfo>& targetClients);
+
+    // Initializes the internal state of the ResourceManagerService
+    void init() override;
+
+    void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService) override;
+
+    // Gets the list of all the clients who own the specified resource type.
+    // Returns false if any client belongs to a process with higher priority than the
+    // calling process. The clients will remain unchanged if it returns false.
+    bool getTargetClients(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) override;
+
+    // Removes the pid from the override map.
+    void removeProcessInfoOverride(int pid) override;
+
+    // override the pid of given process
+    bool overridePid_l(int32_t originalPid, int32_t newPid) override;
+
+    // override the process info of given process
+    bool overrideProcessInfo_l(const std::shared_ptr<IResourceManagerClient>& client,
+                               int pid, int procState, int oomScore) override;
+
+    // Get priority from process's pid
+    bool getPriority_l(int pid, int* priority) const override;
+
+    // Get the client for the given pid and clientId from the map
+    std::shared_ptr<IResourceManagerClient> getClient_l(
+        int pid, const int64_t& clientId) const override;
+
+    // Remove the client for the given pid and clientId from the map
+    bool removeClient_l(int pid, const int64_t& clientId) override;
+
+    // Get the status of all resources for the dump
+    void getResourceDump(std::string& resourceLog) const override;
+
+    // Returns an unmodifiable reference to the internal resource state as a map
+    const std::map<int, ResourceInfos>& getResourceMap() const override;
+
+    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid) override;
+
+    // The following utility functions are used only for testing by ResourceManagerServiceTest
+    // START: TEST only functions
+    // Gets the list of all the clients who own the specified resource type.
+    // Returns false if any client belongs to a process with higher priority than the
+    // calling process. The clients will remain unchanged if it returns false.
+    bool getAllClients_l(const ResourceRequestInfo& resourceRequestInfo,
+                         std::vector<ClientInfo>& clientsInfo) override;
+
+    // Gets the client who owns the specified resource type from the lowest possible priority
+    // process. Returns false if the calling process priority is not higher than the lowest
+    // process priority. The client will remain unchanged if it returns false.
+    bool getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo) override;
+
+    // Gets the lowest priority process that has the specified resource type.
+    // Returns false on failure; the output parameters remain unchanged in that case.
+    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType,
+                                int* lowestPriorityPid, int* lowestPriority) override;
+
+    // Enable/disable process-priority-based reclaim and client-importance-based reclaim
+    void setReclaimPolicy(bool processPriority, bool clientImportance) override;
+    // END: TEST only functions
+
+private:
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+    std::unique_ptr<IResourceModel> mDefaultResourceModel;
+    std::vector<std::unique_ptr<IReclaimPolicy>> mReclaimPolicies;
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index de682f8..679ab13 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -19,11 +19,101 @@
 #define LOG_TAG "ResourceManagerServiceUtils"
 #include <utils/Log.h>
 
+#include <binder/IServiceManager.h>
+
+#include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
 #include "ResourceManagerServiceUtils.h"
 
 namespace android {
 
+bool ResourceList::add(const MediaResourceParcel& res, bool* isNewEntry) {
+    // See if it's an existing entry, if so, merge it.
+    for (MediaResourceParcel& item : mResourceList) {
+        if (item.type == res.type && item.subType == res.subType && item.id == res.id) {
+            // We already have an item. Merge them and return
+            mergeResources(item, res);
+            return true;
+        }
+    }
+
+    // Since we haven't found this resource yet, it is a new entry.
+    // We can't init a new entry with a negative value, although it's allowed
+    // to merge in negative values after the initial add.
+    if (res.value <= 0) {
+        ALOGW("Ignoring request to add new resource entry with value <= 0");
+        return false;
+    }
+    if (isNewEntry) {
+        *isNewEntry = true;
+    }
+    mResourceList.push_back(res);
+    return true;
+}
+
+void ResourceList::addOrUpdate(const MediaResourceParcel& res) {
+    // See if it's an existing entry, just update the value.
+    for (MediaResourceParcel& item : mResourceList) {
+        if (item.type == res.type && item.subType == res.subType && item.id == res.id) {
+            item.value = res.value;
+            return;
+        }
+    }
+
+    // Add the new entry.
+    mResourceList.push_back(res);
+}
+
+bool ResourceList::remove(const MediaResourceParcel& res, long* removedEntryValue) {
+    // Make sure we have an entry for this resource.
+    for (std::vector<MediaResourceParcel>::iterator it = mResourceList.begin();
+         it != mResourceList.end(); it++) {
+        if (it->type == res.type && it->subType == res.subType && it->id == res.id) {
+            if (it->value > res.value) {
+                // Subtract the resource value by given value.
+                it->value -= res.value;
+            } else {
+                // This entry will be removed.
+                if (removedEntryValue) {
+                    *removedEntryValue = it->value;
+                }
+                mResourceList.erase(it);
+            }
+            return true;
+        }
+    }
+
+    // No such entry.
+    return false;
+}
+
+std::string ResourceList::toString() const {
+    std::string str;
+    for (const ::aidl::android::media::MediaResourceParcel& res : mResourceList) {
+        str.append(android::toString(res).c_str());
+        str.append("\n");
+    }
+
+    return std::move(str);
+}
+
+bool ResourceList::operator==(const ResourceList& rhs) const {
+    // Make sure the size is the same.
+    if (mResourceList.size() != rhs.mResourceList.size()) {
+        return false;
+    }
+
+    // Create a set from this object and check for the items from the rhs.
+    std::set<::aidl::android::media::MediaResourceParcel> lhs(
+            mResourceList.begin(), mResourceList.end());
+    for (const ::aidl::android::media::MediaResourceParcel& res : rhs.mResourceList) {
+        if (lhs.find(res) == lhs.end()) {
+            return false;
+        }
+    }
+    return true;
+}
+
 // Bunch of utility functions that looks for a specific Resource.
 // Check whether a given resource (of type and subtype) is found in given resource parcel.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
@@ -50,8 +140,8 @@
 // Check whether a given resource (of type and subtype) is found in given resource list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
                      const ResourceList& resources) {
-    for (auto it = resources.begin(); it != resources.end(); it++) {
-        if (hasResourceType(type, subType, it->second)) {
+    for (const MediaResourceParcel& res : resources.getResources()) {
+        if (hasResourceType(type, subType, res)) {
             return true;
         }
     }
@@ -88,7 +178,6 @@
                                      const std::shared_ptr<IResourceManagerClient>& client,
                                      ResourceInfos& infos) {
     ResourceInfos::iterator found = infos.find(clientInfo.id);
-
     if (found == infos.end()) {
         ResourceInfo info{.pid = clientInfo.pid,
                           .uid = static_cast<uid_t>(clientInfo.uid),
@@ -96,7 +185,8 @@
                           .name = clientInfo.name.empty()? "<unknown client>" : clientInfo.name,
                           .client = client,
                           .deathNotifier = nullptr,
-                          .pendingRemoval = false};
+                          .pendingRemoval = false,
+                          .importance = static_cast<uint32_t>(std::max(0, clientInfo.importance))};
         auto [it, inserted] = infos.emplace(clientInfo.id, info);
         found = it;
     }
@@ -202,4 +292,30 @@
     return deathNotifier;
 }
 
+void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
+    static const char* const kServiceName = "media_resource_monitor";
+    sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
+    if (binder != NULL) {
+        sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
+        for (size_t i = 0; i < resources.size(); ++i) {
+            switch (resources[i].subType) {
+                case MediaResource::SubType::kHwAudioCodec:
+                case MediaResource::SubType::kSwAudioCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
+                    break;
+                case MediaResource::SubType::kHwVideoCodec:
+                case MediaResource::SubType::kSwVideoCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+                    break;
+                case MediaResource::SubType::kHwImageCodec:
+                case MediaResource::SubType::kSwImageCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
+                    break;
+                case MediaResource::SubType::kUnspecifiedSubType:
+                    break;
+            }
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
index ac1e410..e8f1515 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -18,6 +18,9 @@
 #ifndef ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
 
+#include <map>
+#include <set>
+#include <memory>
 #include <vector>
 
 #include <aidl/android/media/BnResourceManagerService.h>
@@ -97,10 +100,47 @@
     virtual void binderDied();
 };
 
-// A map of tuple(type, sub-type, id) and the resource parcel.
-typedef std::map<std::tuple<
-        MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
-        ::aidl::android::media::MediaResourceParcel> ResourceList;
+// Encapsulates the Resource List as a vector of resources instead of a map.
+// Since the number of resources is very limited, maintaining it as a
+// std::vector helps with both performance and memory requirements.
+struct ResourceList {
+    // Add or update an entry in the ResourceList.
+    // If a new entry is added, isNewEntry (when provided) is set to true upon return.
+    // Returns true on a successful add/update, false otherwise.
+    bool add(const ::aidl::android::media::MediaResourceParcel& res, bool* isNewEntry = nullptr);
+
+    // Reduce the resource usage by subtracting the given resource value.
+    // If the entry's value would drop to 0 (or below), that entry is removed and
+    // removedEntryValue (when provided) is set to the value it held before removal;
+    // otherwise removedEntryValue is left untouched.
+    // Returns true if a matching entry was found and updated or removed, false otherwise.
+    bool remove(const ::aidl::android::media::MediaResourceParcel& res,
+                long* removedEntryValue = nullptr);
+
+    // Returns true if there aren't any resource entries.
+    bool empty() const {
+        return mResourceList.empty();
+    }
+
+    // Returns the resource list as a non-modifiable vector
+    const std::vector<::aidl::android::media::MediaResourceParcel>& getResources() const {
+        return mResourceList;
+    }
+
+    // Converts resource list into string format
+    std::string toString() const;
+
+    // BEGIN: Test only functions
+    // Check if two resource lists are the same.
+    bool operator==(const ResourceList& rhs) const;
+
+    // Add or Update an entry into ResourceList.
+    void addOrUpdate(const ::aidl::android::media::MediaResourceParcel& res);
+    // END: Test only functions
+
+private:
+    std::vector<::aidl::android::media::MediaResourceParcel> mResourceList;
+};
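+
+// A minimal usage sketch of ResourceList (variable names below are illustrative only):
+//
+//   ResourceList list;
+//   bool isNewEntry = false;
+//   list.add(res, &isNewEntry);        // merges with an existing entry or appends a new one
+//   long removedValue = -1;
+//   list.remove(res, &removedValue);   // subtracts, erasing the entry once it is depleted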
 
 // Encapsulation for Resource Info, that contains
 // - pid of the app
@@ -120,16 +160,34 @@
     std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
     ResourceList resources;
     bool pendingRemoval{false};
+    uint32_t importance = 0;
+};
+
+/*
+ * Resource Reclaim request info that encapsulates
+ *  - the calling/requesting process pid.
+ *  - id of the client that made the reclaim request.
+ *  - the calling/requesting client's importance.
+ *  - the list of resources being requested (to be reclaimed from others).
+ */
+struct ReclaimRequestInfo {
+    int mCallingPid = -1;
+    int64_t mClientId = 0;
+    uint32_t mCallingClientImportance = 0;
+    const std::vector<::aidl::android::media::MediaResourceParcel>& mResources;
 };
 
 /*
  * Resource request info that encapsulates
  *  - the calling/requesting process pid.
+ *  - the calling/requesting client's id.
  *  - the resource requesting (to be reclaimed from others)
  */
 struct ResourceRequestInfo {
     // pid of the calling/requesting process.
     int mCallingPid = -1;
+    // id of the calling/requesting client.
+    int64_t mClientId = 0;
     // resources requested.
     const ::aidl::android::media::MediaResourceParcel* mResource;
 };
@@ -170,7 +228,7 @@
 
 //Check whether a given resource (of type and subtype) is found in given resource parcel.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-                     const MediaResourceParcel& resource);
+                     const ::aidl::android::media::MediaResourceParcel& resource);
 
 //Check whether a given resource (of type and subtype) is found in given resource list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
@@ -193,7 +251,13 @@
         ResourceInfos& infos);
 
 // Merge resources from r2 into r1.
-void mergeResources(MediaResourceParcel& r1, const MediaResourceParcel& r2);
+void mergeResources(::aidl::android::media::MediaResourceParcel& r1,
+                    const ::aidl::android::media::MediaResourceParcel& r2);
+
+// To notify the media_resource_monitor about the resource being granted.
+void notifyResourceGranted(
+        int pid,
+        const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
 
 } // namespace android
 
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 72e249f..21e61e9 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -286,9 +286,9 @@
     {
         std::scoped_lock lock{mObserverLock};
 
-        for (auto &res : resources) {
+        for (const MediaResourceParcel& res : resources.getResources()) {
             // Skip if this resource doesn't map to any observable type.
-            MediaObservableType observableType = getObservableType(res.second);
+            MediaObservableType observableType = getObservableType(res);
             if (observableType == MediaObservableType::kInvalid) {
                 continue;
             }
@@ -303,9 +303,9 @@
                 auto calleeIt = calleeList.find(subscriber.first);
                 if (calleeIt == calleeList.end()) {
                     calleeList.emplace(subscriber.first, CalleeInfo{
-                        subscriber.second, {{observableType, res.second.value}}});
+                        subscriber.second, {{observableType, res.value}}});
                 } else {
-                    calleeIt->second.monitors.push_back({observableType, res.second.value});
+                    calleeIt->second.monitors.push_back({observableType, res.value});
                 }
             }
         }
diff --git a/services/mediaresourcemanager/ResourceTracker.cpp b/services/mediaresourcemanager/ResourceTracker.cpp
new file mode 100644
index 0000000..3ee20cd
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceTracker.cpp
@@ -0,0 +1,774 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceTracker"
+#include <utils/Log.h>
+
+#include <binder/IPCThreadState.h>
+#include <mediautils/ProcessInfo.h>
+#include "ResourceTracker.h"
+#include "ResourceManagerServiceNew.h"
+#include "ResourceObserverService.h"
+
+namespace android {
+
+inline bool isHwCodec(MediaResource::SubType subType) {
+    return subType == MediaResource::SubType::kHwImageCodec ||
+           subType == MediaResource::SubType::kHwVideoCodec;
+}
+
+// Check whether a given resource (of type and subtype) is found in given resource list
+// that also has the given Primary SubType.
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+                            const ResourceList& resources, MediaResource::SubType primarySubType) {
+    bool foundResource = false;
+    bool matchedPrimary =
+        (primarySubType == MediaResource::SubType::kUnspecifiedSubType) ?  true : false;
+    for (const MediaResourceParcel& res : resources.getResources()) {
+        if (hasResourceType(type, subType, res)) {
+            foundResource = true;
+        } else if (res.subType == primarySubType) {
+            matchedPrimary = true;
+        } else if (isHwCodec(res.subType) == isHwCodec(primarySubType)) {
+            matchedPrimary = true;
+        }
+        if (matchedPrimary && foundResource) {
+            return true;
+        }
+    }
+    return false;
+}
+
+// See if the given client is already in the list of clients.
+inline bool contains(const std::vector<ClientInfo>& clients, const int64_t& clientId) {
+    std::vector<ClientInfo>::const_iterator found =
+        std::find_if(clients.begin(), clients.end(),
+                     [clientId](const ClientInfo& client) -> bool {
+                         return client.mClientId == clientId;
+                     });
+
+    return found != clients.end();
+}
+
+
+ResourceTracker::ResourceTracker(const std::shared_ptr<ResourceManagerServiceNew>& service,
+                                 const sp<ProcessInfoInterface>& processInfo) :
+        mService(service),
+        mProcessInfo(processInfo) {
+}
+
+ResourceTracker::~ResourceTracker() {
+}
+
+void ResourceTracker::setResourceObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    mObserverService = observerService;
+}
+
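+// Returns the ResourceInfos entry for the given pid, creating an empty entry
+// if the pid isn't tracked yet.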
+ResourceInfos& ResourceTracker::getResourceInfosForEdit(int pid) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        // new pid
+        ResourceInfos infosForPid;
+        auto [it, inserted] = mMap.emplace(pid, infosForPid);
+        found = it;
+    }
+
+    return found->second;
+}
+
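+// Adds (or merges) the given resources into the client's entry, creating the
+// process/client bookkeeping as needed, setting up a death notifier for the
+// client on first use and notifying the ResourceObserverService about the
+// resources that were actually added. Returns true if any resource was added.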
+bool ResourceTracker::addResource(const ClientInfoParcel& clientInfo,
+                                  const std::shared_ptr<IResourceManagerClient>& client,
+                                  const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+
+    if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        ALOGW("%s called with untrusted pid %d or uid %d, using calling pid %d, uid %d",
+                __func__, pid, uid, callingPid, callingUid);
+        pid = callingPid;
+        uid = callingUid;
+    }
+    ResourceInfos& infos = getResourceInfosForEdit(pid);
+    ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
+    ResourceList resourceAdded;
+
+    for (const MediaResourceParcel& res : resources) {
+        if (res.value < 0 && res.type != MediaResource::Type::kDrmSession) {
+            ALOGV("%s: Ignoring request to remove negative value of non-drm resource", __func__);
+            continue;
+        }
+        bool isNewEntry = false;
+        if (!info.resources.add(res, &isNewEntry)) {
+            continue;
+        }
+        if (isNewEntry) {
+            onFirstAdded(res, info.uid);
+        }
+
+        // Add it to the list of added resources for observers.
+        resourceAdded.add(res);
+    }
+    if (info.deathNotifier == nullptr && client != nullptr) {
+        info.deathNotifier = DeathNotifier::Create(client, mService, clientInfo);
+    }
+    if (mObserverService != nullptr && !resourceAdded.empty()) {
+        mObserverService->onResourceAdded(uid, pid, resourceAdded);
+    }
+
+    return !resourceAdded.empty();
+}
+
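+// Updates the importance of an already-tracked client from the given ClientInfoParcel.
+// Returns false if the client isn't found.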
+bool ResourceTracker::updateResource(const aidl::android::media::ClientInfoParcel& clientInfo) {
+    ResourceInfos& infos = getResourceInfosForEdit(clientInfo.pid);
+
+    ResourceInfos::iterator found = infos.find(clientInfo.id);
+    if (found == infos.end()) {
+        return false;
+    }
+    // Update the client importance.
+    found->second.importance = std::max(0, clientInfo.importance);
+    return true;
+}
+
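+// Removes (or reduces) the given resources from the client's entry and notifies
+// the ResourceObserverService about the resources that were actually removed.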
+bool ResourceTracker::removeResource(const ClientInfoParcel& clientInfo,
+                                     const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfo& info = foundClient->second;
+    ResourceList resourceRemoved;
+    for (const MediaResourceParcel& res : resources) {
+        if (res.value < 0) {
+            ALOGV("%s: Ignoring request to remove negative value of resource", __func__);
+            continue;
+        }
+
+        long removedEntryValue = -1;
+        if (info.resources.remove(res, &removedEntryValue)) {
+            MediaResourceParcel actualRemoved = res;
+            if (removedEntryValue != -1) {
+                onLastRemoved(res, info.uid);
+                actualRemoved.value = removedEntryValue;
+            }
+
+            // Add it to the list of removed resources for observers.
+            resourceRemoved.add(actualRemoved);
+        }
+    }
+    if (mObserverService != nullptr && !resourceRemoved.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, resourceRemoved);
+    }
+    return true;
+}
+
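+// Removes the client entirely: reports all of its resources as removed to the
+// ResourceObserverService and erases the client from the tracker.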
+bool ResourceTracker::removeResource(const ClientInfoParcel& clientInfo, bool validateCallingPid) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (validateCallingPid && !mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    const ResourceInfo& info = foundClient->second;
+    for (const MediaResourceParcel& res : info.resources.getResources()) {
+        onLastRemoved(res, info.uid);
+    }
+
+    if (mObserverService != nullptr && !info.resources.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, info.resources);
+    }
+
+    infos.erase(foundClient);
+    return true;
+}
+
+std::shared_ptr<IResourceManagerClient> ResourceTracker::getClient(
+        int pid, const int64_t& clientId) const {
+    std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return nullptr;
+    }
+
+    const ResourceInfos& infos = found->second;
+    ResourceInfos::const_iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return nullptr;
+    }
+
+    return foundClient->second.client;
+}
+
+bool ResourceTracker::removeClient(int pid, const int64_t& clientId) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    infos.erase(foundClient);
+    return true;
+}
+
+bool ResourceTracker::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long)clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfo& info = foundClient->second;
+    info.pendingRemoval = true;
+    return true;
+}
+
+bool ResourceTracker::getClientsMarkedPendingRemoval(int32_t pid,
+                                                     std::vector<ClientInfo>& targetClients) {
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__, pid, callingPid);
+        pid = callingPid;
+    }
+
+    // Go through all the MediaResource types (and corresponding subtypes for
+    // each, if applicable) and see if the process (with given pid) holds any
+    // such resources that are marked as pending removal.
+    // Since the use case of this function is to get all such resources (pending
+    // removal) and reclaim them all, the order in which we look for the
+    // resource type doesn't matter.
+    for (MediaResource::Type type : {MediaResource::Type::kSecureCodec,
+                                     MediaResource::Type::kNonSecureCodec,
+                                     MediaResource::Type::kGraphicMemory,
+                                     MediaResource::Type::kDrmSession}) {
+        switch (type) {
+        // Codec resources are segregated by audio, video and image domains.
+        case MediaResource::Type::kSecureCodec:
+        case MediaResource::Type::kNonSecureCodec:
+            for (MediaResource::SubType subType : {MediaResource::SubType::kHwAudioCodec,
+                                                   MediaResource::SubType::kSwAudioCodec,
+                                                   MediaResource::SubType::kHwVideoCodec,
+                                                   MediaResource::SubType::kSwVideoCodec,
+                                                   MediaResource::SubType::kHwImageCodec,
+                                                   MediaResource::SubType::kSwImageCodec}) {
+                ClientInfo clientInfo;
+                if (getBiggestClientPendingRemoval(pid, type, subType, clientInfo)) {
+                    if (!contains(targetClients, clientInfo.mClientId)) {
+                        targetClients.emplace_back(clientInfo);
+                    }
+                    continue;
+                }
+            }
+            break;
+        // Non-codec resources are shared by audio, video and image codecs (no subtype).
+        default:
+            ClientInfo clientInfo;
+            MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+            if (getBiggestClientPendingRemoval(pid, type, subType, clientInfo)) {
+                if (!contains(targetClients, clientInfo.mClientId)) {
+                    targetClients.emplace_back(clientInfo);
+                }
+            }
+            break;
+        }
+    }
+
+    return true;
+}
+
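+// Replaces any existing pid override for originalPid; a newPid of -1 clears the
+// override. Returns true only when a new override was installed.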
+bool ResourceTracker::overridePid(int originalPid, int newPid) {
+    mOverridePidMap.erase(originalPid);
+    if (newPid != -1) {
+        mOverridePidMap.emplace(originalPid, newPid);
+        return true;
+    }
+    return false;
+}
+
+bool ResourceTracker::overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                                          int pid, int procState, int oomScore) {
+    removeProcessInfoOverride(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return false;
+    }
+
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
+    std::shared_ptr<DeathNotifier> deathNotifier =
+        DeathNotifier::Create(client, mService, clientInfo, true);
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
+
+    return true;
+}
+
+void ResourceTracker::removeProcessInfoOverride(int pid) {
+    auto it = mProcessInfoOverrideMap.find(pid);
+    if (it == mProcessInfoOverrideMap.end()) {
+        return;
+    }
+
+    mProcessInfo->removeProcessInfoOverride(pid);
+    mProcessInfoOverrideMap.erase(pid);
+}
+
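+// Collects every tracked client that holds the requested resource, optionally
+// restricted to the given primary subtype. Returns true if at least one client
+// was added to the output list.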
+bool ResourceTracker::getAllClients(const ResourceRequestInfo& resourceRequestInfo,
+                                    std::vector<ClientInfo>& clients,
+                                    MediaResource::SubType primarySubType) {
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
+    bool foundClient = false;
+
+    for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        for (auto& [id, /* ResourceInfo */ info] : infos) {
+            if (hasResourceType(type, subType, info.resources, primarySubType)) {
+                if (!contains(clients, info.clientId)) {
+                    clients.emplace_back(info.pid, info.uid, info.clientId);
+                    foundClient = true;
+                }
+            }
+        }
+    }
+
+    return foundClient;
+}
+
+bool ResourceTracker::getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                                           int& lowestPriorityPid, int& lowestPriority) {
+    int pid = -1;
+    int priority = -1;
+    for (auto& [tempPid, /* ResourceInfos */ infos] : mMap) {
+        if (infos.size() == 0) {
+            // no client on this process.
+            continue;
+        }
+        if (!hasResourceType(type, subType, infos)) {
+            // doesn't have the requested resource type
+            continue;
+        }
+        int tempPriority = -1;
+        if (!getPriority(tempPid, &tempPriority)) {
+            ALOGV("%s: can't get priority of pid %d, skipped", __func__, tempPid);
+            // TODO: remove this pid from mMap?
+            continue;
+        }
+        if (pid == -1 || tempPriority > priority) {
+            // initialize the value
+            pid = tempPid;
+            priority = tempPriority;
+        }
+    }
+
+    bool success = (pid != -1);
+
+    if (success) {
+        lowestPriorityPid = pid;
+        lowestPriority = priority;
+    }
+    return success;
+}
+
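+// Same as above, but restricted to the processes owning the given clients and,
+// when specified, to resources matching the primary subtype.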
+bool ResourceTracker::getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                                           MediaResource::SubType primarySubType,
+                                           const std::vector<ClientInfo>& clients,
+                                           int& lowestPriorityPid, int& lowestPriority) {
+    int pid = -1;
+    int priority = -1;
+    for (const ClientInfo& client : clients) {
+        const ResourceInfo* info = getResourceInfo(client.mPid, client.mClientId);
+        if (info == nullptr) {
+            continue;
+        }
+        if (!hasResourceType(type, subType, info->resources, primarySubType)) {
+            // doesn't have the requested resource type
+            continue;
+        }
+        int tempPriority = -1;
+        if (!getPriority(client.mPid, &tempPriority)) {
+            ALOGV("%s: can't get priority of pid %d, skipped", __func__, client.mPid);
+            // TODO: remove this pid from mMap?
+            continue;
+        }
+        if (pid == -1 || tempPriority > priority) {
+            // initialize the value
+            pid = client.mPid;
+            priority = tempPriority;
+        }
+    }
+
+    bool success = (pid != -1);
+
+    if (success) {
+        lowestPriorityPid = pid;
+        lowestPriority = priority;
+    }
+    return success;
+}
+
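+// Among the clients of the given pid that are marked for pending removal, finds
+// the one holding the largest value of the requested resource (type/subType).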
+bool ResourceTracker::getBiggestClientPendingRemoval(int pid, MediaResource::Type type,
+                                                     MediaResource::SubType subType,
+                                                     ClientInfo& clientInfo) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        return false;
+    }
+
+    uid_t   uid = -1;
+    int64_t clientId = -1;
+    uint64_t largestValue = 0;
+    const ResourceInfos& infos = found->second;
+    for (const auto& [id, /* ResourceInfo */ info] : infos) {
+        const ResourceList& resources = info.resources;
+        // Skip if the client is not marked pending removal.
+        if (!info.pendingRemoval) {
+            continue;
+        }
+        for (const MediaResourceParcel& resource : resources.getResources()) {
+            if (hasResourceType(type, subType, resource)) {
+                if (resource.value > largestValue) {
+                    largestValue = resource.value;
+                    clientId = info.clientId;
+                    uid = info.uid;
+                }
+            }
+        }
+    }
+
+    if (clientId == -1) {
+        return false;
+    }
+
+    clientInfo.mPid = pid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
+    return true;
+}
+
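+// From the given list of clients, finds the client belonging to targetPid (and
+// matching primarySubType, when specified) that holds the largest value of the
+// requested resource (type/subType).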
+bool ResourceTracker::getBiggestClient(int targetPid,
+                                       MediaResource::Type type, MediaResource::SubType subType,
+                                       const std::vector<ClientInfo>& clients,
+                                       ClientInfo& clientInfo,
+                                       MediaResource::SubType primarySubType) {
+    uid_t   uid = -1;
+    int64_t clientId = -1;
+    uint64_t largestValue = 0;
+
+    for (const ClientInfo& client : clients) {
+        // Skip the clients that don't belong to the targetPid
+        if (client.mPid != targetPid) {
+            continue;
+        }
+        const ResourceInfo* info = getResourceInfo(client.mPid, client.mClientId);
+        if (info == nullptr) {
+            continue;
+        }
+
+        const ResourceList& resources = info->resources;
+        bool matchedPrimary =
+            (primarySubType == MediaResource::SubType::kUnspecifiedSubType);
+        for (const MediaResourceParcel& resource : resources.getResources()) {
+            if (resource.subType == primarySubType) {
+                matchedPrimary = true;
+                break;
+            } else if (isHwCodec(resource.subType) == isHwCodec(primarySubType)) {
+                matchedPrimary = true;
+                break;
+            }
+        }
+        // Primary type doesn't match, skip the client
+        if (!matchedPrimary) {
+            continue;
+        }
+        for (const MediaResourceParcel& resource : resources.getResources()) {
+            if (hasResourceType(type, subType, resource)) {
+                if (resource.value > largestValue) {
+                    largestValue = resource.value;
+                    clientId = info->clientId;
+                    uid = info->uid;
+                }
+            }
+        }
+    }
+
+    if (clientId == -1) {
+        ALOGE("%s: can't find resource type %s and subtype %s for pid %d",
+                 __func__, asString(type), asString(subType), targetPid);
+        return false;
+    }
+
+    clientInfo.mPid = targetPid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
+    return true;
+}
+
+bool ResourceTracker::getLeastImportantBiggestClient(int targetPid, int32_t importance,
+                                                     MediaResource::Type type,
+                                                     MediaResource::SubType subType,
+                                                     MediaResource::SubType primarySubType,
+                                                     const std::vector<ClientInfo>& clients,
+                                                     ClientInfo& clientInfo) {
+    uid_t   uid = -1;
+    int64_t clientId = -1;
+    uint64_t largestValue = 0;
+
+    for (const ClientInfo& client : clients) {
+        // Skip the clients that don't belong to the targetPid
+        if (client.mPid != targetPid) {
+            continue;
+        }
+        const ResourceInfo* info = getResourceInfo(client.mPid, client.mClientId);
+        if (info == nullptr) {
+            continue;
+        }
+
+        // Only consider clients that are less important (numerically larger importance).
+        if (info->importance <= importance) {
+            continue;
+        }
+        const ResourceList& resources = info->resources;
+        bool matchedPrimary =
+            (primarySubType == MediaResource::SubType::kUnspecifiedSubType);
+        for (const MediaResourceParcel& resource : resources.getResources()) {
+            if (resource.subType == primarySubType) {
+                matchedPrimary = true;
+            } else if (isHwCodec(resource.subType) == isHwCodec(primarySubType)) {
+                matchedPrimary = true;
+            }
+        }
+        // Primary type doesn't match, skip the client
+        if (!matchedPrimary) {
+            continue;
+        }
+        for (const MediaResourceParcel& resource : resources.getResources()) {
+            if (hasResourceType(type, subType, resource)) {
+                if (resource.value > largestValue) {
+                    largestValue = resource.value;
+                    clientId = info->clientId;
+                    uid = info->uid;
+                }
+            }
+        }
+    }
+
+    if (clientId == -1) {
+        ALOGE("%s: can't find resource type %s and subtype %s for pid %d",
+                 __func__, asString(type), asString(subType), targetPid);
+        return false;
+    }
+
+    clientInfo.mPid = targetPid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
+    return true;
+}
+
+void ResourceTracker::dump(std::string& resourceLogs) {
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    resourceLogs.append("  Processes:\n");
+    for (const auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        resourceLogs.append(buffer);
+        int priority = 0;
+        if (getPriority(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
+        } else {
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
+        }
+        resourceLogs.append(buffer);
+
+        for (const auto& [infoKey, /* ResourceInfo */ info] : infos) {
+            resourceLogs.append("      Client:\n");
+            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
+            resourceLogs.append(buffer);
+
+            std::string clientName = info.name;
+            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
+            resourceLogs.append(buffer);
+
+            const ResourceList& resources = info.resources;
+            resourceLogs.append("        Resources:\n");
+            resourceLogs.append(resources.toString());
+        }
+    }
+    resourceLogs.append("  Process Pid override:\n");
+    for (const auto& [oldPid, newPid] : mOverridePidMap) {
+        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n", oldPid, newPid);
+        resourceLogs.append(buffer);
+    }
+}
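
For reference, with a single tracked process and client the dump() routine above produces output shaped roughly as follows; the pid, priority, id, and name values here are purely illustrative, and the resource lines come from ResourceList::toString(), whose format is defined elsewhere:

      Processes:
        Pid: 1001
        Priority: 10
          Client:
            Id: 42
            Name: codec
            Resources:
            <output of ResourceList::toString()>
      Process Pid override:
        Original Pid: 1001,  Override Pid: 2002
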
+
+void ResourceTracker::onFirstAdded(const MediaResourceParcel& resource, uid_t uid) {
+    std::shared_ptr<ResourceManagerServiceNew> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("%s: ResourceManagerService is invalid!", __func__);
+        return;
+    }
+
+    service->onFirstAdded(resource, uid);
+}
+
+void ResourceTracker::onLastRemoved(const MediaResourceParcel& resource, uid_t uid) {
+    std::shared_ptr<ResourceManagerServiceNew> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("%s: ResourceManagerService is invalid!", __func__);
+        return;
+    }
+
+    service->onLastRemoved(resource, uid);
+}
+
+bool ResourceTracker::getPriority(int pid, int* priority) {
+    int newPid = pid;
+
+    if (mOverridePidMap.find(pid) != mOverridePidMap.end()) {
+        newPid = mOverridePidMap[pid];
+        ALOGD("%s: use override pid %d instead original pid %d", __func__, newPid, pid);
+    }
+
+    return mProcessInfo->getPriority(newPid, priority);
+}
+
+bool ResourceTracker::getNonConflictingClients(const ResourceRequestInfo& resourceRequestInfo,
+                                               std::vector<ClientInfo>& clients) {
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
+    for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        for (const auto& [id, /* ResourceInfo */ info] : infos) {
+            if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+                ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+                      __func__, id);
+                continue;
+            }
+            if (hasResourceType(type, subType, info.resources)) {
+                if (!isCallingPriorityHigher(resourceRequestInfo.mCallingPid, pid)) {
+                    // some higher/equal priority process owns the resource,
+                    // this is a conflict.
+                    ALOGE("%s: The resource (%s) request from pid %d is conflicting",
+                          __func__, asString(type), pid);
+                    clients.clear();
+                    return false;
+                } else {
+                    if (!contains(clients, info.clientId)) {
+                        clients.emplace_back(info.pid, info.uid, info.clientId);
+                    }
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+const ResourceInfo* ResourceTracker::getResourceInfo(int pid, const int64_t& clientId) const {
+    std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return nullptr;
+    }
+
+    const ResourceInfos& infos = found->second;
+    ResourceInfos::const_iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return nullptr;
+    }
+
+    return &foundClient->second;
+}
+
+bool ResourceTracker::isCallingPriorityHigher(int callingPid, int pid) {
+    int callingPidPriority;
+    if (!getPriority(callingPid, &callingPidPriority)) {
+        return false;
+    }
+
+    int priority;
+    if (!getPriority(pid, &priority)) {
+        return false;
+    }
+
+    return (callingPidPriority < priority);
+}
+
+} // namespace android
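
A note on the priority convention used above: getPriority() forwards to ProcessInfoInterface, where a numerically smaller value denotes a more important process (isCallingPriorityHigher() treats callingPidPriority < priority as "higher"). The "lowest priority" helpers therefore keep the pid with the largest priority value. A minimal, self-contained sketch of that selection rule, using a hypothetical pid-to-priority map rather than the real ProcessInfoInterface:

    #include <map>
    #include <utility>

    // Smaller priority value == more important process (hypothetical inputs).
    std::pair<int, int> lowestPriorityPid(const std::map<int, int>& priorities) {
        int pid = -1;
        int priority = -1;
        for (const auto& [p, prio] : priorities) {
            if (pid == -1 || prio > priority) {  // keep the numerically largest value
                pid = p;
                priority = prio;
            }
        }
        return {pid, priority};  // {-1, -1} when the map is empty
    }

    // Example: {{10, 0}, {25, 5}, {30, 10}} yields {30, 10}: pid 30 is the least important.
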
diff --git a/services/mediaresourcemanager/ResourceTracker.h b/services/mediaresourcemanager/ResourceTracker.h
new file mode 100644
index 0000000..20c904d
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceTracker.h
@@ -0,0 +1,256 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCETRACKER_H_
+#define ANDROID_MEDIA_RESOURCETRACKER_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+#include <media/MediaResource.h>
+#include <aidl/android/media/ClientInfoParcel.h>
+#include <aidl/android/media/IResourceManagerClient.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+#include "ResourceManagerServiceUtils.h"
+
+namespace android {
+
+class DeathNotifier;
+class ResourceManagerServiceNew;
+class ResourceObserverService;
+struct ProcessInfoInterface;
+struct ResourceRequestInfo;
+struct ClientInfo;
+
+/*
+ * ResourceTracker abstracts the resources managed by the ResourceManager.
+ * It keeps track of the resources used by each client (clientId) and each process (pid).
+ */
+class ResourceTracker {
+public:
+    ResourceTracker(const std::shared_ptr<ResourceManagerServiceNew>& service,
+                    const sp<ProcessInfoInterface>& processInfo);
+    ~ResourceTracker();
+
+    /**
+     * Add or update resources for |clientInfo|.
+     *
+     * If |clientInfo| is not tracked yet, it records its associated |client| and adds
+     * |resources| to the tracked resources. If |clientInfo| is already tracked,
+     * it updates the tracked resources by adding |resources| to them (|client| in
+     * this case is unused and unchecked).
+     *
+     * @param clientInfo Info of the calling client.
+     * @param client Interface for the client.
+     * @param resources An array of resources to be added.
+     *
+     * @return true upon successfully adding/updating the resources, false
+     * otherwise.
+     */
+    bool addResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                     const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
+                     const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
+
+    // Update the resource info, if there are any changes.
+    bool updateResource(const aidl::android::media::ClientInfoParcel& clientInfo);
+
+    // Remove a set of resources from the given client.
+    // returns true on success, false otherwise.
+    bool removeResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                        const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
+
+    /**
+     * Remove all resources tracked for |clientInfo|.
+     *
+     * If |validateCallingPid| is true, the calling process's pid is validated to belong
+     * to a trusted process.
+     * Returns true on success (|clientInfo| was tracked and, if requested, the caller
+     * was validated as a trusted process), false otherwise (|clientInfo| was not tracked,
+     * or the caller was not a trusted process).
+     */
+    bool removeResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                        bool validateCallingPid);
+
+    // Mark the client for pending removal.
+    // Such clients are primary candidates for reclaim.
+    // returns true on success, false otherwise.
+    bool markClientForPendingRemoval(const aidl::android::media::ClientInfoParcel& clientInfo);
+
+    // Get a list of clients that belong to the process with the given pid and are marked
+    // as pending removal by markClientForPendingRemoval.
+    // returns true on success, false otherwise.
+    bool getClientsMarkedPendingRemoval(int32_t pid, std::vector<ClientInfo>& targetClients);
+
+    // Override the pid of originalPid with newPid
+    // To remove the pid entry from the override list, set newPid as -1
+    // returns true on successful override, false otherwise.
+    bool overridePid(int originalPid, int newPid);
+
+    // Override the process info {state, oom score} of the process with pid.
+    // returns true on success, false otherwise.
+    bool overrideProcessInfo(
+            const std::shared_ptr<aidl::android::media::IResourceManagerClient>& client,
+            int pid, int procState, int oomScore);
+
+    // Remove the overridden process info.
+    void removeProcessInfoOverride(int pid);
+
+    // Find all clients that have given resources.
+    // If applicable, match the primary type too.
+    // The |clients| (list) isn't cleared by this function to allow calling this
+    // function multiple times for different resources.
+    // returns true upon finding at least one client with the given resource request info,
+    // false otherwise (no clients).
+    bool getAllClients(
+            const ResourceRequestInfo& resourceRequestInfo,
+            std::vector<ClientInfo>& clients,
+            MediaResource::SubType primarySubType = MediaResource::SubType::kUnspecifiedSubType);
+
+    // Look for the lowest priority process with the given resources.
+    // Upon success lowestPriorityPid and lowestPriority are
+    // set accordingly and it returns true.
+    // If no process with the given resources is found, it will return false
+    // without updating lowestPriorityPid and lowestPriority.
+    bool getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                              int& lowestPriorityPid, int& lowestPriority);
+
+    // Look for the lowest priority process with the given resources
+    // among the given client list.
+    // If applicable, match the primary type too.
+    // returns true on success, false otherwise.
+    bool getLowestPriorityPid(
+            MediaResource::Type type, MediaResource::SubType subType,
+            MediaResource::SubType primarySubType,
+            const std::vector<ClientInfo>& clients,
+            int& lowestPriorityPid, int& lowestPriority);
+
+    // Find the biggest client of the given process with given resources,
+    // that is marked as pending to be removed.
+    // returns true on success, false otherwise.
+    bool getBiggestClientPendingRemoval(
+            int pid, MediaResource::Type type,
+            MediaResource::SubType subType,
+            ClientInfo& clientInfo);
+
+    // Find the biggest client from the process pid, selecting them from the list of clients.
+    // If applicable, match the primary type too.
+    // Returns true when a client is found and clientInfo is updated accordingly.
+    // Upon failure to find a client, it will return false without updating clientInfo.
+    bool getBiggestClient(
+            int targetPid,
+            MediaResource::Type type,
+            MediaResource::SubType subType,
+            const std::vector<ClientInfo>& clients,
+            ClientInfo& clientInfo,
+            MediaResource::SubType primarySubType = MediaResource::SubType::kUnspecifiedSubType);
+
+    // Find the biggest client from the process pid, that has the least importance
+    // (than given importance) among the given list of clients.
+    // If applicable, match the primary type too.
+    // returns true on success, false otherwise.
+    bool getLeastImportantBiggestClient(int targetPid, int32_t importance,
+                                        MediaResource::Type type,
+                                        MediaResource::SubType subType,
+                                        MediaResource::SubType primarySubType,
+                                        const std::vector<ClientInfo>& clients,
+                                        ClientInfo& clientInfo);
+
+    // Find the client that belongs to the given process (pid) and has the given clientId.
+    // A nullptr is returned upon failure to find the client.
+    std::shared_ptr<::aidl::android::media::IResourceManagerClient> getClient(
+            int pid, const int64_t& clientId) const;
+
+    // Removes the client from the given process(pid) with the given clientId.
+    // returns true on success, false otherwise.
+    bool removeClient(int pid, const int64_t& clientId);
+
+    // Set the resource observer service, which is notified when resources
+    // are added and removed.
+    void setResourceObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
+
+    // Dump all the resource allocations for all the processes into a given string
+    void dump(std::string& resourceLogs);
+
+    // get the priority of the process.
+    // If we can't get the priority of the process (with given pid), it will
+    // return false.
+    bool getPriority(int pid, int* priority);
+
+    // Check if the given resource request has conflicting clients.
+    // The resource conflict is defined by the ResourceModel (such as
+    // co-existence of secure codec with another secure or non-secure codec).
+    // But here, the ResourceTracker only looks for resources from lower
+    // priority processes.
+    // If only processes of higher or the same priority hold the given resource,
+    // it will return false.
+    // Otherwise, it adds all such clients to the list of clients and returns true.
+    bool getNonConflictingClients(const ResourceRequestInfo& resourceRequestInfo,
+                                  std::vector<ClientInfo>& clients);
+
+    // Returns an unmodifiable reference to the resource map.
+    const std::map<int, ResourceInfos>& getResourceMap() const {
+        return mMap;
+    }
+
+private:
+    // Get ResourceInfos associated with the given process.
+    // If none exists, this method will create and associate an empty object and return it.
+    ResourceInfos& getResourceInfosForEdit(int pid);
+
+    // A helper function that returns true if the callingPid has higher priority than pid.
+    // Returns false otherwise.
+    bool isCallingPriorityHigher(int callingPid, int pid);
+
+    // Locate the resource info corresponding to the process pid and
+    // the client clientId.
+    const ResourceInfo* getResourceInfo(int pid, const int64_t& clientId) const;
+
+    // Notify when a resource is added for the first time.
+    void onFirstAdded(const MediaResourceParcel& resource, uid_t uid);
+    // Notify when a resource is removed for the last time.
+    void onLastRemoved(const MediaResourceParcel& resource, uid_t uid);
+
+private:
+    // Structure that defines process info that needs to be overridden.
+    struct ProcessInfoOverride {
+        std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+        std::shared_ptr<::aidl::android::media::IResourceManagerClient> client;
+    };
+
+    // Map of Resource information indexed through the process id.
+    std::map<int, ResourceInfos> mMap;
+    // A weak reference (to avoid cyclic dependency) to the ResourceManagerService.
+    // ResourceTracker uses this to communicate back with the ResourceManagerService.
+    std::weak_ptr<ResourceManagerServiceNew> mService;
+    // To notify the ResourceObserverService when resources are added or removed.
+    std::shared_ptr<ResourceObserverService> mObserverService;
+    // Map of original pid to its override pid.
+    std::map<int, int> mOverridePidMap;
+    // Map of pid to its overridden process info.
+    std::map<pid_t, ProcessInfoOverride> mProcessInfoOverrideMap;
+    // Interface that gets process specific information.
+    sp<ProcessInfoInterface> mProcessInfo;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCETRACKER_H_
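
For orientation, a hedged usage sketch of the API declared above, roughly as ResourceManagerServiceNew might drive it; the tracker/client variables and all pid, uid, id, and resource values are illustrative, and error handling is omitted:

    #include <memory>
    #include <vector>
    #include <media/MediaResource.h>
    #include "ResourceTracker.h"

    using ::aidl::android::media::ClientInfoParcel;
    using ::aidl::android::media::IResourceManagerClient;
    using ::aidl::android::media::MediaResourceParcel;

    void trackAndReclaimSketch(android::ResourceTracker& tracker,
                               const std::shared_ptr<IResourceManagerClient>& client) {
        // Register a secure codec for a hypothetical client.
        ClientInfoParcel info{.pid = 1234, .uid = 10001, .id = 1,
                              .name = "codec", .importance = 0};
        std::vector<MediaResourceParcel> resources;
        resources.push_back(android::MediaResource(android::MediaResource::Type::kSecureCodec, 1));
        tracker.addResource(info, client, resources);

        // During reclaim, pick the least important owner of secure codecs and its biggest client.
        int lowestPid = -1, lowestPriority = -1;
        if (tracker.getLowestPriorityPid(android::MediaResource::Type::kSecureCodec,
                                         android::MediaResource::SubType::kUnspecifiedSubType,
                                         lowestPid, lowestPriority)) {
            android::ClientInfo target;
            std::vector<android::ClientInfo> candidates;  // e.g. filled via getAllClients()
            tracker.getBiggestClient(lowestPid, android::MediaResource::Type::kSecureCodec,
                                     android::MediaResource::SubType::kUnspecifiedSubType,
                                     candidates, target);
        }
    }
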
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
index eb4bc42..aa14ace 100644
--- a/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
@@ -41,4 +41,11 @@
      * Name of the resource associated with the client.
      */
     @utf8InCpp String name;
+
+    /*
+     * Client importance, which ranges from 0 (highest) to int_max (lowest).
+     * The default importance is the highest (0).
+     * Depending on the reclaim policy, this may be used during reclaim.
+     */
+    int importance = 0;
 }
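
A minimal illustration of how a caller might populate the new field from the NDK backend, mirroring the test utilities later in this change (the pid/uid/id values are hypothetical):

    #include <aidl/android/media/ClientInfoParcel.h>

    ::aidl::android::media::ClientInfoParcel clientInfo{
            .pid = 1001,
            .uid = 10001,
            .id = 42,
            .name = "codec",
            .importance = 50};  // 0 is the highest importance; larger values are less important.
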
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index bbbc737..5bac062 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -43,6 +43,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "aconfig_mediacodec_flags_c_lib",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index f903c62..6a64823 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -12,7 +12,10 @@
     name: "ResourceManagerService_test",
     srcs: ["ResourceManagerService_test.cpp"],
     test_suites: ["device-tests"],
-    static_libs: ["libresourcemanagerservice"],
+    static_libs: [
+        "libresourcemanagerservice",
+        "aconfig_mediacodec_flags_c_lib",
+    ],
     shared_libs: [
         "libbinder",
         "libbinder_ndk",
@@ -24,6 +27,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "server_configurable_flags",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -62,6 +66,7 @@
     static_libs: [
         "libresourcemanagerservice",
         "resourceobserver_aidl_interface-V1-ndk",
+        "aconfig_mediacodec_flags_c_lib",
     ],
     shared_libs: [
         "libbinder",
@@ -74,6 +79,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "server_configurable_flags",
     ],
     include_dirs: [
         "frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 52d82b8..2c8659d 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -123,14 +123,16 @@
 
 
 struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, int uid, const std::shared_ptr<ResourceManagerService> &service)
-        : mPid(pid), mUid(uid), mService(service) {}
+    TestClient(int pid, int uid, int32_t clientImportance,
+               const std::shared_ptr<ResourceManagerService> &service)
+        : mPid(pid), mUid(uid), mClientImportance(clientImportance), mService(service) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                     .uid = static_cast<int32_t>(mUid),
                                     .id = getId(ref<TestClient>()),
-                                    .name = "none"};
+                                    .name = "none",
+                                    .importance = mClientImportance};
         mService->removeClient(clientInfo);
         mWasReclaimResourceCalled = true;
         *_aidl_return = true;
@@ -150,14 +152,20 @@
 
     virtual ~TestClient() {}
 
+    inline int pid() const { return mPid; }
+    inline int uid() const { return mUid; }
+    inline int32_t clientImportance() const { return mClientImportance; }
+
 private:
     bool mWasReclaimResourceCalled = false;
     int mPid;
     int mUid;
+    int32_t mClientImportance = 0;
     std::shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
 
+// [pid, uid] used by the test.
 static const int kTestPid1 = 30;
 static const int kTestUid1 = 1010;
 
@@ -168,6 +176,16 @@
 static const int kMidPriorityPid = 25;
 static const int kHighPriorityPid = 10;
 
+// Client Ids used by the test.
+static const int kLowPriorityClientId = 1111;
+static const int kMidPriorityClientId = 2222;
+static const int kHighPriorityClientId = 3333;
+
+// Client importance used by the test.
+static const int32_t kHighestCodecImportance = 0;
+static const int32_t kLowestCodecImportance = 100;
+static const int32_t kMidCodecImportance = 50;
+
 using EventType = TestSystemCallback::EventType;
 using EventEntry = TestSystemCallback::EventEntry;
 bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
@@ -198,8 +216,8 @@
         return static_cast<TestClient*>(testClient.get());
     }
 
-    ResourceManagerServiceTestBase() {
-        ALOGI("ResourceManagerServiceTestBase created");
+    ResourceManagerServiceTestBase(bool newRM = false) : mNewRM(newRM) {
+        ALOGI("ResourceManagerServiceTestBase created with %s RM", newRM ? "new" : "old");
     }
 
     void SetUp() override {
@@ -207,14 +225,19 @@
         // silently ignored.
         ABinderProcess_startThreadPool();
         mSystemCB = new TestSystemCallback();
-        mService = ResourceManagerService::Create(new TestProcessInfo, mSystemCB);
-        mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
-        mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
-        mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+        if (mNewRM) {
+            mService = ResourceManagerService::CreateNew(new TestProcessInfo, mSystemCB);
+        } else {
+            mService = ResourceManagerService::Create(new TestProcessInfo, mSystemCB);
+        }
+        mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, 0, mService);
+        mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, 0, mService);
+        mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, 0, mService);
     }
 
-    std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
-        return ::ndk::SharedRefBase::make<TestClient>(pid, uid, mService);
+    std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid,
+                                                             int32_t importance = 0) {
+        return ::ndk::SharedRefBase::make<TestClient>(pid, uid, importance, mService);
     }
 
     sp<TestSystemCallback> mSystemCB;
@@ -229,9 +252,7 @@
         // convert resource1 to ResourceList
         ResourceList r1;
         for (size_t i = 0; i < resources1.size(); ++i) {
-            const auto &res = resources1[i];
-            const auto resType = std::tuple(res.type, res.subType, res.id);
-            r1[resType] = res;
+            r1.addOrUpdate(resources1[i]);
         }
         return r1 == resources2;
     }
@@ -244,6 +265,8 @@
         EXPECT_EQ(client, info.client);
         EXPECT_TRUE(isEqualResources(resources, info.resources));
     }
+
+    bool mNewRM = false;
 };
 
 } // namespace android
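
With the new `newRM` constructor parameter above, the same test body can be exercised against the new ResourceManagerService implementation; a hypothetical sketch of such a fixture (the class name is illustrative, not necessarily the one used in the test file below; assumes ResourceManagerServiceTestUtils.h is included):

    class ResourceManagerServiceNewTest : public android::ResourceManagerServiceTestBase {
    public:
        // Passing true makes SetUp() call ResourceManagerService::CreateNew().
        ResourceManagerServiceNewTest() : ResourceManagerServiceTestBase(true /*newRM*/) {}
    };
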
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 8f05b13..027987e 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -77,8 +77,20 @@
     }
 
 public:
-    ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
+    ResourceManagerServiceTest(bool newRM = false) : ResourceManagerServiceTestBase(newRM) {}
 
+    void updateConfig(bool bSupportsMultipleSecureCodecs, bool bSupportsSecureWithNonSecureCodec) {
+        std::vector<MediaResourcePolicyParcel> policies;
+        policies.push_back(
+                MediaResourcePolicy(
+                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
+                        bSupportsMultipleSecureCodecs ? "true" : "false"));
+        policies.push_back(
+                MediaResourcePolicy(
+                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
+                        bSupportsSecureWithNonSecureCodec ? "true" : "false"));
+        mService->config(policies);
+    }
 
     // test set up
     // ---------------------------------------------------------------------------------
@@ -129,7 +141,7 @@
         resources3.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
         mService->addResource(client3Info, mTestClient3, resources3);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(2u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -159,7 +171,7 @@
         // Expected result:
         // 1) the client should have been added;
         // 2) both resource entries should have been rejected, resource list should be empty.
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -197,7 +209,6 @@
 
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, INT64_MIN));
-        expected.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, INT64_MIN));
         mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
@@ -213,29 +224,11 @@
         EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
 
-        std::vector<MediaResourcePolicyParcel> policies1;
-        policies1.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
-                        "true"));
-        policies1.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
-                        "false"));
-        mService->config(policies1);
+        updateConfig(true, false);
         EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_FALSE(mService->mSupportsSecureWithNonSecureCodec);
 
-        std::vector<MediaResourcePolicyParcel> policies2;
-        policies2.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
-                        "false"));
-        policies2.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
-                        "true"));
-        mService->config(policies2);
+        updateConfig(false, true);
         EXPECT_FALSE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
     }
@@ -254,7 +247,7 @@
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         mService->addResource(client1Info, mTestClient1, resources11);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -299,7 +292,7 @@
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         mService->addResource(client1Info, mTestClient1, resources11);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -339,13 +332,12 @@
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(false, true);
 
             // priority too low to reclaim resource
             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
                                         .uid = static_cast<int32_t>(kTestUid1),
-                                        .id = 0,
+                                        .id = kLowPriorityClientId,
                                         .name = "none"};
             CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
 
@@ -374,7 +366,7 @@
                                      .name = "none"};
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -402,7 +394,7 @@
 
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -428,7 +420,7 @@
 
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             mService->markClientForPendingRemoval(client2Info);
 
@@ -466,7 +458,7 @@
                                      .name = "none"};
         mService->removeClient(client2Info);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(2u, map.size());
         const ResourceInfos &infos1 = map.at(kTestPid1);
         const ResourceInfos &infos2 = map.at(kTestPid2);
@@ -483,9 +475,9 @@
         MediaResource resource(MediaResource::Type::kSecureCodec,
                                MediaResource::SubType::kUnspecifiedSubType,
                                1);
-        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
-        ResourceRequestInfo requestInfoMid { kMidPriorityPid, &resource};
-        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoMid { kMidPriorityPid, kMidPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
 
         EXPECT_FALSE(mService->getAllClients_l(requestInfoLow, targetClients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
@@ -499,6 +491,81 @@
         EXPECT_EQ(getId(mTestClient1), targetClients[1].mClientId);
     }
 
+    // test set up
+    // ---------------------------------------------------------------------------
+    //   pid/priority          client/clientId       type               number
+    // ---------------------------------------------------------------------------
+    //   kTestPid1(30)         mTestClient1          secure codec       1
+    //                                               graphic memory     200
+    //                                               graphic memory     200
+    // ---------------------------------------------------------------------------
+    //   kTestPid2(20)         mTestClient2          non-secure codec   1
+    //                                               graphic memory     300
+    //                         ---------------------------------------------------
+    //                         mTestClient3          secure codec       1
+    //                                               graphic memory     100
+    // ---------------------------------------------------------------------------
+    //   kHighPriorityPid(10)  kHighPriorityClient   secure codec       1
+    // ---------------------------------------------------------------------------
+    // kHighPriorityClient issues a reclaim request (after adding itself).
+    // This should pass (and shouldn't be treated as a new client trying to
+    // reclaim from an existing client of a same/higher priority process).
+    void testSelfReclaimResourceSecure() {
+        std::vector<MediaResourceParcel> resources;
+        resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+        resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
+
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                          .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = kLowPriorityClientId,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = kMidPriorityClientId,
+                                           .name = "none"};
+        // HighPriority process with client id kHighPriorityClientId.
+        ClientInfoParcel highPriorityClient1{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                             .uid = static_cast<int32_t>(kTestUid2),
+                                             .id = kHighPriorityClientId,
+                                             .name = "none"};
+        // HighPriority process with client id 0xABCD.
+        ClientInfoParcel highPriorityClient2{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                             .uid = static_cast<int32_t>(kTestUid2),
+                                             .id = 0xABCD,
+                                             .name = "none"};
+
+        addResource();
+
+        // Add a secure codec resource for the highPriorityClient1.
+        std::shared_ptr<IResourceManagerClient> testClient4 =
+            createTestClient(kHighPriorityPid, kTestUid2);
+        std::vector<MediaResourceParcel> resources1;
+        resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+        mService->addResource(highPriorityClient1, testClient4, resources1);
+
+        // secure codecs can't coexist and secure codec can't coexist with non-secure codec.
+        updateConfig(false, false);
+
+        // priority too low
+        CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+        CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
+
+        // highPriorityClient2 tries to reclaim SecureCodec with Graphic memory.
+        // This should fail as this process already has an instance of secure
+        // codec through testClient4.
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient2, resources, &result));
+
+        // highPriorityClient1 tries to reclaim SecureCodec with Graphic memory.
+        // This should reclaim all secure and non-secure codecs.
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient1, resources, &result));
+        EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
+
+        // Make sure there is nothing left.
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient1, resources, &result));
+    }
+
     void testReclaimResourceSecure() {
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -506,22 +573,21 @@
 
         ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
                                           .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kLowPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
                                            .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kMidPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
 
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(false, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -546,8 +612,7 @@
         // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(false, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -563,12 +628,10 @@
             CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
-
         // ### secure codecs can coexist but secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(true, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -599,8 +662,7 @@
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -630,8 +692,7 @@
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -663,21 +724,21 @@
 
         ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
                                           .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kLowPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
                                            .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kMidPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
 
         // ### secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(true, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -703,7 +764,7 @@
         // ### secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -733,7 +794,7 @@
         // ### secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -764,8 +825,8 @@
         MediaResource resource(MediaResource::Type::kGraphicMemory,
                                MediaResource::SubType::kUnspecifiedSubType,
                                1);
-        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
-        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(requestInfoHigh, clientInfo));
 
         addResource();
@@ -923,7 +984,7 @@
         reclaimResources.push_back(createNonSecureVideoCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -964,7 +1025,7 @@
         reclaimResources.push_back(createNonSecureAudioCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -1005,7 +1066,7 @@
         reclaimResources.push_back(createNonSecureImageCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -1047,7 +1108,7 @@
         reclaimResources.push_back(createGraphicMemoryResource(100));
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -1510,6 +1571,269 @@
              client3Config.width * client3Config.height));
         EXPECT_TRUE(currentPixelCountP2 == 0);
     }
+
+    void addNonSecureVideoCodecResource(std::shared_ptr<IResourceManagerClient>& client,
+                                        std::vector<ClientInfoParcel>& infos) {
+        std::vector<MediaResourceParcel> resources;
+        resources.push_back(createNonSecureVideoCodecResource(1));
+
+        TestClient* testClient = toTestClient(client);
+        ClientInfoParcel clientInfo {.pid = static_cast<int32_t>(testClient->pid()),
+                                     .uid = static_cast<int32_t>(testClient->uid()),
+                                     .id = getId(client),
+                                     .name = "none",
+                                     .importance = testClient->clientImportance()};
+        mService->addResource(clientInfo, client, resources);
+        infos.push_back(clientInfo);
+    }
+
+    bool doReclaimResource(const ClientInfoParcel& clientInfo) {
+        bool result = false;
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureVideoCodecResource(1));
+        bool success = mService->reclaimResource(clientInfo, reclaimResources, &result).isOk();
+        return success && result;
+    }
+
+    // Verifies the resource reclaim policies.
+    // This verifies the reclaim policies based on:
+    //   - process priority (oom score)
+    //   - client (codec) importance
+    void testReclaimPolicies() {
+        // Create 3 clients with codec importance high, mid and low for a low
+        // priority pid.
+        std::vector<std::shared_ptr<IResourceManagerClient>> lowPriPidClients;
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kHighestCodecImportance));
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kMidCodecImportance));
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kLowestCodecImportance));
+
+        // Create 3 clients with codec importance high, mid and low for a high
+        // priority pid.
+        std::vector<std::shared_ptr<IResourceManagerClient>> highPriPidClients;
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kHighestCodecImportance));
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kMidCodecImportance));
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kLowestCodecImportance));
+
+        // Add non secure video codec resources for all the 3 clients of low priority pid.
+        std::vector<ClientInfoParcel> lowPriPidClientInfos;
+        for (auto& client : lowPriPidClients) {
+            addNonSecureVideoCodecResource(client, lowPriPidClientInfos);
+        }
+        // Add non secure video codec resources for all the 3 clients of high priority pid.
+        std::vector<ClientInfoParcel> highPriPidClientInfos;
+        for (auto& client : highPriPidClients) {
+            addNonSecureVideoCodecResource(client, highPriPidClientInfos);
+        }
+
+        // 1. Set reclaim policy as "Process Priority".
+        // - A process should be reclaiming from:
+        //    - a lower priority process if there is any
+        //    - else fail.
+        mService->setReclaimPolicy(true /*process priority*/, false /*codec importance*/);
+
+        // 1.A:
+        // - high priority process should be able to reclaim successfully.
+        // - A process should be reclaiming from the low priority process.
+        EXPECT_TRUE(doReclaimResource(highPriPidClientInfos[0]));
+        // Verify that the high priority pid's clients are untouched.
+        bool success = true;
+        for (auto& client : highPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = false;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+        // Verify that one of the clients from the low priority pid has been reclaimed.
+        success = false;
+        for (auto& client : lowPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+
+        // 1.B:
+        // - low priority process should fail to reclaim.
+        EXPECT_FALSE(doReclaimResource(lowPriPidClientInfos[0]));
+
+        // 2. Set reclaim policy as "Client Importance".
+        // - A process should be reclaiming from:
+        //    - a lower priority client from the same process if any
+        //    - else fail.
+        mService->setReclaimPolicy(false /*process priority*/, true /*codec importance*/);
+
+        // 2.A:
+        // - high priority process should be able to reclaim successfully.
+        // - Process should be reclaiming from a lower priority client from the
+        // same process.
+        EXPECT_TRUE(doReclaimResource(highPriPidClientInfos[0]));
+        // Verify that the low priority pid's clients are untouched.
+        success = true;
+        for (auto& client : lowPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = false;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+        // Verify that one of the lower-importance clients from the high priority
+        // pid has been reclaimed.
+        success = false;
+        for (auto& client : highPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+
+        // 2.B:
+        // - low priority process should also be able to reclaim successfully.
+        // - The process should be reclaiming from a lower-importance client of the
+        // same process.
+        EXPECT_TRUE(doReclaimResource(lowPriPidClientInfos[0]));
+        // Verify that the high priority pid's clients are untouched.
+        success = true;
+        for (auto& client : highPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = false;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+        // Verify that one of the lower-importance clients from the low priority
+        // pid has been reclaimed.
+        success = false;
+        for (auto& client : lowPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+
+        // 2.C:
+        // - the lowest importance client from the high priority process should fail to reclaim.
+        EXPECT_FALSE(doReclaimResource(highPriPidClientInfos[2]));
+
+        // 2.D:
+        // - the lowest importance client from the low priority process should fail to reclaim.
+        EXPECT_FALSE(doReclaimResource(lowPriPidClientInfos[2]));
+
+        // 3. Set reclaim policy as "Process Priority and Client Importance".
+        // - A process should reclaim from:
+        //    - a lower priority process if there is any
+        //    - else a lower importance client within the same process if there is any
+        //    - else fail.
+        mService->setReclaimPolicy(true /*process priority*/, true /*codec importance*/);
+
+        // Remove all clients from the low priority process so that we have
+        // only one process (with high priority) with all the resources.
+        for (const auto& clientInfo : lowPriPidClientInfos) {
+            mService->removeClient(clientInfo);
+        }
+        lowPriPidClientInfos.clear();
+        lowPriPidClients.clear();
+        // 3.A:
+        // - high priority process should be able to reclaim successfully.
+        EXPECT_TRUE(doReclaimResource(highPriPidClientInfos[0]));
+        // Verify that one of the clients from the high priority pid has been reclaimed.
+        success = false;
+        for (auto& client : highPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+
+        // 3.B, set the policy back to ReclaimPolicyProcessPriority
+        mService->setReclaimPolicy(true /*process priority*/, false /*codec importance*/);
+
+        // Since there is only one process, the reclaim should fail.
+        EXPECT_FALSE(doReclaimResource(highPriPidClientInfos[0]));
+
+        // 4. Set reclaim policy as "Process Priority and Client Importance".
+        // - A process should reclaim from:
+        //    - a lower priority process if there is any
+        //    - else a lower importance client within the same process if there is any
+        //    - else fail.
+        mService->setReclaimPolicy(true /*process priority*/, true /*codec importance*/);
+
+        // Remove all clients from the high priority process so that we can
+        // start a new/fresh resource allocation.
+        for (const auto& clientInfo : highPriPidClientInfos) {
+            mService->removeClient(clientInfo);
+        }
+        highPriPidClientInfos.clear();
+        highPriPidClients.clear();
+
+        // Create 3 clients with codec importance high for a low priority pid.
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kHighestCodecImportance));
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kHighestCodecImportance));
+        lowPriPidClients.push_back(
+            createTestClient(kLowPriorityPid, kTestUid1, kHighestCodecImportance));
+
+        // Create 3 clients with codec importance low for a high priority pid.
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kLowestCodecImportance));
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kLowestCodecImportance));
+        highPriPidClients.push_back(
+            createTestClient(kHighPriorityPid, kTestUid2, kLowestCodecImportance));
+
+        // Add non-secure video codec resources for all 3 clients of the low priority pid.
+        for (auto& client : lowPriPidClients) {
+            addNonSecureVideoCodecResource(client, lowPriPidClientInfos);
+        }
+        // Add non-secure video codec resources for all 3 clients of the high priority pid.
+        for (auto& client : highPriPidClients) {
+            addNonSecureVideoCodecResource(client, highPriPidClientInfos);
+        }
+
+        // 4.A:
+        // - high priority process should be able to reclaim successfully.
+        EXPECT_TRUE(doReclaimResource(highPriPidClientInfos[0]));
+        // Since all clients within the high priority process have the same
+        // importance, none of them should be reclaimed.
+        success = true;
+        for (auto& client : highPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = false;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+        // Verify that one of the clients from the low priority pid has been reclaimed.
+        success = false;
+        for (auto& client : lowPriPidClients) {
+            if (toTestClient(client)->checkIfReclaimedAndReset()) {
+                success = true;
+                break;
+            }
+        }
+        EXPECT_TRUE(success);
+
+        // 4.B:
+        // If the low priority process tries to reclaim, it should fail as there
+        // aren't any lower importance clients or lower priority processes.
+        EXPECT_FALSE(doReclaimResource(lowPriPidClientInfos[0]));
+    }
+};
+
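+// Fixture that reruns the full test suite against ResourceManagerServiceNew:
+// the base fixture is constructed with newRM defaulting to true.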
+class ResourceManagerServiceNewTest : public ResourceManagerServiceTest {
+public:
+    ResourceManagerServiceNewTest(bool newRM = true) : ResourceManagerServiceTest(newRM) {}
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
@@ -1536,6 +1860,10 @@
     testRemoveClient();
 }
 
+TEST_F(ResourceManagerServiceTest, selfReclaimResource) {
+    testSelfReclaimResourceSecure();
+}
+
 TEST_F(ResourceManagerServiceTest, reclaimResource) {
     testReclaimResourceSecure();
     testReclaimResourceNonSecure();
@@ -1598,4 +1926,99 @@
     testConcurrentCodecs();
 }
 
+/////// Test cases for ResourceManagerServiceNew ///////
+TEST_F(ResourceManagerServiceNewTest, config) {
+    testConfig();
+}
+
+TEST_F(ResourceManagerServiceNewTest, addResource) {
+    addResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, combineResource) {
+    testCombineResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, combineResourceNegative) {
+    testCombineResourceWithNegativeValues();
+}
+
+TEST_F(ResourceManagerServiceNewTest, removeResource) {
+    testRemoveResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, removeClient) {
+    testRemoveClient();
+}
+
+TEST_F(ResourceManagerServiceNewTest, selfReclaimResource) {
+    testSelfReclaimResourceSecure();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResource) {
+    testReclaimResourceSecure();
+    testReclaimResourceNonSecure();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getAllClients_l) {
+    testGetAllClients();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getLowestPriorityBiggestClient_l) {
+    testGetLowestPriorityBiggestClient();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getLowestPriorityPid_l) {
+    testGetLowestPriorityPid();
+}
+
+TEST_F(ResourceManagerServiceNewTest, isCallingPriorityHigher_l) {
+    testIsCallingPriorityHigher();
+}
+
+TEST_F(ResourceManagerServiceNewTest, batteryStats) {
+    testBatteryStats();
+}
+
+TEST_F(ResourceManagerServiceNewTest, cpusetBoost) {
+    testCpusetBoost();
+}
+
+TEST_F(ResourceManagerServiceNewTest, overridePid) {
+    testOverridePid();
+}
+
+TEST_F(ResourceManagerServiceNewTest, markClientForPendingRemoval) {
+    testMarkClientForPendingRemoval();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withVideoCodec_reclaimsOnlyVideoCodec) {
+    testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withAudioCodec_reclaimsOnlyAudioCodec) {
+    testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withImageCodec_reclaimsOnlyImageCodec) {
+    testReclaimResources_withImageCodec_reclaimsOnlyImageCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_whenPartialResourceMatch_reclaims) {
+    testReclaimResources_whenPartialResourceMatch_reclaims();
+}
+
+TEST_F(ResourceManagerServiceNewTest,
+        reclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources) {
+    testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
+}
+
+TEST_F(ResourceManagerServiceNewTest, concurrentCodecs) {
+    testConcurrentCodecs();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimPolicies) {
+    testReclaimPolicies();
+}
+
 } // namespace android
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 6b48075..5b4fca9 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -37,6 +37,8 @@
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointMMAP.h"
 
+#include <com_android_media_aaudio.h>
+
 #define AAUDIO_BUFFER_CAPACITY_MIN    (4 * 512)
 #define AAUDIO_SAMPLE_RATE_DEFAULT    48000
 
@@ -148,9 +150,15 @@
 
         // Try other formats if the config from APM is the same as our current config.
         // Some HALs may report its format support incorrectly.
-        if ((previousConfig.format == config.format) &&
-                (previousConfig.sample_rate == config.sample_rate)) {
-            config.format = getNextFormatToTry(config.format);
+        if (previousConfig.format == config.format) {
+            if (previousConfig.sample_rate == config.sample_rate) {
+                config.format = getNextFormatToTry(config.format);
+            } else if (!com::android::media::aaudio::sample_rate_conversion()) {
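+                // Same format but a different sample rate was suggested; with the
+                // sample_rate_conversion flag disabled we cannot resample, so fail
+                // with AAUDIO_ERROR_INVALID_RATE instead of retrying.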
+                ALOGI("%s() - AAudio SRC feature not enabled, different rates! %d != %d",
+                      __func__, previousConfig.sample_rate, config.sample_rate);
+                result = AAUDIO_ERROR_INVALID_RATE;
+                break;
+            }
         }
 
         ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5fb152e..dc70c79 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -75,11 +75,7 @@
                         this, getState());
 
     // Stop the command thread before destroying.
-    if (mThreadEnabled) {
-        mThreadEnabled = false;
-        mCommandQueue.stopWaiting();
-        mCommandThread.stop();
-    }
+    stopCommandThread();
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
@@ -194,26 +190,27 @@
 
 error:
     closeAndClear();
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close() {
     aaudio_result_t result = sendCommand(CLOSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+    if (result == AAUDIO_ERROR_ALREADY_CLOSED) {
+        // AAUDIO_ERROR_ALREADY_CLOSED is not really an error; it just indicates that the
+        // stream has already been closed, so there is no need to close it again.
+        ALOGD("The stream(%d) is already closed", mHandle);
+        return AAUDIO_OK;
+    }
 
-    // Stop the command thread as the stream is closed.
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
 
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close_l() {
     if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
-        return AAUDIO_OK;
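+        // Let close() know that the stream was closed earlier; it maps this
+        // result back to AAUDIO_OK for the caller.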
+        return AAUDIO_ERROR_ALREADY_CLOSED;
     }
 
     // This will stop the stream, just in case it was not already stopped.
@@ -643,7 +640,7 @@
     int32_t count = mUpMessageQueue->getFifoBuffer()->write(command, 1);
     if (count != 1) {
         ALOGW("%s(): Queue full. Did client stop? Suspending stream. what = %u, %s",
-              __func__, command->what, getTypeText());
+              __func__, static_cast<unsigned>(command->what), getTypeText());
         setSuspended(true);
         return AAUDIO_ERROR_WOULD_BLOCK;
     } else {
@@ -766,3 +763,11 @@
         .record();
     return result;
 }
+
+void AAudioServiceStreamBase::stopCommandThread() {
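+    // compare_exchange_strong() flips mThreadEnabled from true to false at most
+    // once, so whichever of close(), the open() error path, or the destructor
+    // gets here first stops the thread and later calls become no-ops.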
+    bool threadEnabled = true;
+    if (mThreadEnabled.compare_exchange_strong(threadEnabled, false)) {
+        mCommandQueue.stopWaiting();
+        mCommandThread.stop();
+    }
+}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index d5061b3..96a6d44 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -360,7 +360,7 @@
         EXIT_STANDBY,
     };
     AAudioThread            mCommandThread;
-    std::atomic<bool>       mThreadEnabled{false};
+    std::atomic_bool        mThreadEnabled{false};
     AAudioCommandQueue      mCommandQueue;
 
     int32_t                 mFramesPerBurst = 0;
@@ -400,6 +400,8 @@
                                 bool waitForReply = false,
                                 int64_t timeoutNanos = 0);
 
+    void stopCommandThread();
+
     aaudio_result_t closeAndClear();
 
     /**
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 9fe06b7..12ce17f 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -97,6 +97,7 @@
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
+        "com.android.media.aaudio-aconfig-cc",
     ],
 
     static_libs: [
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 0230935..31ed8ac 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -51,6 +52,7 @@
         "aaudio-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "com.android.media.aaudio-aconfig-cc",
     ],
     static_libs: [
         "libaaudioservice",
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index cd3e579..63ae4c0 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index 1cc3b2b..d58143e 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -16,8 +16,8 @@
 
 
 # tunables
-DEV_BRANCH=master
-MAINLINE_BRANCH=tm-mainline-prod
+DEV_BRANCH=main
+MAINLINE_BRANCH=udc-mainline-prod
 
 ###
 RED=$(tput setaf 1)