Merge "Use String8/16 c_str [camera]" into main am: 0188ed1780 am: 2e85fba8db am: a3711f8c31 am: 1f6df7ab95

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2717293

Change-Id: Ia172d13fa44b62b914869665a365e42c14443f25
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Android.bp b/camera/Android.bp
index b3f70f4..a3fd7f9 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -144,6 +144,7 @@
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
+        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
new file mode 100644
index 0000000..453f696
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraIdRemapping.aidl
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Specifies a remapping of Camera Ids.
+ *
+ * Example: For a given package, a remapping of camera id0 to id1 specifies
+ * that any operation intended for id0 should instead be performed on id1.
+ *
+ * @hide
+ */
+parcelable CameraIdRemapping {
+    /**
+     * Specifies remapping of Camera Ids per package.
+     */
+    parcelable PackageIdRemapping {
+        /** Package Name (e.g. com.android.xyz). */
+        @utf8InCpp String packageName;
+        /**
+         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
+         * affected.
+         */
+        @utf8InCpp List<String> cameraIdsToReplace;
+        /**
+         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
+         *  the updated camera id for cameraIdsToReplace[i].
+         */
+        @utf8InCpp List<String> updatedCameraIds;
+    }
+
+    /**
+     * List of Camera Id remappings to perform.
+     */
+    List<PackageIdRemapping> packageIdRemappings;
+}
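
The two lists above are parallel: cameraIdsToReplace[i] is redirected to updatedCameraIds[i] for the named package. As a hedged illustration only, assuming the standard AIDL C++ backend mapping for the generated header and field types, a client could populate the parcelable like this:

    #include <android/hardware/CameraIdRemapping.h>

    using ::android::hardware::CameraIdRemapping;

    // Redirect ids "0" -> "1" and "2" -> "3" for one package. Header path and
    // vector/string field types are assumed from the usual AIDL C++ mapping.
    CameraIdRemapping makeExampleRemapping() {
        CameraIdRemapping::PackageIdRemapping entry;
        entry.packageName = "com.android.xyz";
        entry.cameraIdsToReplace = {"0", "2"};
        entry.updatedCameraIds = {"1", "3"};

        CameraIdRemapping remapping;
        remapping.packageIdRemappings = {entry};
        return remapping;
    }
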
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ed37b2d..409a930 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -29,6 +29,7 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
+import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -131,6 +132,22 @@
             int targetSdkVersion);
 
     /**
+     * Remap Camera Ids in the CameraService.
+     *
+     * Once this is in effect, all binder calls in the ICameraService that
+     * use logicalCameraId should consult remapping state to arrive at the
+     * correct cameraId to perform the operation on.
+     *
+     * Note: Before the new cameraIdRemapping state is applied, the previous
+     * state is cleared.
+     *
+     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
+     *        cameraIdRemapping object will result in clearing of any previous
+     *        cameraIdRemapping state in the camera service.
+     */
+    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+
+    /**
      * Remove listener for changes to camera device and flashlight state.
      */
     void removeListener(ICameraServiceListener listener);
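
A hedged client-side sketch of the new call (the cameraService proxy is a placeholder and makeExampleRemapping() is the helper sketched above, with error handling via the ALOGE macro used throughout these files); per the documentation, an unpopulated object clears previously installed state:

    // cameraService is assumed to be a connected sp<hardware::ICameraService>.
    android::binder::Status status =
            cameraService->remapCameraIds(makeExampleRemapping());
    if (!status.isOk()) {
        ALOGE("remapCameraIds failed: %s", status.toString8().c_str());
    }

    // Clearing: an unpopulated parcelable wipes any previously installed state.
    cameraService->remapCameraIds(CameraIdRemapping{});
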
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 86fd8ab..a75ce70 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -46,6 +46,7 @@
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
@@ -672,8 +673,7 @@
     }
 
     void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) override {
-        mCodec->mCallback->onOutputFramesRendered(
-                {RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
+        mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
     }
 
     void onOutputBuffersChanged() override {
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 6cb22ef..d80b6bf 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -55,6 +55,30 @@
      */
     oneway void setCsdEnabled(boolean enabled);
 
+    /**
+     * Structure containing a device identifier (address and type) together with
+     * whether it is categorized as a headphone.
+     */
+    @JavaDerive(toString = true)
+    parcelable AudioDeviceCategory {
+        @utf8InCpp String address;
+        int internalAudioType;
+        boolean csdCompatible;
+    }
+
+    /**
+     * Resets the list of stored device categories for the native layer. Should
+     * only be called once at boot time after parsing the existing AudioDeviceCategories.
+     */
+    oneway void initCachedAudioDeviceCategories(in AudioDeviceCategory[] audioDevices);
+
+    /**
+     * Sets whether the device with the given address and type is a headphone.
+     * This is used to determine whether we compute the CSD on the given device,
+     * since we cannot rely completely on the device annotations.
+     */
+    oneway void setAudioDeviceCategory(in AudioDeviceCategory audioDevice);
+
     /* -------------------------- Test API methods --------------------------
     /** Get the currently used RS2 upper bound. */
     float getOutputRs2UpperBound();
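
A minimal client-side sketch of the two new methods, assuming the usual AIDL C++ backend mapping for the nested parcelable; the proxy, address, and device-type values are placeholders:

    using ::android::media::ISoundDose;

    void seedDeviceCategories(const android::sp<ISoundDose>& soundDose) {
        ISoundDose::AudioDeviceCategory headset;
        headset.address = "00:11:22:33:44:55";  // placeholder BT address
        headset.internalAudioType = 0;          // placeholder audio device type
        headset.csdCompatible = true;           // categorized as a headphone

        // Boot-time call: replaces whatever the native layer has cached.
        soundDose->initCachedAudioDeviceCategories({headset});

        // Later, if the user re-categorizes the device, update just that entry.
        headset.csdCompatible = false;
        soundDose->setAudioDeviceCategory(headset);
    }
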
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d0d08d..a1726ad 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -223,8 +223,7 @@
         deltaSamples = kMaxCaptureBufSize;
     }
 
-    int32_t capturePoint;
-    //capturePoint = (int32_t)mCaptureIdx - deltaSamples;
+    int32_t capturePoint, captureSamples = mCaptureSamples;
     __builtin_sub_overflow((int32_t) mCaptureIdx, deltaSamples, &capturePoint);
     // a negative capturePoint means we wrap the buffer.
     if (capturePoint < 0) {
@@ -232,13 +231,14 @@
         if (size > mCaptureSamples) {
             size = mCaptureSamples;
         }
+        // first stage of a two-stage copy: copy to the end of the buffer and reset size/point
         result.insert(result.end(), &mCaptureBuf[kMaxCaptureBufSize + capturePoint],
                         &mCaptureBuf[kMaxCaptureBufSize + capturePoint + size]);
-        mCaptureSamples -= size;
+        captureSamples -= size;
         capturePoint = 0;
     }
     result.insert(result.end(), &mCaptureBuf[capturePoint],
-                    &mCaptureBuf[capturePoint + mCaptureSamples]);
+                  &mCaptureBuf[capturePoint + captureSamples]);
     mLastCaptureIdx = mCaptureIdx;
     return result;
 }
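
The fix above takes a local copy (captureSamples) so the shared mCaptureSamples counter is no longer mutated during the copy; the wrap-around logic itself is unchanged. A standalone restatement of that two-stage copy, with illustrative names:

    #include <cstdint>
    #include <vector>

    // Capture the most recent `samples` entries ending at `captureIdx` from a
    // circular buffer, without mutating any shared state.
    std::vector<uint8_t> captureWindow(const std::vector<uint8_t>& buf,
                                       int32_t captureIdx, int32_t samples) {
        const int32_t bufSize = static_cast<int32_t>(buf.size());
        std::vector<uint8_t> result;
        int32_t capturePoint = captureIdx - samples;
        int32_t remaining = samples;      // local copy, shared state untouched
        if (capturePoint < 0) {           // negative point means we wrapped
            int32_t size = -capturePoint;
            if (size > remaining) size = remaining;
            // first stage: copy the tail of the buffer
            result.insert(result.end(), buf.begin() + bufSize + capturePoint,
                          buf.begin() + bufSize + capturePoint + size);
            remaining -= size;
            capturePoint = 0;
        }
        // second stage: copy from the start (or from capturePoint if no wrap)
        result.insert(result.end(), buf.begin() + capturePoint,
                      buf.begin() + capturePoint + remaining);
        return result;
    }
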
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a91b24a..2223f24 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -43,6 +43,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/MediaBufferHolder.h>
@@ -64,11 +65,14 @@
 #include "include/SharedMemoryBuffer.h"
 #include <media/stagefright/omx/OMXUtils.h>
 
+#include <server_configurable_flags/get_flags.h>
+
 namespace android {
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
 
 using hardware::media::omx::V1_0::Status;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 enum {
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
@@ -81,6 +85,11 @@
 
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -563,6 +572,9 @@
 ACodec::ACodec()
     : mSampleRate(0),
       mNodeGeneration(0),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
+      mIsWindowToDisplay(false),
+      mHasPresentFenceTimes(false),
       mUsingNativeWindow(false),
       mNativeWindowUsageBits(0),
       mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
@@ -634,7 +646,8 @@
     if (!mBufferChannel) {
         mBufferChannel = std::make_shared<ACodecBufferChannel>(
                 new AMessage(kWhatInputBufferFilled, this),
-                new AMessage(kWhatOutputBufferDrained, this));
+                new AMessage(kWhatOutputBufferDrained, this),
+                new AMessage(kWhatPollForRenderedBuffers, this));
     }
     return mBufferChannel;
 }
@@ -744,6 +757,7 @@
     // if we have not yet started the codec, we can simply set the native window
     if (mBuffers[kPortIndexInput].size() == 0) {
         mNativeWindow = surface;
+        initializeFrameTracking();
         return OK;
     }
 
@@ -852,6 +866,7 @@
 
     mNativeWindow = nativeWindow;
     mNativeWindowUsageBits = usageBits;
+    initializeFrameTracking();
     return OK;
 }
 
@@ -962,7 +977,6 @@
                 BufferInfo info;
                 info.mStatus = BufferInfo::OWNED_BY_US;
                 info.mFenceFd = -1;
-                info.mRenderInfo = NULL;
                 info.mGraphicBuffer = NULL;
                 info.mNewGraphicBuffer = false;
 
@@ -1230,6 +1244,7 @@
 
     *bufferCount = def.nBufferCountActual;
     *bufferSize =  def.nBufferSize;
+    initializeFrameTracking();
     return err;
 }
 
@@ -1268,7 +1283,6 @@
         info.mStatus = BufferInfo::OWNED_BY_US;
         info.mFenceFd = fenceFd;
         info.mIsReadFence = false;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = graphicBuffer;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1345,7 +1359,6 @@
         BufferInfo info;
         info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
         info.mFenceFd = -1;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = NULL;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1441,42 +1454,6 @@
     return err;
 }
 
-void ACodec::updateRenderInfoForDequeuedBuffer(
-        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
-
-    info->mRenderInfo =
-        mRenderTracker.updateInfoForDequeuedBuffer(
-                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
-
-    // check for any fences already signaled
-    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
-}
-
-void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
-        mRenderTracker.dumpRenderQueue();
-    }
-}
-
-void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
-    std::list<FrameRenderTracker::Info> done =
-        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
-
-    // unlink untracked frames
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
-        ssize_t index = it->getIndex();
-        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
-            mBuffers[kPortIndexOutput][index].mRenderInfo = NULL;
-        } else if (index >= 0) {
-            // THIS SHOULD NEVER HAPPEN
-            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
-        }
-    }
-
-    mCallback->onOutputFramesRendered(done);
-}
-
 void ACodec::onFirstTunnelFrameReady() {
     mCallback->onFirstTunnelFrameReady();
 }
@@ -1531,7 +1508,6 @@
 
                 info->mStatus = BufferInfo::OWNED_BY_US;
                 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
-                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                 return info;
             }
         }
@@ -1576,18 +1552,96 @@
     oldest->mNewGraphicBuffer = true;
     oldest->mStatus = BufferInfo::OWNED_BY_US;
     oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
-    mRenderTracker.untrackFrame(oldest->mRenderInfo);
-    oldest->mRenderInfo = NULL;
 
     ALOGV("replaced oldest buffer #%u with age %u, graphicBuffer %p",
             (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
             mDequeueCounter - oldest->mDequeuedAt,
             oldest->mGraphicBuffer->handle);
-
-    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
     return oldest;
 }
 
+void ACodec::initializeFrameTracking() {
+    mTrackedFrames.clear();
+
+    int isWindowToDisplay = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
+            &isWindowToDisplay);
+    mIsWindowToDisplay = isWindowToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsWindowToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (requiring permissions)
+        return;
+    }
+
+    int hasPresentFenceTimes = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT,
+            &hasPresentFenceTimes);
+    mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+    if (!mHasPresentFenceTimes) {
+        ALOGI("Using latch times for frame rendered signals - present fences not supported");
+    }
+
+    status_t err = native_window_enable_frame_timestamps(mNativeWindow.get(), true);
+    if (err) {
+        ALOGE("Failed to enable frame timestamps (%d)", err);
+    }
+}
+
+void ACodec::trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+    // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+    // so track the frame as if the desired render time is now.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (desiredRenderTimeNs < nowNs) {
+        desiredRenderTimeNs = nowNs;
+    }
+    // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+    // actually rendered.
+    TrackedFrame frame;
+    frame.id = frameId;
+    frame.mediaTimeUs = mediaTimeUs;
+    frame.desiredRenderTimeNs = desiredRenderTimeNs;
+    mTrackedFrames.push_back(frame);
+}
+
+void ACodec::pollForRenderedFrames() {
+    std::list<RenderedFrameInfo> renderedFrameInfos;
+    // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+    // in fact, been rendered.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    while (!mTrackedFrames.empty()) {
+        TrackedFrame & frame = mTrackedFrames.front();
+        // Frames that should have been rendered at least 100ms in the past are checked
+        if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+            break;
+        }
+
+        status_t err;
+        nsecs_t latchOrPresentTimeNs = NATIVE_WINDOW_TIMESTAMP_INVALID;
+        err = native_window_get_frame_timestamps(mNativeWindow.get(), frame.id,
+                /* outRequestedPresentTime */ nullptr, /* outAcquireTime */ nullptr,
+                mHasPresentFenceTimes ? nullptr : &latchOrPresentTimeNs, // latch time
+                /* outFirstRefreshStartTime */ nullptr, /* outLastRefreshStartTime */ nullptr,
+                /* outGpuCompositionDoneTime */ nullptr,
+                mHasPresentFenceTimes ? &latchOrPresentTimeNs : nullptr, // display present time,
+                /* outDequeueReadyTime */ nullptr, /* outReleaseTime */ nullptr);
+        if (err) {
+            ALOGE("Failed to get frame timestamps for %lld: %d", (long long) frame.id, err);
+        }
+        // If we don't have a render time by now, then consider the frame as dropped
+        if (latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_PENDING &&
+            latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_INVALID) {
+            renderedFrameInfos.push_back(RenderedFrameInfo(frame.mediaTimeUs,
+                                                           latchOrPresentTimeNs));
+        }
+
+        mTrackedFrames.pop_front();
+    }
+
+    if (!renderedFrameInfos.empty()) {
+        mCallback->onOutputFramesRendered(renderedFrameInfos);
+    }
+}
+
 status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
     if (portIndex == kPortIndexInput) {
         mBufferChannel->setInputBufferArray({});
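
The polling policy added above can be stated independently of the native-window API: a frame is only judged once its desired render time is at least 100ms in the past, and a frame with no latch/present timestamp by then is treated as dropped. A standalone sketch with the timestamp query abstracted into a callback (all names illustrative):

    #include <cstdint>
    #include <deque>
    #include <functional>
    #include <list>
    #include <optional>

    struct Tracked { int64_t id; int64_t mediaTimeUs; int64_t desiredRenderTimeNs; };
    struct Rendered { int64_t mediaTimeUs; int64_t renderTimeNs; };

    std::list<Rendered> pollRendered(
            std::deque<Tracked>& frames, int64_t nowNs,
            const std::function<std::optional<int64_t>(int64_t frameId)>& renderTimeForId) {
        std::list<Rendered> out;
        while (!frames.empty()) {
            const Tracked& f = frames.front();
            // Only judge frames whose desired render time is at least 100ms old.
            if (f.desiredRenderTimeNs > nowNs - 100'000'000LL) break;
            if (auto t = renderTimeForId(f.id)) {
                out.push_back({f.mediaTimeUs, *t});  // rendered: report it
            }                                        // no timestamp: treat as dropped
            frames.pop_front();
        }
        return out;
    }
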
@@ -1663,11 +1717,6 @@
         ::close(info->mFenceFd);
     }
 
-    if (portIndex == kPortIndexOutput) {
-        mRenderTracker.untrackFrame(info->mRenderInfo, i);
-        info->mRenderInfo = NULL;
-    }
-
     // remove buffer even if mOMXNode->freeBuffer fails
     mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
     return err;
@@ -6032,22 +6081,10 @@
     sp<RefBase> obj;
     CHECK(msg->findObject("messages", &obj));
     sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
-
-    bool receivedRenderedEvents = false;
     for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
           it != msgList->getList().cend(); ++it) {
         (*it)->setWhat(ACodec::kWhatOMXMessageItem);
         mCodec->handleMessage(*it);
-        int32_t type;
-        CHECK((*it)->findInt32("type", &type));
-        if (type == omx_message::FRAME_RENDERED) {
-            receivedRenderedEvents = true;
-        }
-    }
-
-    if (receivedRenderedEvents) {
-        // NOTE: all buffers are rendered in this case
-        mCodec->notifyOfRenderedFrames();
     }
     return true;
 }
@@ -6609,15 +6646,6 @@
     info->mDequeuedAt = ++mCodec->mDequeueCounter;
     info->mStatus = BufferInfo::OWNED_BY_US;
 
-    if (info->mRenderInfo != NULL) {
-        // The fence for an emptied buffer must have signaled, but there still could be queued
-        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
-        // as we will soon requeue this buffer to the surface. While in theory we could still keep
-        // track of buffers that are requeued to the surface, it is better to add support to the
-        // buffer-queue to notify us of released buffers and their fences (in the future).
-        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
-    }
-
     // byte buffers cannot take fences, so wait for any fence now
     if (mCodec->mNativeWindow == NULL) {
         (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
@@ -6824,14 +6852,6 @@
             mCodec->mLastHdr10PlusBuffer = hdr10PlusInfo;
         }
 
-        // save buffers sent to the surface so we can get render time when they return
-        int64_t mediaTimeUs = -1;
-        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-        if (mediaTimeUs >= 0) {
-            mCodec->mRenderTracker.onFrameQueued(
-                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
-        }
-
         int64_t timestampNs = 0;
         if (!msg->findInt64("timestampNs", &timestampNs)) {
             // use media timestamp if client did not request a specific render timestamp
@@ -6845,11 +6865,25 @@
         err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
         ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
 
+        uint64_t frameId;
+        err = native_window_get_next_frame_id(mCodec->mNativeWindow.get(), &frameId);
+
         info->checkReadFence("onOutputBufferDrained before queueBuffer");
         err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
-        // TODO(b/266211548): Poll the native window for rendered buffers, since when queueing
-        // buffers, the frame event history delta is retrieved.
+
+        int64_t mediaTimeUs = -1;
+        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
+        if (mCodec->mAreRenderMetricsEnabled && mCodec->mIsWindowToDisplay) {
+            mCodec->trackReleasedFrame(frameId, mediaTimeUs, timestampNs);
+            mCodec->pollForRenderedFrames();
+        } else {
+            // When the surface is an intermediate surface, onFrameRendered is triggered immediately
+            // when the frame is queued to the non-display surface
+            mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs,
+                                                                         timestampNs)});
+        }
+
         info->mFenceFd = -1;
         if (err == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -7076,7 +7110,6 @@
     ++mCodec->mNodeGeneration;
 
     mCodec->mComponentName = componentName;
-    mCodec->mRenderTracker.setComponentName(componentName);
     mCodec->mFlags = 0;
 
     if (componentName.endsWith(".secure")) {
@@ -7713,7 +7746,6 @@
 
 void ACodec::ExecutingState::stateEntered() {
     ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
-    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
     mCodec->processDeferredMessages();
 }
 
@@ -7824,7 +7856,15 @@
                     mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                 }
             }
-            return true;
+            handled = true;
+            break;
+        }
+
+        case kWhatPollForRenderedBuffers:
+        {
+            mCodec->pollForRenderedFrames();
+            handled = true;
+            break;
         }
 
         default:
@@ -8520,7 +8560,7 @@
 }
 
 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8694,7 +8734,7 @@
 
 bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
         int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8725,10 +8765,6 @@
                             OMX_CommandPortEnable, kPortIndexOutput);
                 }
 
-                // Clear the RenderQueue in which queued GraphicBuffers hold the
-                // actual buffer references in order to free them early.
-                mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
                 if (err == OK) {
                     err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                     ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -9112,8 +9148,6 @@
         // the native window for rendering. Let's get those back as well.
         mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
 
-        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
         mCodec->mCallback->onFlushCompleted();
 
         mCodec->mPortEOS[kPortIndexInput] =
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 8f2bed2..ad42813 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/ACodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/MediaCodecBuffer.h>
 #include <system/window.h>
@@ -87,9 +88,11 @@
 }
 
 ACodecBufferChannel::ACodecBufferChannel(
-        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained)
+        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+        const sp<AMessage> &pollForRenderedBuffers)
     : mInputBufferFilled(inputBufferFilled),
       mOutputBufferDrained(outputBufferDrained),
+      mPollForRenderedBuffers(pollForRenderedBuffers),
       mHeapSeqNum(-1) {
 }
 
@@ -488,7 +491,7 @@
 }
 
 void ACodecBufferChannel::pollForRenderedBuffers() {
-    // TODO(b/266211548): Poll the native window for rendered buffers.
+    mPollForRenderedBuffers->post();
 }
 
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 91286b9..ea24126 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -79,6 +79,7 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
@@ -210,6 +211,7 @@
 // Render metrics
 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
@@ -879,7 +881,7 @@
             const sp<AMessage> &outputFormat) override;
     virtual void onInputSurfaceDeclined(status_t err) override;
     virtual void onSignaledInputEOS(status_t err) override;
-    virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
+    virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
     virtual void onOutputBuffersChanged() override;
     virtual void onFirstTunnelFrameReady() override;
 private:
@@ -988,7 +990,7 @@
     notify->post();
 }
 
-void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
+void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
     sp<AMessage> notify(mNotify->dup());
     notify->setInt32("what", kWhatOutputFramesRendered);
     if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
@@ -1297,6 +1299,7 @@
         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
         if (m.frameReleasedCount > 0) {
             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
@@ -6087,12 +6090,10 @@
     return onQueueInputBuffer(msg);
 }
 
-//static
-size_t MediaCodec::CreateFramesRenderedMessage(
-        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+template<typename T>
+static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
     size_t index = 0;
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
+    for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
         if (it->getRenderTimeNs() < 0) {
             continue; // dropped frame from tracking
         }
@@ -6103,6 +6104,18 @@
     return index;
 }
 
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
     size_t index;
     CHECK(msg->findSize("index", &index));
@@ -6194,7 +6207,9 @@
             // presentation timestamp is used instead, which almost certainly occurs in the past,
             // since it's almost always a zero-based offset from the start of the stream. In these
             // scenarios, we expect the frame to be rendered with no delay.
-            int64_t delayUs = noRenderTime ? 0 : renderTimeNs / 1000 - ALooper::GetNowUs();
+            int64_t nowUs = ALooper::GetNowUs();
+            int64_t renderTimeUs = renderTimeNs / 1000;
+            int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
             delayUs += 100 * 1000; /* 100ms in microseconds */
             status_t err =
                     mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
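
The rescheduling math above never polls in the past and always leaves 100ms of slack after the expected render time so the present fence can signal. A standalone restatement with an illustrative function name:

    #include <algorithm>
    #include <cstdint>

    int64_t pollDelayUs(int64_t renderTimeNs, int64_t nowUs) {
        const int64_t renderTimeUs = renderTimeNs / 1000;
        // A render time in the past (or a zero-based presentation timestamp used
        // as the fallback) means the frame should already be on screen: poll now.
        const int64_t delayUs = std::max<int64_t>(0, renderTimeUs - nowUs);
        return delayUs + 100 * 1000;  // plus 100ms of slack for the present fence
    }

For example, a frame expected to render 50ms from now is polled in 150ms, while a frame carrying only a past presentation timestamp is polled in 100ms.
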
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index fbd8577..e920bd1 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -455,6 +455,8 @@
     if (mMetrics.firstRenderTimeUs == 0) {
         mMetrics.firstRenderTimeUs = actualRenderTimeUs;
     }
+    // Capture the timestamp at which the last frame was rendered
+    mMetrics.lastRenderTimeUs = actualRenderTimeUs;
 
     mMetrics.frameRenderedCount++;
 
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 903280f..a464504 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -29,6 +29,7 @@
 #include <media/IOMX.h>
 
 namespace android {
+struct ACodec;
 namespace hardware {
 class HidlMemory;
 };
@@ -63,7 +64,8 @@
     };
 
     ACodecBufferChannel(
-            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained);
+            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+            const sp<AMessage> &pollForRenderedBuffers);
     virtual ~ACodecBufferChannel();
 
     // BufferChannelBase interface
@@ -138,6 +140,7 @@
 
     const sp<AMessage> mInputBufferFilled;
     const sp<AMessage> mOutputBufferDrained;
+    const sp<AMessage> mPollForRenderedBuffers;
 
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index e535d5d..f876bc6 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -19,7 +19,7 @@
 
 #include <set>
 #include <stdint.h>
-#include <list>
+#include <deque>
 #include <vector>
 #include <android/native_window.h>
 #include <media/hardware/MetadataBufferType.h>
@@ -27,9 +27,9 @@
 #include <media/IOMX.h>
 #include <media/stagefright/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
-#include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/SkipCutBuffer.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/NativeHandle.h>
 #include <OMX_Audio.h>
 #include <hardware/gralloc.h>
@@ -156,6 +156,7 @@
         kWhatForceStateTransition    = 'fstt',
         kWhatCheckIfStuck            = 'Cstk',
         kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
+        kWhatPollForRenderedBuffers  = 'pfrb',
     };
 
     enum {
@@ -177,6 +178,13 @@
                             | static_cast<uint64_t>(BufferUsage::VIDEO_DECODER),
     };
 
+    struct TrackedFrame {
+        int64_t id;
+        int64_t mediaTimeUs;
+        int64_t desiredRenderTimeNs;
+        nsecs_t renderTimeNs;
+    };
+
     struct BufferInfo {
         enum Status {
             OWNED_BY_US,
@@ -204,7 +212,6 @@
         sp<GraphicBuffer> mGraphicBuffer;
         bool mNewGraphicBuffer;
         int mFenceFd;
-        FrameRenderTracker::Info *mRenderInfo;
 
         // The following field and 4 methods are used for debugging only
         bool mIsReadFence;
@@ -251,6 +258,11 @@
     int32_t mNodeGeneration;
     sp<TAllocator> mAllocator[2];
 
+    std::deque<TrackedFrame> mTrackedFrames; // render information for buffers sent to a window
+    bool mAreRenderMetricsEnabled;
+    bool mIsWindowToDisplay;
+    bool mHasPresentFenceTimes;
+
     bool mUsingNativeWindow;
     sp<ANativeWindow> mNativeWindow;
     int mNativeWindowUsageBits;
@@ -267,7 +279,6 @@
     // format updates. This will equal to mOutputFormat until the first actual frame is received.
     sp<AMessage> mBaseOutputFormat;
 
-    FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
     std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
@@ -349,6 +360,10 @@
     status_t freeOutputBuffersNotOwnedByComponent();
     BufferInfo *dequeueBufferFromNativeWindow();
 
+    void initializeFrameTracking();
+    void trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+    void pollForRenderedFrames();
+
     inline bool storingMetadataInDecodedBuffers() {
         return (mPortMode[kPortIndexOutput] == IOMX::kPortModeDynamicANWBuffer) && !mIsEncoder;
     }
@@ -571,21 +586,6 @@
     void processDeferredMessages();
 
     void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
-    // called when we have dequeued a buffer |buf| from the native window to track render info.
-    // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
-    // stored.
-    void updateRenderInfoForDequeuedBuffer(
-            ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
-
-    // Checks to see if any frames have rendered up until |until|, and to notify client
-    // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
-    // unrendered frame. These frames are removed from the render queue.
-    // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
-    // queue, allowing all rendered framed up till then to be notified of.
-    // (This will effectively clear the render queue up-until (and including) |until|.)
-    // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
-    void notifyOfRenderedFrames(
-            bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
     void onFirstTunnelFrameReady();
 
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 916d41e..90347f9 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -41,7 +41,7 @@
 struct BufferProducerWrapper;
 class MediaCodecBuffer;
 struct PersistentSurface;
-struct RenderedFrameInfo;
+class RenderedFrameInfo;
 class Surface;
 struct ICrypto;
 class IMemory;
diff --git a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
index c14755a..cab7ecc 100644
--- a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
+++ b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
@@ -32,61 +32,59 @@
 
 namespace android {
 
-// Tracks the render information about a frame. Frames go through several states while
-// the render information is tracked:
-//
-// 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
-// queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
-// Key characteristics: mFence is not NULL and mIndex is negative.
-//
-// 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
-// Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
-// invalid.
-//
-// 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
-// Key characteristics: mFence is NULL.
-//
-struct RenderedFrameInfo {
-    // set by client during onFrameQueued or onFrameRendered
-    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
-
-    // -1 if frame is not yet rendered
-    nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
-
-    // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
-    ssize_t getIndex() const        { return mIndex; }
-
-    // creates information for a queued frame
-    RenderedFrameInfo(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
-            const sp<Fence> &fence)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(-1),
-          mIndex(-1),
-          mGraphicBuffer(graphicBuffer),
-          mFence(fence) {
-    }
-
-    // creates information for a frame rendered on a tunneled surface
-    RenderedFrameInfo(int64_t mediaTimeUs, nsecs_t renderTimeNs)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(renderTimeNs),
-          mIndex(-1),
-          mGraphicBuffer(NULL),
-          mFence(NULL) {
-    }
-
-private:
-    int64_t mMediaTimeUs;
-    nsecs_t mRenderTimeNs;
-    ssize_t mIndex;         // to be used by client
-    sp<GraphicBuffer> mGraphicBuffer;
-    sp<Fence> mFence;
-
-    friend struct FrameRenderTracker;
-};
-
 struct FrameRenderTracker {
-    typedef RenderedFrameInfo Info;
+    // Tracks the render information about a frame. Frames go through several states while
+    // the render information is tracked:
+    //
+    // 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
+    // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+    // Key characteristics: mFence is not NULL and mIndex is negative.
+    //
+    // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+    // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
+    // invalid.
+    //
+    // 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
+    // Key characteristics: mFence is NULL.
+    //
+    struct Info {
+        // set by client during onFrameQueued or onFrameRendered
+        int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+
+        // -1 if frame is not yet rendered
+        nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+        // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+        ssize_t getIndex() const        { return mIndex; }
+
+        // creates information for a queued frame
+        Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
+                const sp<Fence> &fence)
+          : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(-1),
+            mIndex(-1),
+            mGraphicBuffer(graphicBuffer),
+            mFence(fence) {
+        }
+
+        // creates information for a frame rendered on a tunneled surface
+        Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+            : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(renderTimeNs),
+            mIndex(-1),
+            mGraphicBuffer(NULL),
+            mFence(NULL) {
+        }
+
+    private:
+        int64_t mMediaTimeUs;
+        nsecs_t mRenderTimeNs;
+        ssize_t mIndex;         // to be used by client
+        sp<GraphicBuffer> mGraphicBuffer;
+        sp<Fence> mFence;
+
+        friend struct FrameRenderTracker;
+    };
 
     FrameRenderTracker();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index bc0f6c5..ceba7d7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -64,6 +64,7 @@
 class MediaCodecBuffer;
 class IMemory;
 struct PersistentSurface;
+class RenderedFrameInfo;
 class SoftwareRenderer;
 class Surface;
 namespace hardware {
@@ -281,6 +282,8 @@
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
     static size_t CreateFramesRenderedMessage(
+            const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg);
+    static size_t CreateFramesRenderedMessage(
             const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg);
 
     static status_t CanFetchLinearBlock(
diff --git a/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
new file mode 100644
index 0000000..4b8a58d
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RENDERED_FRAME_INFO_H
+#define RENDERED_FRAME_INFO_H
+
+namespace android {
+
+class RenderedFrameInfo {
+public:
+    RenderedFrameInfo(int64_t mediaTimeUs, int64_t renderTimeNs)
+        : mMediaTimeUs(mediaTimeUs), mRenderTimeNs(renderTimeNs) {}
+
+    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+    nsecs_t getRenderTimeNs() const { return mRenderTimeNs;}
+
+private:
+    int64_t mMediaTimeUs;
+    nsecs_t mRenderTimeNs;
+};
+
+} // android
+
+#endif // RENDERED_FRAME_INFO_H
\ No newline at end of file
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index 82ba81c..a656e6e 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -38,6 +38,9 @@
     // The render time of the first video frame.
     int64_t firstRenderTimeUs;
 
+    // The render time of the last video frame.
+    int64_t lastRenderTimeUs;
+
     // The number of frames released to be rendered.
     int64_t frameReleasedCount;
 
diff --git a/media/utils/BatteryNotifier.cpp b/media/utils/BatteryNotifier.cpp
index 09bc042..7762c24 100644
--- a/media/utils/BatteryNotifier.cpp
+++ b/media/utils/BatteryNotifier.cpp
@@ -85,8 +85,8 @@
 
 void BatteryNotifier::noteStopAudio(uid_t uid) {
     Mutex::Autolock _l(mLock);
-    if (mAudioRefCounts.find(uid) == mAudioRefCounts.end()) {
-        ALOGW("%s: audio refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
+    if (mAudioRefCounts.find(uid) == mAudioRefCounts.end() || (mAudioRefCounts[uid] == 0)) {
+        ALOGE("%s: audio refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
         return;
     }
 
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index 3812d7a..73bed4a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -68,6 +68,38 @@
     sp<IBatteryStats> getBatteryService_l();
 };
 
+namespace mediautils {
+class BatteryStatsAudioHandle {
+  public:
+    static constexpr uid_t INVALID_UID = static_cast<uid_t>(-1);
+
+    explicit BatteryStatsAudioHandle(uid_t uid) : mUid(uid) {
+        if (uid != INVALID_UID) {
+            BatteryNotifier::getInstance().noteStartAudio(mUid);
+        }
+    }
+
+    BatteryStatsAudioHandle(BatteryStatsAudioHandle&& other) : mUid(other.mUid) {
+        other.mUid = INVALID_UID;
+    }
+
+    BatteryStatsAudioHandle(const BatteryStatsAudioHandle& other) = delete;
+
+    BatteryStatsAudioHandle& operator=(const BatteryStatsAudioHandle& other) = delete;
+
+    BatteryStatsAudioHandle& operator=(BatteryStatsAudioHandle&& other) = delete;
+
+    ~BatteryStatsAudioHandle() {
+        if (mUid != INVALID_UID) {
+            BatteryNotifier::getInstance().noteStopAudio(mUid);
+        }
+    }
+
+  private:
+    // Logically const
+    uid_t mUid = INVALID_UID;
+};
+}  // namespace mediautils
 }  // namespace android
 
 #endif // MEDIA_BATTERY_NOTIFIER_H
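
BatteryStatsAudioHandle is a move-only RAII wrapper around the noteStartAudio()/noteStopAudio() pair; a hypothetical usage sketch:

    #include <mediautils/BatteryNotifier.h>

    using android::mediautils::BatteryStatsAudioHandle;

    // Attribute audio battery usage to `uid` for the lifetime of this scope.
    void renderAudioForClient(uid_t uid) {
        BatteryStatsAudioHandle handle(uid);  // noteStartAudio(uid), unless uid is INVALID_UID
        // ... mix/render audio on behalf of this client ...
    }                                         // ~handle: noteStopAudio(uid)
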
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index a3575c9..69a6faa 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -3825,7 +3825,7 @@
         patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
         patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
     }
-    track->setTeePatchesToUpdate(std::move(teePatches));
+    track->setTeePatchesToUpdate_l(std::move(teePatches));
 }
 
 sp<audioflinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index cf30ded..2302e13 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -227,6 +227,18 @@
     virtual void setMetadataHasChanged() = 0;
 
     /**
+     * Called when a track becomes active to record its contribution to battery usage.
+     * Track state transitions should eventually be handled within the track class.
+     */
+    virtual void beginBatteryAttribution() = 0;
+
+    /**
+     * Called when a track moves out of the active state to record its contribution
+     * to battery usage.
+     */
+    virtual void endBatteryAttribution() = 0;
+
+    /**
      * For RecordTrack
      * TODO(b/291317964) either use this or add asRecordTrack or asTrack etc.
      */
@@ -339,10 +351,10 @@
     virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
 
     // This function should be called with holding thread lock.
-    virtual void updateTeePatches() = 0;
+    virtual void updateTeePatches_l() = 0;
 
     // Argument teePatchesToUpdate is by value, use std::move to optimize.
-    virtual void setTeePatchesToUpdate(TeePatches teePatchesToUpdate) = 0;
+    virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
 
     static bool checkServerLatencySupported(audio_format_t format, audio_output_flags_t flags) {
         return audio_is_linear_pcm(format) && (flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index bcc6536..64a5843 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -78,30 +78,8 @@
 
 void MelReporter::onFirstRef() {
     mAfMelReporterCallback->getPatchCommandThread()->addListener(this);
-}
 
-bool MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
-    if (!mSoundDoseManager->isCsdEnabled()) {
-        ALOGV("%s csd is disabled", __func__);
-        return false;
-    }
-    if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
-        return true;
-    }
-
-    switch (device) {
-        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
-        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
-        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-        case AUDIO_DEVICE_OUT_USB_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
-            return true;
-        default:
-            return false;
-    }
+    mSoundDoseManager = sp<SoundDoseManager>::make(sp<IMelReporterCallback>::fromExisting(this));
 }
 
 void MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
@@ -127,16 +105,17 @@
     }
 
     auto activeMelPatchIt = mActiveMelPatches.find(activeMelPatchId.value());
-    if (activeMelPatchIt != mActiveMelPatches.end()
-        && shouldActivateCsd != activeMelPatchIt->second.csdActive) {
-        if (activeMelPatchIt->second.csdActive) {
-            ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
-            stopMelComputationForPatch_l(activeMelPatchIt->second);
-        } else {
-            ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
-            startMelComputationForActivePatch_l(activeMelPatchIt->second);
+    if (activeMelPatchIt != mActiveMelPatches.end()) {
+        if (shouldActivateCsd != activeMelPatchIt->second.csdActive) {
+            if (activeMelPatchIt->second.csdActive) {
+                ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
+                stopMelComputationForPatch_l(activeMelPatchIt->second);
+            } else {
+                ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
+                startMelComputationForActivePatch_l(activeMelPatchIt->second);
+            }
+            activeMelPatchIt->second.csdActive = shouldActivateCsd;
         }
-        activeMelPatchIt->second.csdActive = shouldActivateCsd;
     }
 }
 
@@ -159,23 +138,28 @@
     audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
     ActiveMelPatch newPatch;
     newPatch.streamHandle = streamHandle;
+    newPatch.csdActive = false;
     for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
-        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
-            && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE &&
+                mSoundDoseManager->shouldComputeCsdForDeviceType(
+                        patch.mAudioPatch.sinks[i].ext.device.type)) {
             audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
-            newPatch.deviceHandles.push_back(deviceId);
+            bool shouldComputeCsd = mSoundDoseManager->shouldComputeCsdForDeviceWithAddress(
+                    patch.mAudioPatch.sinks[i].ext.device.type,
+                    patch.mAudioPatch.sinks[i].ext.device.address);
+            newPatch.deviceStates.push_back({deviceId, shouldComputeCsd});
+            newPatch.csdActive |= shouldComputeCsd;
             AudioDeviceTypeAddr adt{patch.mAudioPatch.sinks[i].ext.device.type,
                                     patch.mAudioPatch.sinks[i].ext.device.address};
             mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
         }
     }
 
-    if (!newPatch.deviceHandles.empty()) {
+    if (!newPatch.deviceStates.empty() && newPatch.csdActive) {
         std::lock_guard _afl(mAfMelReporterCallback->mutex());
         std::lock_guard _l(mLock);
         ALOGV("%s add patch handle %d to active devices", __func__, handle);
         startMelComputationForActivePatch_l(newPatch);
-        newPatch.csdActive = true;
         mActiveMelPatches[handle] = newPatch;
     }
 }
@@ -189,18 +173,41 @@
         return;
     }
 
-    for (const auto& deviceHandle : patch.deviceHandles) {
-        ++mActiveDevices[deviceHandle];
-        ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
-              patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
+    for (const auto& device : patch.deviceStates) {
+        if (device.second) {
+            ++mActiveDevices[device.first];
+            ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
+                  patch.streamHandle, device.first, mActiveDevices[device.first]);
 
-        if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
-            outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
-                deviceHandle,
-                patch.streamHandle,
-                outputThread->sampleRate(),
-                outputThread->channelCount(),
-                outputThread->format()));
+            if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+                outputThread->startMelComputation_l(
+                        mSoundDoseManager->getOrCreateProcessorForDevice(
+                                device.first,
+                                patch.streamHandle,
+                                outputThread->sampleRate(),
+                                outputThread->channelCount(),
+                                outputThread->format()));
+            }
+        }
+    }
+}
+
+void MelReporter::startMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    std::lock_guard _laf(mAfMelReporterCallback->mutex());
+    std::lock_guard _l(mLock);
+
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && !device.second) {
+                device.second = true;
+            }
+            csdActive |= device.second;
+        }
+        if (csdActive && !activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            startMelComputationForActivePatch_l(activeMelPatch.second);
         }
     }
 }
@@ -247,30 +254,48 @@
 void MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
 NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
 {
-    if (!patch.csdActive) {
-        // no need to stop CSD inactive patches
-        return;
-    }
-
     auto outputThread = mAfMelReporterCallback->checkOutputThread_l(patch.streamHandle);
 
     ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
-    for (const auto& deviceId : patch.deviceHandles) {
-        if (mActiveDevices[deviceId] > 0) {
-            --mActiveDevices[deviceId];
-            if (mActiveDevices[deviceId] == 0) {
+    for (const auto& device : patch.deviceStates) {
+        if (mActiveDevices[device.first] > 0) {
+            --mActiveDevices[device.first];
+            if (mActiveDevices[device.first] == 0) {
                 // no stream is using deviceId anymore
-                ALOGI("%s removing device %d from active CSD devices", __func__, deviceId);
-                mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+                ALOGI("%s removing device %d from active CSD devices", __func__, device.first);
+                mSoundDoseManager->clearMapDeviceIdEntries(device.first);
             }
         }
     }
 
+    mSoundDoseManager->removeStreamProcessor(patch.streamHandle);
     if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
         outputThread->stopMelComputation_l();
     }
 }
 
+void MelReporter::stopMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    std::lock_guard _laf(mAfMelReporterCallback->mutex());
+    std::lock_guard _l(mLock);
+
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && device.second) {
+                device.second = false;
+            }
+            csdActive |= device.second;
+        }
+
+        if (!csdActive && activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            stopMelComputationForPatch_l(activeMelPatch.second);
+        }
+    }
+
+}
+
 std::optional<audio_patch_handle_t> MelReporter::activePatchStreamHandle_l(
         audio_io_handle_t streamHandle) {
     for(const auto& patchIt : mActiveMelPatches) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 2a1f3b3..78c6c0c 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -40,11 +40,11 @@
  * Class for listening to new patches and starting the MEL computation. MelReporter is
  * concealed within AudioFlinger, their lifetimes are the same.
  */
-class MelReporter : public PatchCommandThread::PatchCommandListener {
+class MelReporter : public PatchCommandThread::PatchCommandListener,
+                    public IMelReporterCallback {
 public:
     explicit MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback)
-        : mAfMelReporterCallback(afMelReporterCallback),
-         mSoundDoseManager(sp<SoundDoseManager>::make()) {}
+        : mAfMelReporterCallback(afMelReporterCallback) {}
 
     void onFirstRef() override;
 
@@ -76,6 +76,10 @@
 
     std::string dump();
 
+    // IMelReporterCallback methods
+    void stopMelComputationForDeviceId(audio_port_handle_t deviceId) override;
+    void startMelComputationForDeviceId(audio_port_handle_t deviceId) override;
+
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
         const IAfPatchPanel::Patch& patch) final;
@@ -91,13 +95,15 @@
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
-        std::vector<audio_port_handle_t> deviceHandles;
+        /**
+         * Stores device ids and whether they are compatible for CSD calculation.
+         * The boolean value can change since BT audio device types are user-configurable
+         * to headphones/headsets or other device types.
+         */
+        std::vector<std::pair<audio_port_handle_t, bool>> deviceStates;
         bool csdActive;
     };
 
-    /** Returns true if we should compute MEL for the given device. */
-    bool shouldComputeMelForDeviceType(audio_devices_t device);
-
     void stopInternalMelComputation();
 
     /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index beb3e1c..5f54e11 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -192,8 +192,8 @@
     sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
 
             // This function should be called with holding thread lock.
-    void updateTeePatches() final;
-    void setTeePatchesToUpdate(TeePatches teePatchesToUpdate) final;
+    void updateTeePatches_l() final;
+    void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
 
     void tallyUnderrunFrames(size_t frames) final {
        if (isOut()) { // we expect this from output tracks only
@@ -348,8 +348,9 @@
 
 private:
     void                interceptBuffer(const AudioBufferProvider::Buffer& buffer);
+    // Must hold thread lock to access tee patches
     template <class F>
-    void                forEachTeePatchTrack(F f) {
+    void                forEachTeePatchTrack_l(F f) {
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index fcf3833..194f0f0 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1940,7 +1940,7 @@
     logTrack("add", track);
     mActiveTracksGeneration++;
     mLatestActiveTrack = track;
-    ++mBatteryCounter[track->uid()].second;
+    track->beginBatteryAttribution();
     mHasChanged = true;
     return mActiveTracks.add(track);
 }
@@ -1954,7 +1954,7 @@
     }
     logTrack("remove", track);
     mActiveTracksGeneration++;
-    --mBatteryCounter[track->uid()].second;
+    track->endBatteryAttribution();
     // mLatestActiveTrack is not cleared even if is the same as track.
     mHasChanged = true;
 #ifdef TEE_SINK
@@ -1967,14 +1967,13 @@
 template <typename T>
 void ThreadBase::ActiveTracks<T>::clear() {
     for (const sp<T> &track : mActiveTracks) {
-        BatteryNotifier::getInstance().noteStopAudio(track->uid());
+        track->endBatteryAttribution();
         logTrack("clear", track);
     }
     mLastActiveTracksGeneration = mActiveTracksGeneration;
     if (!mActiveTracks.empty()) { mHasChanged = true; }
     mActiveTracks.clear();
     mLatestActiveTrack.clear();
-    mBatteryCounter.clear();
 }
 
 template <typename T>
@@ -1985,27 +1984,6 @@
         thread->updateWakeLockUids_l(getWakeLockUids());
         mLastActiveTracksGeneration = mActiveTracksGeneration;
     }
-
-    // Updates BatteryNotifier uids
-    for (auto it = mBatteryCounter.begin(); it != mBatteryCounter.end();) {
-        const uid_t uid = it->first;
-        ssize_t &previous = it->second.first;
-        ssize_t &current = it->second.second;
-        if (current > 0) {
-            if (previous == 0) {
-                BatteryNotifier::getInstance().noteStartAudio(uid);
-            }
-            previous = current;
-            ++it;
-        } else if (current == 0) {
-            if (previous > 0) {
-                BatteryNotifier::getInstance().noteStopAudio(uid);
-            }
-            it = mBatteryCounter.erase(it); // std::map<> is stable on iterator erase.
-        } else /* (current < 0) */ {
-            LOG_ALWAYS_FATAL("negative battery count %zd", current);
-        }
-    }
 }
 
 template <typename T>
@@ -4160,7 +4138,7 @@
             setHalLatencyMode_l();
 
             for (const auto &track : mActiveTracks ) {
-                track->updateTeePatches();
+                track->updateTeePatches_l();
             }
 
             // signal actual start of output stream when the render position reported by the kernel
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 3098892..c6c585b 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -783,8 +783,6 @@
                         return wakeLockUids; // moved by underlying SharedBuffer
                     }
 
-                    std::map<uid_t, std::pair<ssize_t /* previous */, ssize_t /* current */>>
-                                        mBatteryCounter;
                     SortedVector<sp<T>> mActiveTracks;
                     int                 mActiveTracksGeneration;
                     int                 mLastActiveTracksGeneration;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 4e37953..5708c61 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -236,6 +236,22 @@
     /** Set that a metadata has changed and needs to be notified to backend. Thread safe. */
     void setMetadataHasChanged() final { mChangeNotified.clear(); }
 
+    /**
+     * Called when a track moves to active state to record its contribution to battery usage.
+     * Track state transitions should eventually be handled within the track class.
+     */
+    void beginBatteryAttribution() final {
+        mBatteryStatsHolder.emplace(uid());
+    }
+
+    /**
+     * Called when a track moves out of the active state to record its contribution
+     * to battery usage.
+     */
+    void endBatteryAttribution() final {
+        mBatteryStatsHolder.reset();
+    }
+
 protected:
     DISALLOW_COPY_AND_ASSIGN(TrackBase);
 
@@ -379,6 +395,8 @@
 
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag    mChangeNotified = ATOMIC_FLAG_INIT;
+    // RAII object for battery stats book-keeping
+    std::optional<mediautils::BatteryStatsAudioHandle> mBatteryStatsHolder;
 };
 
 class PatchTrackBase : public PatchProxyBufferProvider, public virtual IAfPatchTrackBase
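
The beginBatteryAttribution()/endBatteryAttribution() pair above replaces the per-uid reference counting that ActiveTracks previously kept in mBatteryCounter: constructing the handle notes the start of audio for the uid, and destroying it notes the stop. A minimal, self-contained sketch of that RAII shape follows; the stub notifier and handle are hypothetical stand-ins, not the real mediautils::BatteryStatsAudioHandle.

    // Sketch only: an RAII handle that reports start/stop of audio for a uid.
    #include <cstdio>
    #include <optional>

    namespace sketch {

    struct Notifier {
        static void noteStartAudio(int uid) { std::printf("start audio uid=%d\n", uid); }
        static void noteStopAudio(int uid)  { std::printf("stop audio uid=%d\n", uid); }
    };

    class BatteryStatsAudioHandle {
    public:
        explicit BatteryStatsAudioHandle(int uid) : mUid(uid) { Notifier::noteStartAudio(mUid); }
        ~BatteryStatsAudioHandle() { Notifier::noteStopAudio(mUid); }
        BatteryStatsAudioHandle(const BatteryStatsAudioHandle&) = delete;
        BatteryStatsAudioHandle& operator=(const BatteryStatsAudioHandle&) = delete;
    private:
        int mUid;
    };

    }  // namespace sketch

    int main() {
        std::optional<sketch::BatteryStatsAudioHandle> holder;  // like mBatteryStatsHolder
        holder.emplace(10042);  // track becomes active: beginBatteryAttribution()
        holder.reset();         // track leaves the active set: endBatteryAttribution()
        return 0;
    }
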
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 2340018..ecea9eb 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -892,12 +892,12 @@
             Mutex::Autolock _l(thread->mutex());
             auto* const playbackThread = thread->asIAfPlaybackThread().get();
             wasActive = playbackThread->destroyTrack_l(this);
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
         }
         if (isExternalTrack() && !wasActive) {
             AudioSystem::releaseOutput(mPortId);
         }
     }
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
 }
 
 void Track::appendDumpHeader(String8& result) const
@@ -1270,12 +1270,13 @@
             buffer.mFrameCount = 1;
             (void) mAudioTrackServerProxy->obtainBuffer(&buffer, true /*ackFlush*/);
         }
+        if (status == NO_ERROR) {
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->start(); });
+        }
     } else {
         status = BAD_VALUE;
     }
     if (status == NO_ERROR) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
-
         // send format to AudioManager for playback activity monitoring
         const sp<IAudioManager> audioManager =
                 thread->afThreadCallback()->getOrCreateAudioManager();
@@ -1326,8 +1327,8 @@
             ALOGV("%s(%d): not stopping/stopped => stopping/stopped on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
         }
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->stop(); });
     }
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->stop(); });
 }
 
 void Track::pause()
@@ -1362,9 +1363,9 @@
         default:
             break;
         }
+        // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->pause(); });
     }
-    // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->pause(); });
 }
 
 void Track::flush()
@@ -1425,9 +1426,10 @@
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
         playbackThread->broadcast_l();
+        // Flush the Tee so that on resume we do not replay old data and glitch on the
+        // transition to new data.
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->flush(); });
     }
-    // Flush the Tee to avoid on resume playing old data and glitching on the transition to new data
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->flush(); });
 }
 
 // must be called with thread lock held
@@ -1606,19 +1608,19 @@
     *backInserter++ = metadata;
 }
 
-void Track::updateTeePatches() {
+void Track::updateTeePatches_l() {
     if (mTeePatchesToUpdate.has_value()) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
         mTeePatches = mTeePatchesToUpdate.value();
         if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
                 mState == TrackBase::STOPPING_1) {
-            forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->start(); });
         }
         mTeePatchesToUpdate.reset();
     }
 }
 
-void Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+void Track::setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) {
     ALOGW_IF(mTeePatchesToUpdate.has_value(),
              "%s, existing tee patches to update will be ignored", __func__);
     mTeePatchesToUpdate = std::move(teePatchesToUpdate);
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 21f346e..97470a2 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -301,6 +301,25 @@
     return binder::Status::ok();
 }
 
+binder::Status SoundDoseManager::SoundDose::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& btDeviceCategories) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->initCachedAudioDeviceCategories(btDeviceCategories);
+    }
+    return binder::Status::ok();
+}
+binder::Status SoundDoseManager::SoundDose::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& btAudioDevice) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setAudioDeviceCategory(btAudioDevice);
+    }
+    return binder::Status::ok();
+}
+
 binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
     ALOGV("%s", __func__);
     auto soundDoseManager = mSoundDoseManager.promote();
@@ -358,7 +377,9 @@
         auto melProcessor = mp.second.promote();
         if (melProcessor != nullptr) {
             auto deviceId = melProcessor->getDeviceId();
-            if (mActiveDeviceTypes[deviceId] == deviceType) {
+            const auto deviceTypeIt = mActiveDeviceTypes.find(deviceId);
+            if (deviceTypeIt != mActiveDeviceTypes.end() &&
+                deviceTypeIt->second == deviceType) {
                 ALOGV("%s: set attenuation for deviceId %d to %f",
                         __func__, deviceId, attenuationDB);
                 melProcessor->setAttenuation(attenuationDB);
@@ -390,6 +411,103 @@
     return mEnabledCsd;
 }
 
+void SoundDoseManager::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories) {
+    ALOGV("%s", __func__);
+    {
+        const std::lock_guard _l(mLock);
+        mBluetoothDevicesWithCsd.clear();
+    }
+    for (const auto& btDeviceCategory : deviceCategories) {
+        setAudioDeviceCategory(btDeviceCategory);
+    }
+}
+
+void SoundDoseManager::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& audioDevice) {
+    ALOGV("%s: set BT audio device type with address %s to headphone %d", __func__,
+          audioDevice.address.c_str(), audioDevice.csdCompatible);
+
+    std::vector<audio_port_handle_t> devicesToStart;
+    std::vector<audio_port_handle_t> devicesToStop;
+    {
+        const std::lock_guard _l(mLock);
+        const auto deviceIt = mBluetoothDevicesWithCsd.find(
+                std::make_pair(audioDevice.address,
+                               static_cast<audio_devices_t>(audioDevice.internalAudioType)));
+        if (deviceIt != mBluetoothDevicesWithCsd.end()) {
+            deviceIt->second = audioDevice.csdCompatible;
+        } else {
+            mBluetoothDevicesWithCsd.emplace(
+                    std::make_pair(audioDevice.address,
+                                   static_cast<audio_devices_t>(audioDevice.internalAudioType)),
+                    audioDevice.csdCompatible);
+        }
+
+        for (const auto &activeDevice: mActiveDevices) {
+            if (activeDevice.first.address() == audioDevice.address &&
+                activeDevice.first.mType ==
+                static_cast<audio_devices_t>(audioDevice.internalAudioType)) {
+                if (audioDevice.csdCompatible) {
+                    devicesToStart.push_back(activeDevice.second);
+                } else {
+                    devicesToStop.push_back(activeDevice.second);
+                }
+            }
+        }
+    }
+
+    for (const auto& deviceToStart : devicesToStart) {
+        mMelReporterCallback->startMelComputationForDeviceId(deviceToStart);
+    }
+    for (const auto& deviceToStop : devicesToStop) {
+        mMelReporterCallback->stopMelComputationForDeviceId(deviceToStop);
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceType(audio_devices_t device) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
+    switch (device) {
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
+        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                                            const std::string& deviceAddress) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
+    if (!audio_is_ble_out_device(type) && !audio_is_a2dp_device(type)) {
+        return shouldComputeCsdForDeviceType(type);
+    }
+
+    const std::lock_guard _l(mLock);
+    const auto deviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(deviceAddress, type));
+    return deviceIt != mBluetoothDevicesWithCsd.end() && deviceIt->second;
+}
+
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
     // invalidate any HAL sound dose interface used
     setHalSoundDoseInterface(nullptr);
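
One pattern worth calling out in SoundDoseManager::setAudioDeviceCategory() above: the affected device ids are gathered into devicesToStart/devicesToStop while mLock is held, and the IMelReporterCallback methods are invoked only after that lock scope ends, because start/stopMelComputationForDeviceId() take the reporter's own locks. A self-contained sketch of that collect-then-callback shape, using stub types rather than the real classes:

    // Sketch only: collect work under the lock, invoke callbacks after releasing it.
    #include <cstdio>
    #include <map>
    #include <mutex>
    #include <vector>

    struct MelCallbackSketch {
        void startMelComputationForDeviceId(int deviceId) { std::printf("start %d\n", deviceId); }
        void stopMelComputationForDeviceId(int deviceId)  { std::printf("stop %d\n", deviceId); }
    };

    class DoseManagerSketch {
    public:
        explicit DoseManagerSketch(MelCallbackSketch* cb) : mCallback(cb) {}

        void setDeviceCsdCompatible(int deviceId, bool csdCompatible) {
            std::vector<int> toStart, toStop;
            {
                const std::lock_guard<std::mutex> lock(mLock);
                mCsdCompatible[deviceId] = csdCompatible;
                (csdCompatible ? toStart : toStop).push_back(deviceId);
            }  // lock released before calling back into the reporter
            for (int id : toStart) mCallback->startMelComputationForDeviceId(id);
            for (int id : toStop)  mCallback->stopMelComputationForDeviceId(id);
        }

    private:
        MelCallbackSketch* mCallback;
        std::mutex mLock;
        std::map<int, bool> mCsdCompatible;
    };

    int main() {
        MelCallbackSketch reporter;
        DoseManagerSketch manager(&reporter);
        manager.setDeviceCsdCompatible(/*deviceId=*/7, /*csdCompatible=*/true);
        manager.setDeviceCsdCompatible(/*deviceId=*/7, /*csdCompatible=*/false);
        return 0;
    }

Releasing mLock first keeps it out of the lock hierarchy that the MelReporter methods acquire (mAfMelReporterCallback->mutex() followed by the reporter's mLock), avoiding lock-order inversions.
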
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 9ed0661..b5a1362 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -32,6 +32,15 @@
 
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 
+class IMelReporterCallback : public virtual RefBase {
+public:
+    IMelReporterCallback() {};
+    virtual ~IMelReporterCallback() {};
+
+    virtual void stopMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+    virtual void startMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+};
+
 class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
 public:
     /** CSD is computed with a rolling window of 7 days. */
@@ -39,8 +48,9 @@
     /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
     static constexpr float kDefaultRs2UpperBound = 100.f;
 
-    SoundDoseManager()
-        : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+    explicit SoundDoseManager(const sp<IMelReporterCallback>& melReporterCallback)
+        : mMelReporterCallback(melReporterCallback),
+          mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
           mRs2UpperBound(kDefaultRs2UpperBound) {};
 
     /**
@@ -104,6 +114,21 @@
     /** Returns true if CSD is enabled. */
     bool isCsdEnabled();
 
+    void initCachedAudioDeviceCategories(
+            const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories);
+
+    void setAudioDeviceCategory(
+            const media::ISoundDose::AudioDeviceCategory& audioDevice);
+
+    /**
+     * Returns true if CSD can be computed for the given device type. For Bluetooth devices
+     * this is true only if the address was categorized as headphones/headsets.
+     */
+    bool shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                              const std::string& deviceAddress);
+    /** Returns true for all device types which could support CSD computation. */
+    bool shouldComputeCsdForDeviceType(audio_devices_t device);
+
     std::string dump() const;
 
     // used for testing only
@@ -139,6 +164,13 @@
         binder::Status getOutputRs2UpperBound(float* value) override;
         binder::Status setCsdEnabled(bool enabled) override;
 
+        binder::Status initCachedAudioDeviceCategories(
+                const std::vector<media::ISoundDose::AudioDeviceCategory> &btDeviceCategories)
+                override;
+
+        binder::Status setAudioDeviceCategory(
+                const media::ISoundDose::AudioDeviceCategory& btAudioDevice) override;
+
         binder::Status getCsd(float* value) override;
         binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
         binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
@@ -179,6 +211,8 @@
 
     mutable std::mutex mLock;
 
+    const sp<IMelReporterCallback> mMelReporterCallback;
+
     // no need for lock since MelAggregator is thread-safe
     const sp<audio_utils::MelAggregator> mMelAggregator;
 
@@ -191,6 +225,17 @@
     std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
     std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
 
+    struct bt_device_type_hash {
+        std::size_t operator() (const std::pair<std::string, audio_devices_t> &deviceType) const {
+            return std::hash<std::string>()(deviceType.first) ^
+                   std::hash<audio_devices_t>()(deviceType.second);
+        }
+    };
+    // Caches the BT device category information as received from the Java side,
+    // see SoundDoseManager::initCachedAudioDeviceCategories
+    std::unordered_map<std::pair<std::string, audio_devices_t>, bool, bt_device_type_hash>
+            mBluetoothDevicesWithCsd GUARDED_BY(mLock);
+
     float mRs2UpperBound GUARDED_BY(mLock);
     std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
 
@@ -199,7 +244,7 @@
     std::shared_ptr<ISoundDose> mHalSoundDose GUARDED_BY(mLock);
     std::shared_ptr<HalSoundDoseCallback> mHalSoundDoseCallback GUARDED_BY(mLock);
 
-    bool mUseFrameworkMel GUARDED_BY(mLock) = true;
+    bool mUseFrameworkMel GUARDED_BY(mLock) = false;
     bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
 
     bool mEnabledCsd GUARDED_BY(mLock) = true;
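
As a side note on the mBluetoothDevicesWithCsd member above: std::unordered_map has no default std::hash specialization for std::pair, so the patch supplies bt_device_type_hash. The sketch below shows the same keying pattern in isolation; it mirrors the XOR combiner from the patch and uses a plain integer as a stand-in for audio_devices_t.

    // Sketch only: keying an unordered_map by (address, device type) with a custom hash.
    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <unordered_map>
    #include <utility>

    using DeviceType = uint32_t;  // stand-in for audio_devices_t

    struct PairHash {
        std::size_t operator()(const std::pair<std::string, DeviceType>& key) const {
            // Same combining scheme as bt_device_type_hash: XOR of the element hashes.
            return std::hash<std::string>()(key.first) ^ std::hash<DeviceType>()(key.second);
        }
    };

    int main() {
        std::unordered_map<std::pair<std::string, DeviceType>, bool, PairHash> csdCompatible;
        csdCompatible[{"00:11:22:33:44:55", /*hypothetical device-type code*/ 0x80u}] = true;

        const auto it = csdCompatible.find({"00:11:22:33:44:55", 0x80u});
        std::printf("known=%d csd=%d\n", it != csdCompatible.end(),
                    it != csdCompatible.end() && it->second);
        return 0;
    }

XOR keeps the sketch faithful to the patch; a mixing combiner (hash_combine-style multiply-and-add) would generally give fewer collisions, but for a small set of Bluetooth devices it makes no practical difference.
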
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 9fab77d..5860d96 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -39,10 +39,18 @@
                 (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
 };
 
+class MelReporterCallback : public IMelReporterCallback {
+public:
+    MOCK_METHOD(void, startMelComputationForDeviceId, (audio_port_handle_t), (override));
+    MOCK_METHOD(void, stopMelComputationForDeviceId, (audio_port_handle_t), (override));
+};
+
+
 class SoundDoseManagerTest : public ::testing::Test {
 protected:
     void SetUp() override {
-        mSoundDoseManager = sp<SoundDoseManager>::make();
+        mMelReporterCallback = sp<MelReporterCallback>::make();
+        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback);
         mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
 
         ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
@@ -52,6 +60,7 @@
             });
     }
 
+    sp<MelReporterCallback> mMelReporterCallback;
     sp<SoundDoseManager> mSoundDoseManager;
     std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
 };
@@ -239,9 +248,56 @@
 }
 
 TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
-    // TODO: for now dogfooding with internal MEL. Revert to false when using the HAL MELs
-    EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
+    EXPECT_FALSE(mSoundDoseManager->forceUseFrameworkMel());
 }
 
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStopsNonHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = false;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), stopMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStartsHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, InitCachedAudioDevicesStartsOnlyActiveDevices) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    media::ISoundDose::AudioDeviceCategory device2;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    device2.address = "dev2";
+    device2.csdCompatible = true;
+    device2.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+    std::vector<media::ISoundDose::AudioDeviceCategory> btDevices = {device1, device2};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->initCachedAudioDeviceCategories(btDevices);
+}
+
+
 }  // namespace
 }  // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 876911d..1e57edd 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -102,9 +102,13 @@
     void setVolume(float volumeDb) { mCurVolumeDb = volumeDb; }
     float getVolume() const { return mCurVolumeDb; }
 
+    void setIsVoice(bool isVoice) { mIsVoice = isVoice; }
+    bool isVoice() const { return mIsVoice; }
+
 private:
     int mMuteCount = 0; /**< mute request counter */
     float mCurVolumeDb = NAN; /**< current volume in dB. */
+    bool mIsVoice = false; /**< true if this volume source is used for voice call volume */
 };
 /**
  * Note: volume activities shall be indexed by CurvesId if we want to allow multiple
@@ -162,7 +166,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     /**
      * @brief setStopTime set the stop time due to the client stoppage or a re routing of this
@@ -222,17 +227,25 @@
     {
         return mVolumeActivities[vs].decMuteCount();
     }
-    void setCurVolume(VolumeSource vs, float volumeDb)
+    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
     {
         // Even if not activity for this source registered, need to create anyway
         mVolumeActivities[vs].setVolume(volumeDb);
+        mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
     }
     float getCurVolume(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities) ?
                     mVolumeActivities.at(vs).getVolume() : NAN;
     }
-
+    VolumeSource getVoiceSource() {
+        for (const auto &iter : mVolumeActivities) {
+            if (iter.second.isVoice()) {
+                return iter.first;
+            }
+        }
+        return VOLUME_SOURCE_NONE;
+    }
     bool isStrategyActive(product_strategy_t ps, uint32_t inPastMs = 0, nsecs_t sysTime = 0) const
     {
         return mRoutingActivities.find(ps) != std::end(mRoutingActivities)?
@@ -381,7 +394,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
@@ -424,6 +438,15 @@
     bool supportsAllDevices(const DeviceVector &devices) const;
 
     /**
+     * @brief supportsAtLeastOne checks if any device in devices is currently supported
+     * @param devices to be checked against
+     * @return true if any device is weakly supported by type (e.g. for non bus / remote submix
+     *         devices), true if any device is supported by both type and address (for bus /
+     *         remote submix devices), false otherwise
+     */
+    bool supportsAtLeastOne(const DeviceVector &devices) const;
+
+    /**
      * @brief supportsDevicesForPlayback
      * @param devices to be checked against
      * @return true if the devices is a supported combo for playback
@@ -475,7 +498,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 92292e1..7e29e10 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -138,7 +138,7 @@
      */
     status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
     status_t removeUserIdDeviceAffinities(int userId);
-    status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
+    status_t getDevicesForUserId(int userId, AudioDeviceTypeAddrVector& devices) const;
 
     void dump(String8 *dst) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 4877166..2f424b8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -163,7 +163,8 @@
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
                                       uint32_t delayMs,
-                                      bool force)
+                                      bool force,
+                                      bool isVoiceVolSrc)
 {
 
     if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
@@ -176,7 +177,7 @@
     // - the force flag is set
     if (volumeDb != getCurVolume(volumeSource) || force) {
         ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
-        setCurVolume(volumeSource, volumeDb);
+        setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
         return true;
     }
     return false;
@@ -389,6 +390,11 @@
     return supportedDevices().containsAllDevices(devices);
 }
 
+bool SwAudioOutputDescriptor::supportsAtLeastOne(const DeviceVector &devices) const
+{
+    return filterSupportedDevices(devices).size() > 0;
+}
+
 bool SwAudioOutputDescriptor::supportsDevicesForPlayback(const DeviceVector &devices) const
 {
     // No considering duplicated output
@@ -505,11 +511,12 @@
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
-            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
         return false;
     }
     if (streams.empty()) {
@@ -555,6 +562,10 @@
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
         mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
+        VolumeSource callVolSrc = getVoiceSource();
+        if (callVolSrc != VOLUME_SOURCE_NONE) {
+            setCurVolume(callVolSrc, getCurVolume(vs), true);
+        }
     }
     for (const auto &stream : streams) {
         ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
@@ -783,10 +794,11 @@
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b41f86d..f870b4f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -642,7 +642,7 @@
 }
 
 status_t AudioPolicyMixCollection::getDevicesForUserId(int userId,
-        Vector<AudioDeviceTypeAddr>& devices) const {
+        AudioDeviceTypeAddrVector& devices) const {
     // for each player mix:
     // find rules that don't exclude this userId, and add the device to the list
     for (size_t i = 0; i < size(); i++) {
@@ -660,7 +660,7 @@
             }
         }
         if (ruleAllowsUserId) {
-            devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.c_str()));
+            devices.push_back(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.c_str()));
         }
     }
     return NO_ERROR;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 8ccb8b9..82f51ad 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -115,12 +115,22 @@
         profile->setDynamicFormat(true);
         profile->setDynamicChannels(dynamicFormatProfile->isDynamicChannels());
         profile->setDynamicRate(dynamicFormatProfile->isDynamicRate());
-        addAudioProfileAndSort(audioProfileVector, profile);
+        size_t profileIndex = 0;
+        for (; profileIndex < audioProfileVector.size(); profileIndex++) {
+            if (profile->equals(audioProfileVector.at(profileIndex))) {
+                // The dynamic profile is already there
+                break;
+            }
+        }
+        if (profileIndex >= audioProfileVector.size()) {
+            // Only add when the dynamic profile is not there
+            addAudioProfileAndSort(audioProfileVector, profile);
+        }
     }
 }
 
 void addDynamicAudioProfileAndSort(AudioProfileVector &audioProfileVector,
-                                      const sp<AudioProfile> &profileToAdd)
+                                   const sp<AudioProfile> &profileToAdd)
 {
     // Check valid profile to add:
     if (!profileToAdd->hasValidFormat()) {
@@ -143,11 +153,15 @@
                 audioProfileVector, profileToAdd->getChannels(), profileToAdd->getFormat());
         return;
     }
+    const bool originalIsDynamicFormat = profileToAdd->isDynamicFormat();
+    profileToAdd->setDynamicFormat(true); // set the format as dynamic to allow removal
     // Go through the list of profile to avoid duplicates
     for (size_t profileIndex = 0; profileIndex < audioProfileVector.size(); profileIndex++) {
         const sp<AudioProfile> &profile = audioProfileVector.at(profileIndex);
-        if (profile->isValid() && profile == profileToAdd) {
-            // Nothing to do
+        if (profile->isValid() && profile->equals(profileToAdd)) {
+            // The same profile is already there, no need to add.
+            // Reset the dynamic format flag to its original value.
+            profileToAdd->setDynamicFormat(originalIsDynamicFormat);
             return;
         }
     }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1658f40..972e6c5 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1298,7 +1298,8 @@
         if (outputDevices.size() == 1) {
             info = getPreferredMixerAttributesInfo(
                     outputDevices.itemAt(0)->getId(),
-                    mEngine->getProductStrategyForAttributes(*resultAttr));
+                    mEngine->getProductStrategyForAttributes(*resultAttr),
+                    true /*activeBitPerfectPreferred*/);
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
@@ -2152,6 +2153,26 @@
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
+            if (info->getActiveClientCount() == 1 &&
+                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+                // If it is first bit-perfect client, reroute all clients that will be routed to
+                // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
+                PortHandleVector clientsToInvalidate;
+                for (size_t i = 0; i < mOutputs.size(); i++) {
+                    if (mOutputs[i] == outputDesc ||
+                        mOutputs[i]->devices().filter(outputDesc->devices()).isEmpty()) {
+                        continue;
+                    }
+                    for (const auto& c : mOutputs[i]->getClientIterable()) {
+                        clientsToInvalidate.push_back(c->portId());
+                    }
+                }
+                if (!clientsToInvalidate.empty()) {
+                    ALOGD("%s Invalidate clients due to first bit-perfect client started",
+                          __func__);
+                    mpClientInterface->invalidateTracks(clientsToInvalidate);
+                }
+            }
         }
     }
 
@@ -3787,6 +3808,44 @@
     return true;
 }
 
+void AudioPolicyManager::changeOutputDevicesMuteState(
+        const AudioDeviceTypeAddrVector& devices) {
+    ALOGVV("%s() num devices %zu", __func__, devices.size());
+
+    std::vector<sp<SwAudioOutputDescriptor>> outputs =
+            getSoftwareOutputsForDevices(devices);
+
+    for (size_t i = 0; i < outputs.size(); i++) {
+        sp<SwAudioOutputDescriptor> outputDesc = outputs[i];
+        DeviceVector prevDevices = outputDesc->devices();
+        checkDeviceMuteStrategies(outputDesc, prevDevices, 0 /* delayMs */);
+    }
+}
+
+std::vector<sp<SwAudioOutputDescriptor>> AudioPolicyManager::getSoftwareOutputsForDevices(
+        const AudioDeviceTypeAddrVector& devices) const
+{
+    std::vector<sp<SwAudioOutputDescriptor>> outputs;
+    DeviceVector deviceDescriptors;
+    for (size_t j = 0; j < devices.size(); j++) {
+        sp<DeviceDescriptor> desc = mHwModules.getDeviceDescriptor(
+                devices[j].mType, devices[j].getAddress(), String8(), AUDIO_FORMAT_DEFAULT);
+        if (desc == nullptr || !audio_is_output_device(devices[j].mType)) {
+            ALOGE("%s: device type %#x address %s not supported or not an output device",
+                __func__, devices[j].mType, devices[j].getAddress());
+            continue;
+        }
+        deviceDescriptors.add(desc);
+    }
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (!mOutputs.valueAt(i)->supportsAtLeastOne(deviceDescriptors)) {
+            continue;
+        }
+        outputs.push_back(mOutputs.valueAt(i));
+    }
+    return outputs;
+}
+
 status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
         const AudioDeviceTypeAddrVector& devices) {
     ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
@@ -3853,7 +3912,8 @@
     return NO_ERROR;
 }
 
-void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
+void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs,
+    bool skipDelays)
 {
     uint32_t waitMs = 0;
     bool wasLeUnicastActive = isLeUnicastActive();
@@ -3879,8 +3939,8 @@
                 continue;
             }
             waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
-                                      true /*requiresMuteCheck*/,
-                                      !forceRouting /*requiresVolumeCheck*/);
+                                      !skipDelays /*requiresMuteCheck*/,
+                                      !forceRouting /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -4065,13 +4125,18 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
 
 status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
     ALOGV("%s() userId=%d", __FUNCTION__, userId);
+    AudioDeviceTypeAddrVector devices;
+    mPolicyMixes.getDevicesForUserId(userId, devices);
     status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
     if (status != NO_ERROR) {
         ALOGE("%s() Could not remove all device affinities fo userId = %d",
@@ -4081,7 +4146,10 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
@@ -4490,16 +4558,24 @@
 }
 
 sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
-        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+        audio_port_handle_t devicePortId,
+        product_strategy_t strategy,
+        bool activeBitPerfectPreferred) {
     auto it = mPreferredMixerAttrInfos.find(devicePortId);
     if (it == mPreferredMixerAttrInfos.end()) {
         return nullptr;
     }
-    auto mixerAttrInfoIt = it->second.find(strategy);
-    if (mixerAttrInfoIt == it->second.end()) {
-        return nullptr;
+    if (activeBitPerfectPreferred) {
+        for (auto [strategy, info] : it->second) {
+            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->getActiveClientCount() != 0) {
+                return info;
+            }
+        }
     }
-    return mixerAttrInfoIt->second;
+    auto strategyMatchedMixerAttrInfoIt = it->second.find(strategy);
+    return strategyMatchedMixerAttrInfoIt == it->second.end()
+            ? nullptr : strategyMatchedMixerAttrInfoIt->second;
 }
 
 status_t AudioPolicyManager::getPreferredMixerAttributes(
@@ -5841,22 +5917,26 @@
         }
     }
 
+    // The caller can have the audio config criteria ignored by either passing a null ptr or
+    // the AUDIO_CONFIG_INITIALIZER value.
+    // If an audio config is specified, current policy is to only allow spatialization for
+    // some positional channel masks and linear PCM formats.
+
+    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
+        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
+            return false;
+        }
+        if (!audio_is_linear_pcm(config->format)) {
+            return false;
+        }
+    }
+
     sp<IOProfile> profile =
             getSpatializerOutputProfile(config, devices);
     if (profile == nullptr) {
         return false;
     }
 
-    // The caller can have the audio config criteria ignored by either passing a null ptr or
-    // the AUDIO_CONFIG_INITIALIZER value.
-    // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks.
-
-    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
-            return false;
-        }
-    }
     return true;
 }
 
@@ -7320,7 +7400,8 @@
                                               bool force,
                                               int delayMs,
                                               audio_patch_handle_t *patchHandle,
-                                              bool requiresMuteCheck, bool requiresVolumeCheck)
+                                              bool requiresMuteCheck, bool requiresVolumeCheck,
+                                              bool skipMuteDelay)
 {
     // TODO(b/262404095): Consider if the output need to be reopened.
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
@@ -7328,9 +7409,9 @@
 
     if (outputDesc->isDuplicated()) {
         muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         return muteWaitMs;
     }
 
@@ -7396,12 +7477,16 @@
 
         // Add half reported latency to delayMs when muteWaitMs is null in order
         // to avoid disordered sequence of muting volume and changing devices.
-        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(),
-                muteWaitMs == 0 ? (delayMs + (outputDesc->latency() / 2)) : delayMs);
+        int actualDelayMs = !skipMuteDelay && muteWaitMs == 0
+                ? (delayMs + (outputDesc->latency() / 2)) : delayMs;
+        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(), actualDelayMs);
     }
 
-    // update stream volumes according to new device
-    applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    // Since muting is skipped, also skip applying stream volumes; they are applied externally.
+    if (!skipMuteDelay) {
+        // update stream volumes according to new device
+        applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    }
 
     return muteWaitMs;
 }
@@ -7759,8 +7844,8 @@
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
-    outputDesc->setVolume(
-            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+    outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
+            deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
         float voiceVolume;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 88bafef..863c785 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -535,8 +535,9 @@
          *        and currently active, allow to have proper drain and avoid pops
          * @param requiresVolumeCheck true if called requires to reapply volume if the routing did
          * not change (but the output is still routed).
+         * @param skipMuteDelay if true, skip the mute delay when installing the audio patch
          * @return the number of ms we have slept to allow new routing to take effect in certain
-         * cases.
+         *        cases.
          */
         uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
@@ -544,7 +545,8 @@
                                   int delayMs = 0,
                                   audio_patch_handle_t *patchHandle = NULL,
                                   bool requiresMuteCheck = true,
-                                  bool requiresVolumeCheck = false);
+                                  bool requiresVolumeCheck = false,
+                                  bool skipMuteDelay = false);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
@@ -647,8 +649,10 @@
         /**
          * @brief updates routing for all outputs (including call if call in progress).
          * @param delayMs delay for unmuting if required
+         * @param skipDelays if true, all delays are skipped while updating routing
          */
-        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
+        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
+                bool skipDelays = false);
 
         bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
@@ -1241,6 +1245,21 @@
                 const char* context,
                 bool matchAddress = true);
 
+        /**
+         * @brief changeOutputDevicesMuteState mute/unmute devices using checkDeviceMuteStrategies
+         * @param devices devices to mute/unmute
+         */
+        void changeOutputDevicesMuteState(const AudioDeviceTypeAddrVector& devices);
+
+        /**
+         * @brief Returns a vector of software output descriptors that support the given devices
+         * @param devices devices to query
+         * @return open outputs on which the devices are supported, as determined by
+         *      SwAudioOutputDescriptor::supportsAtLeastOne
+         */
+        std::vector<sp<SwAudioOutputDescriptor>> getSoftwareOutputsForDevices(
+                const AudioDeviceTypeAddrVector& devices) const;
+
         bool isScoRequestedForComm() const;
 
         bool isHearingAidUsedForComm() const;
@@ -1298,8 +1317,15 @@
                                        uint32_t flags,
                                        bool isInput);
 
+        /**
+         * Returns the preferred mixer attributes info for the given device port id and strategy.
+         * The bit-perfect mixer attributes are returned if they are currently active and
+         * `activeBitPerfectPreferred` is true.
+         */
         sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
-                audio_port_handle_t devicePortId, product_strategy_t strategy);
+                audio_port_handle_t devicePortId,
+                product_strategy_t strategy,
+                bool activeBitPerfectPreferred = false);
 
         sp<SwAudioOutputDescriptor> reopenOutput(
                 sp<SwAudioOutputDescriptor> outputDesc,
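
Tying these declarations back to the setUserIdDeviceAffinities()/removeUserIdDeviceAffinities() hunks earlier in the patch: the new flow is mute the affected outputs, re-evaluate routing with delays skipped, then unmute. The sketch below only illustrates that call ordering with stub types; it is not the real AudioPolicyManager.

    // Sketch only: mute -> reroute (skipDelays=true) -> unmute ordering.
    #include <cstdio>
    #include <string>
    #include <vector>

    struct DeviceAddr { std::string address; };
    using DeviceList = std::vector<DeviceAddr>;

    class PolicyManagerSketch {
    public:
        void checkForDeviceAndOutputChanges() { std::puts("re-evaluate outputs"); }
        void changeOutputDevicesMuteState(const DeviceList& devices) {
            std::printf("toggle mute on outputs for %zu device(s)\n", devices.size());
        }
        void updateCallAndOutputRouting(bool forceVolumeReeval, unsigned delayMs, bool skipDelays) {
            std::printf("reroute: forceVolumeReeval=%d delayMs=%u skipDelays=%d\n",
                        forceVolumeReeval, delayMs, skipDelays);
        }
    };

    int main() {
        PolicyManagerSketch apm;
        const DeviceList devices{{"BUS00_MEDIA"}};  // hypothetical device address
        apm.checkForDeviceAndOutputChanges();
        apm.changeOutputDevicesMuteState(devices);   // mute
        apm.updateCallAndOutputRouting(/*forceVolumeReeval=*/false, /*delayMs=*/0,
                                       /*skipDelays=*/true);  // reroute without extra delays
        apm.changeOutputDevicesMuteState(devices);   // unmute
        return 0;
    }
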
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index d7aa5c9..041aa1c 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,6 +25,7 @@
 #include <sys/time.h>
 #include <dlfcn.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -215,6 +216,27 @@
 {
     delete interface;
 }
+
+namespace {
+int getTargetSdkForPackageName(std::string_view packageName) {
+    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+    int targetSdk = -1;
+    if (binder != nullptr) {
+        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (pm != nullptr) {
+            const auto status = pm->getTargetSdkVersionForPackage(
+                    String16{packageName.data(), packageName.size()}, &targetSdk);
+            ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
+            return status.isOk() ? targetSdk : -1;
+        }
+    }
+    return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
@@ -1926,10 +1948,14 @@
     checkOp();
     mOpCallback = new RecordAudioOpCallback(this);
     ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+    int flags = doesPackageTargetAtLeastU(
+            mAttributionSource.packageName.value_or("")) ?
+            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
     // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
     // since it controls the mic permission for legacy apps.
     mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
         mAttributionSource.packageName.value_or(""))),
+        flags,
         mOpCallback);
 }
 
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5e58dbb..15eae14 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1232,6 +1232,19 @@
     EXPECT_FALSE(isBitPerfect);
     EXPECT_EQ(bitPerfectOutput, output);
 
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
+
     // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
     // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
     getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3e7af3d..da64b8d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -305,14 +305,23 @@
     for (auto& i : mListenerList) {
         if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
                 i->getListenerUid())) {
-            ALOGV("Skipping torch callback for system-only camera device %s",
-                    cameraId.c_str());
+            ALOGV("%s: Skipping torch callback for system-only camera device %s",
+                    __FUNCTION__, cameraId.c_str());
             continue;
         }
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
                 cameraId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
+        // for the specific package that this listener belongs to.
+        std::vector<std::string> remappedCameraIds =
+                findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
+        for (auto& remappedCameraId : remappedCameraIds) {
+            ret = i->getListener()->onTorchStatusChanged(mapToInterface(status), remappedCameraId);
+            i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                    __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        }
     }
 }
 
@@ -729,6 +738,156 @@
     return Status::ok();
 }
 
+Status CameraService::remapCameraIds(
+        const hardware::CameraIdRemapping& cameraIdRemapping) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
+        const int pid = CameraThreadState::getCallingPid();
+        const int uid = CameraThreadState::getCallingUid();
+        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
+                __FUNCTION__, pid, uid);
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
+                "Permission Denial: no permission to configure camera id mapping");
+    }
+    TCameraIdRemapping cameraIdRemappingMap{};
+    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
+    if (!parseStatus.isOk()) {
+        return parseStatus;
+    }
+    remapCameraIds(cameraIdRemappingMap);
+    return Status::ok();
+}
+
+Status CameraService::parseCameraIdRemapping(
+        const hardware::CameraIdRemapping& cameraIdRemapping,
+        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
+    std::string packageName;
+    std::string cameraIdToReplace, updatedCameraId;
+    for (const auto& packageIdRemapping : cameraIdRemapping.packageIdRemappings) {
+        packageName = packageIdRemapping.packageName;
+        if (packageName == "") {
+            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Package name cannot be empty");
+        }
+
+        if (packageIdRemapping.cameraIdsToReplace.size()
+            != packageIdRemapping.updatedCameraIds.size()) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
+                     packageName.c_str());
+        }
+        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
+            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
+            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
+            if (cameraIdToReplace == "" || updatedCameraId == "") {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
+                        packageName.c_str());
+            }
+            if (cameraIdToReplace == updatedCameraId) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
+                        " as updatedCameraId for %s",
+                        packageName.c_str());
+            }
+            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
+        }
+    }
+    return Status::ok();
+}
+
+void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
+    // Acquire mServiceLock and prevent other clients from connecting
+    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
+            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
+
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    // This will disconnect all existing clients for camera Ids that are being
+    // remapped in cameraIdRemapping, but only if they were being used by an
+    // affected packageName.
+    std::vector<sp<BasicClient>> clientsToDisconnect;
+    std::vector<std::string> cameraIdsToUpdate;
+    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
+        for (auto& [id0, id1] : injectionMap) {
+            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
+                    id0.c_str(), id1.c_str());
+            auto clientDescriptor = mActiveClientManager.get(id0);
+            if (clientDescriptor != nullptr) {
+                sp<BasicClient> clientSp = clientDescriptor->getValue();
+                if (clientSp->getPackageName() == packageName) {
+                    // This camera ID is being used by the affected packageName.
+                    clientsToDisconnect.push_back(clientSp);
+                    cameraIdsToUpdate.push_back(id0);
+                }
+            }
+        }
+    }
+
+    // Update mCameraIdRemapping.
+    mCameraIdRemapping.clear();
+    mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
+
+    // Do not hold mServiceLock while disconnecting clients, but retain the condition
+    // blocking other clients from connecting in mServiceLockWrapper if held.
+    mServiceLock.unlock();
+
+    // Disconnect clients.
+    for (auto& clientSp : clientsToDisconnect) {
+        // We send up ERROR_CAMERA_DEVICE so that the app attempts to reconnect
+        // automatically.
+        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+                CaptureResultExtras{});
+        // This also triggers the status updates
+        clientSp->disconnect();
+    }
+
+    mServiceLock.lock();
+}
+
+std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
+        const std::string& inputCameraId, int clientUid) {
+    std::string packageName = getPackageNameFromUid(clientUid);
+    std::vector<std::string> cameraIds;
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
+        packageMapIter != mCameraIdRemapping.end()) {
+        for (auto& [id0, id1]: packageMapIter->second) {
+            if (id1 == inputCameraId) {
+                cameraIds.push_back(id0);
+            }
+        }
+    }
+    return cameraIds;
+}
+
+std::string CameraService::resolveCameraId(const std::string& inputCameraId) {
+    return resolveCameraId(inputCameraId, std::string(""));
+}
+
+std::string CameraService::resolveCameraId(
+        const std::string& inputCameraId,
+        const std::string& packageName) {
+    std::string packageNameVal = packageName;
+    if (packageName == "") {
+        int clientUid = CameraThreadState::getCallingUid();
+        packageNameVal = getPackageNameFromUid(clientUid);
+    }
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
+        packageMapIter != mCameraIdRemapping.end()) {
+        ALOGI("%s: resolveCameraId: packageName found %s",
+                __FUNCTION__, packageNameVal.c_str());
+        auto packageMap = packageMapIter->second;
+        if (auto replacementIdIter = packageMap.find(inputCameraId);
+            replacementIdIter != packageMap.end()) {
+            ALOGI("%s: resolveCameraId: inputId found %s, replacing with %s",
+                    __FUNCTION__, inputCameraId.c_str(),
+                    replacementIdIter->second.c_str());
+            return replacementIdIter->second;
+        }
+    }
+    return inputCameraId;
+}
+
 Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
@@ -799,9 +958,12 @@
     return cameraIdIntToStrLocked(cameraIdInt);
 }
 
-Status CameraService::getCameraCharacteristics(const std::string& cameraId,
+Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
         int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
+
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
+
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -1008,7 +1170,7 @@
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const std::string& originalCameraId, /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1052,7 +1214,7 @@
         *client = new CameraDeviceClient(cameraService, tmp,
                 cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait);
+                overrideForPerfClass, overrideToPortrait, originalCameraId);
         ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
@@ -1143,7 +1305,7 @@
             kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, /*out*/ tmp)
+            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -1348,8 +1510,8 @@
     attributionSource.uid = clientUid;
     attributionSource.packageName = clientName;
     bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            toString16(sCameraPermission), attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+            toString16(sCameraPermission), attributionSource, String16(), AppOpsManager::OP_NONE)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1681,7 +1843,7 @@
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
             clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, /*out*/client);
+            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
@@ -1761,7 +1923,7 @@
 
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-        const std::string& cameraId,
+        const std::string& unresolvedCameraId,
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
@@ -1771,6 +1933,7 @@
 
     ATRACE_CALL();
     Status ret = Status::ok();
+    const std::string cameraId = resolveCameraId(unresolvedCameraId, clientPackageName);
     sp<CameraDeviceClient> client = nullptr;
     std::string clientPackageNameAdj = clientPackageName;
     int callingPid = CameraThreadState::getCallingPid();
@@ -1818,9 +1981,9 @@
             cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
             targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
-            /*out*/client);
+            unresolvedCameraId, /*out*/client);
 
-    if(!ret.isOk()) {
+    if (!ret.isOk()) {
         logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
         return ret;
     }
@@ -1887,7 +2050,7 @@
         int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode,
+        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
@@ -2002,7 +2165,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing,
                 orientation, clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode,
+                overrideToPortrait, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2016,6 +2179,9 @@
         if (err != OK) {
             ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
             // Errors could be from the HAL module open call or from AppOpsManager
+            mServiceLock.unlock();
+            client->disconnect();
+            mServiceLock.lock();
             switch(err) {
                 case BAD_VALUE:
                     return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
@@ -2256,8 +2422,9 @@
     return OK;
 }
 
-Status CameraService::turnOnTorchWithStrengthLevel(const std::string& cameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder) {
+Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
+        int32_t torchStrength,
+        const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2267,6 +2434,7 @@
                 "Torch client binder in null.");
     }
 
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
     int uid = CameraThreadState::getCallingUid();
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -2384,7 +2552,8 @@
     return Status::ok();
 }
 
-Status CameraService::setTorchMode(const std::string& cameraId, bool enabled,
+Status CameraService::setTorchMode(const std::string& unresolvedCameraId,
+        bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2395,6 +2564,7 @@
                 "Torch client Binder is null");
     }
 
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
     int uid = CameraThreadState::getCallingUid();
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -2922,10 +3092,20 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
+    std::string resolvedCameraId;
+    if (apiVersion == API_VERSION_2) {
+        resolvedCameraId = resolveCameraId(unresolvedCameraId);
+    } else { // if (apiVersion == API_VERSION_1)
+        // We don't support remapping for API 1.
+        // TODO(b/286287541): Also support remapping for API 1.
+        resolvedCameraId = unresolvedCameraId;
+    }
+    const std::string cameraId = resolvedCameraId;
+
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
@@ -2984,10 +3164,12 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
+
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
@@ -4918,7 +5100,6 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const std::string& cameraId, StatusInternal status) {
-
             if (status != StatusInternal::ENUMERATING) {
                 // Update torch status if it has a flash unit.
                 Mutex::Autolock al(mTorchStatusMutex);
@@ -4951,9 +5132,21 @@
                 auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
                         cameraId);
                 listener->handleBinderStatus(ret,
-                        "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                         "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                         __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                         ret.exceptionCode());
+                // Also trigger the callbacks for cameras that were remapped to the current
+                // cameraId for the specific package that this listener belongs to.
+                std::vector<std::string> remappedCameraIds =
+                        findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
+                for (auto& remappedCameraId : remappedCameraIds) {
+                    ret = listener->getListener()->onStatusChanged(
+                            mapToInterface(status), remappedCameraId);
+                    listener->handleBinderStatus(ret,
+                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                            __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                            ret.exceptionCode());
+                }
             }
         });
 }
@@ -5163,6 +5356,8 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
+    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
+        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
@@ -5171,6 +5366,23 @@
     return BAD_VALUE;
 }
 
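+// Handles the "remap-camera-id <PACKAGE> <Id0> <Id1>" shell command; for example
+// (assuming the usual cameraserver cmd service name):
+//   adb shell cmd media.camera remap-camera-id com.android.xyz 0 1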
+status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
+    uid_t uid = IPCThreadState::self()->getCallingUid();
+    if (uid != AID_ROOT) {
+        dprintf(err, "Must be adb root\n");
+        return PERMISSION_DENIED;
+    }
+    if (args.size() != 4) {
+        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
+        return BAD_VALUE;
+    }
+    std::string packageName = toStdString(args[1]);
+    std::string cameraIdToReplace = toStdString(args[2]);
+    std::string cameraIdNew = toStdString(args[3]);
+    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
+    return OK;
+}
+
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     std::string packageName = toStdString(args[1]);
 
@@ -5784,6 +5996,7 @@
         "  clear-stream-use-case-override clear the stream use case override\n"
         "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
         "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
+        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index bc65293..647b2ef 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -20,6 +20,7 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -61,6 +62,7 @@
 #include <utility>
 #include <unordered_map>
 #include <unordered_set>
+#include <vector>
 
 namespace android {
 
@@ -138,6 +140,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // ICameraService
+    // IMPORTANT: All binder calls that deal with logicalCameraId should use
+    // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
+    // perform the operation on (in case of Id Remapping).
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
@@ -222,6 +227,9 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
+    virtual binder::Status remapCameraIds(
+            const hardware::CameraIdRemapping& cameraIdRemapping);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -915,7 +923,7 @@
             int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -943,6 +951,48 @@
     // Mutex guarding mCameraStates map
     mutable Mutex mCameraStatesLock;
 
+    /**
+     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdToUse}.
+     *
+     * This specifies that for packageName, for every binder operation targeting
+     * cameraIdToReplace, use newCameraIdToUse instead.
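+     *
+     * For example, {"com.android.xyz" -> {"0" -> "1"}} means that, for package
+     * com.android.xyz, any operation targeting camera "0" is performed on camera "1".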
+     */
+    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
+    TCameraIdRemapping mCameraIdRemapping{};
+    /** Mutex guarding mCameraIdRemapping. */
+    Mutex mCameraIdRemappingLock;
+
+    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
+    binder::Status parseCameraIdRemapping(
+            const hardware::CameraIdRemapping& cameraIdRemapping,
+            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
+
+    /**
+     * Resolve the (potentially remapped) camera Id to use for packageName.
+     *
+     * This returns the Camera Id to use in case inputCameraId was remapped to a
+     * different Id for the given packageName. Otherwise, it returns the inputCameraId.
+     */
+    std::string resolveCameraId(const std::string& inputCameraId, const std::string& packageName);
+    /**
+     * Resolve the (potentially remapped) camera Id to use.
+     *
+     * This returns the Camera Id to use in case inputCameraId was remapped to a
+     * different Id for the packageName of the client. Otherwise, it returns the inputCameraId.
+     */
+    std::string resolveCameraId(const std::string& inputCameraId);
+
+    /**
+     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
+     */
+    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
+
+    /**
+     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
+     */
+    std::vector<std::string> findOriginalIdsForRemappedCameraId(
+        const std::string& inputCameraId, int clientUid);
+
     // Circular buffer for storing event logging for dumps
     RingBuffer<std::string> mEventLog;
     Mutex mLogLock;
@@ -1325,6 +1375,9 @@
     // Set or clear the zoom override flag
     status_t handleSetZoomOverride(const Vector<String16>& args);
 
+    // Set Camera Id remapping using 'cmd'
+    status_t handleCameraIdRemapping(const Vector<String16>& args, int errFd);
+
     // Handle 'watch' command as passed through 'cmd'
     status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
 
@@ -1370,14 +1423,15 @@
      */
     static std::string getFormattedCurrentTime();
 
-    static binder::Status makeClient(const sp<CameraService>& cameraService,
-            const sp<IInterface>& cameraCb, const std::string& packageName,
-            bool systemNativeClient, const std::optional<std::string>& featureId,
-            const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
-            int clientPid, uid_t clientUid, int servicePid,
+    static binder::Status makeClient(
+            const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
+            const std::string& packageName, bool systemNativeClient,
+            const std::optional<std::string>& featureId, const std::string& cameraId, int api1CameraId,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
             bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
-            /*out*/sp<BasicClient>* client);
+            const std::string& originalCameraId,
+            /*out*/ sp<BasicClient>* client);
 
     static std::string toString(std::set<userid_t> intSet);
     static int32_t mapToInterface(TorchModeStatus status);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index b388e5a..b217476 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -73,7 +73,9 @@
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
                 /*legacyClient*/ true),
-        mParameters(api1CameraId, cameraFacing)
+        mParameters(api1CameraId, cameraFacing),
+        mLatestRequestIds(kMaxRequestIds),
+        mLatestFailedRequestIds(kMaxRequestIds)
 {
     ATRACE_CALL();
 
@@ -1835,7 +1837,7 @@
                     (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
                 Mutex::Autolock al(mLatestRequestMutex);
 
-                mLatestFailedRequestId = resultExtras.requestId;
+                mLatestFailedRequestIds.add(resultExtras.requestId);
                 mLatestRequestSignal.signal();
             }
             mCaptureSequencer->notifyError(errorCode, resultExtras);
@@ -2410,7 +2412,10 @@
 
 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
-    while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
+    while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
+            mLatestRequestIds.end()) &&
+           (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
+            mLatestFailedRequestIds.end())) {
         nsecs_t startTime = systemTime();
 
         auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -2419,13 +2424,14 @@
         timeout -= (systemTime() - startTime);
     }
 
-    return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
+    return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
+             mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
 }
 
 void Camera2Client::notifyRequestId(int32_t requestId) {
     Mutex::Autolock al(mLatestRequestMutex);
 
-    mLatestRequestId = requestId;
+    mLatestRequestIds.add(requestId);
     mLatestRequestSignal.signal();
 }
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index fe12690..9ec1eb5 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -22,11 +22,7 @@
 #include "common/Camera2ClientBase.h"
 #include "api1/client2/Parameters.h"
 #include "api1/client2/FrameProcessor.h"
-//#include "api1/client2/StreamingProcessor.h"
-//#include "api1/client2/JpegProcessor.h"
-//#include "api1/client2/ZslProcessor.h"
-//#include "api1/client2/CaptureSequencer.h"
-//#include "api1/client2/CallbackProcessor.h"
+#include <media/RingBuffer.h>
 
 namespace android {
 
@@ -263,8 +259,8 @@
 
     mutable Mutex mLatestRequestMutex;
     Condition mLatestRequestSignal;
-    int32_t mLatestRequestId = -1;
-    int32_t mLatestFailedRequestId = -1;
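+    // Ring buffers of recently applied and failed request IDs, so that
+    // waitUntilRequestIdApplied() can still match a request id even after
+    // later requests have already been applied.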
+    static constexpr size_t kMaxRequestIds = BufferQueueDefs::NUM_BUFFER_SLOTS;
+    RingBuffer<int32_t> mLatestRequestIds, mLatestFailedRequestIds;
     status_t waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout);
     status_t waitUntilCurrentRequestIdLocked();
 };
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c60f327..3761f75 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -99,7 +99,8 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait) :
+        bool overrideToPortrait,
+        const std::string& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
@@ -107,8 +108,8 @@
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
-    mOverrideForPerfClass(overrideForPerfClass) {
-
+    mOverrideForPerfClass(overrideForPerfClass),
+    mOriginalCameraId(originalCameraId) {
     ATRACE_CALL();
     ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
 }
@@ -323,7 +324,7 @@
 
         //The first capture settings should always match the logical camera id
         const std::string &logicalId = request.mPhysicalCameraSettings.begin()->id;
-        if (mDevice->getId() != logicalId) {
+        if (mDevice->getId() != logicalId && mOriginalCameraId != logicalId) {
             ALOGE("%s: Camera %s: Invalid camera request settings.", __FUNCTION__,
                     mCameraIdStr.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -438,6 +439,7 @@
 
         CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
         for (const auto& it : request.mPhysicalCameraSettings) {
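+            // If the settings entry still references the original (pre-remapping)
+            // camera id, resolve it to the id this device was actually opened with.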
+            std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
             if (it.settings.isEmpty()) {
                 ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
                         __FUNCTION__, mCameraIdStr.c_str());
@@ -448,7 +450,7 @@
             // Check whether the physical / logical stream has settings
             // consistent with the sensor pixel mode(s) it was configured with.
             // mCameraIdToStreamSet will only have ids that are high resolution
-            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(resolvedId);
             if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
                 std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
                         outputStreamIds);
@@ -456,26 +458,25 @@
                         !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
                      ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
                             "consistent with configured streams. Rejecting request.",
-                            __FUNCTION__, it.id.c_str());
+                            __FUNCTION__, resolvedId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
                         "streams configured");
                 }
             }
 
-            const std::string &physicalId = it.id;
             bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
                     mSupportedPhysicalRequestKeys.end();
             bool hasTestPatternDataPhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_DATA) !=
                     mSupportedPhysicalRequestKeys.end();
-            if (physicalId != mDevice->getId()) {
+            if (resolvedId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
-                        it.id);
+                        resolvedId);
                 if (found == requestedPhysicalIds.end()) {
                     ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
-                            __FUNCTION__, mCameraIdStr.c_str(), physicalId.c_str());
+                            __FUNCTION__, mCameraIdStr.c_str(), resolvedId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Invalid physical camera id");
                 }
@@ -495,11 +496,11 @@
                         }
                     }
 
-                    physicalSettingsList.push_back({it.id, filteredParams,
+                    physicalSettingsList.push_back({resolvedId, filteredParams,
                             hasTestPatternModePhysicalKey, hasTestPatternDataPhysicalKey});
                 }
             } else {
-                physicalSettingsList.push_back({it.id, it.settings});
+                physicalSettingsList.push_back({resolvedId, it.settings});
             }
         }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45c904a..68bd958 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -191,7 +191,8 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait);
+            bool overrideToPortrait,
+            const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -368,6 +369,9 @@
     std::string mUserTag;
     // The last set video stabilization mode
     int mVideoStabilizationMode = -1;
+
+    // The camera id the client originally requested; differs from mCameraIdStr
+    // only when camera ID remapping is in effect.
+    std::string mOriginalCameraId;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a0e2778..b653094 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -3034,6 +3034,7 @@
         mNotifyPipelineDrain(false),
         mFrameNumber(0),
         mLatestRequestId(NAME_NOT_FOUND),
+        mLatestFailedRequestId(NAME_NOT_FOUND),
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
@@ -3285,7 +3286,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mLatestRequestMutex);
     status_t res;
-    while (mLatestRequestId != requestId) {
+    while (mLatestRequestId != requestId && mLatestFailedRequestId != requestId) {
         nsecs_t startTime = systemTime();
 
         res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -4014,8 +4015,11 @@
                 sp<Camera3Device> parent = mParent.promote();
                 if (parent != nullptr) {
                     const std::string& streamCameraId = outputStream->getPhysicalCameraId();
+                    // Handle the case where the client sends a single logical camera
+                    // request that targets physical output stream(s).
+                    bool singleRequest = captureRequest->mSettingsList.size() == 1;
                     for (const auto& settings : captureRequest->mSettingsList) {
-                        if ((streamCameraId.empty() &&
+                        if (((streamCameraId.empty() || singleRequest) &&
                                 parent->getId() == settings.cameraId) ||
                                 streamCameraId == settings.cameraId) {
                             outputStream->fireBufferRequestForFrameNumber(
@@ -4360,6 +4364,12 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
+            {
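+                // Record the failed request id and wake up any thread waiting on
+                // mLatestRequestSignal so that it does not keep waiting for a
+                // request that will never be applied.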
+                Mutex::Autolock al(mLatestRequestMutex);
+
+                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
+                mLatestRequestSignal.signal();
+            }
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0c1bbcb..a1e25fd 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1142,6 +1142,7 @@
         Condition          mLatestRequestSignal;
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
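+        // android.request.id for the latest capture request that failed to be submitted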
+        int32_t            mLatestFailedRequestId;
         CameraMetadata     mLatestRequest;
         std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;