Merge "codecserviceregistrant: don't attempt to register AIDL service pre-V" into main am: 60e4a5ba33 am: bb5baf9082 am: 7c594bd2e9
Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2833650
Change-Id: If48cab2e3a60a81d5e18f9dedb786b1c306b9764
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Android.bp b/camera/Android.bp
index a3fd7f9..7de8a62 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -43,6 +43,22 @@
],
}
+aconfig_declarations {
+ name: "camera_platform_flags",
+ package: "com.android.internal.camera.flags",
+ srcs: ["camera_platform.aconfig"],
+}
+
+cc_aconfig_library {
+ name: "camera_platform_flags_c_lib",
+ aconfig_declarations: "camera_platform_flags",
+}
+
+java_aconfig_library {
+ name: "camera_platform_flags_java_lib",
+ aconfig_declarations: "camera_platform_flags",
+}
+
cc_library_headers {
name: "camera_headers",
export_include_dirs: ["include"],
@@ -85,6 +101,7 @@
],
shared_libs: [
+ "camera_platform_flags_c_lib",
"libbase",
"libcutils",
"libutils",
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 1af899d..6759f3b 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -104,7 +104,6 @@
namespace {
sp<::android::hardware::ICameraService> gCameraService;
- const int kCameraServicePollDelay = 500000; // 0.5s
const char* kCameraServiceName = "media.camera";
Mutex gLock;
@@ -142,14 +141,10 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
- do {
- binder = sm->getService(toString16(kCameraServiceName));
- if (binder != 0) {
- break;
- }
- ALOGW("CameraService not published, waiting...");
- usleep(kCameraServicePollDelay);
- } while(true);
+ binder = sm->waitForService(toString16(kCameraServiceName));
+ if (binder == nullptr) {
+ return nullptr;
+ }
if (gDeathNotifier == NULL) {
gDeathNotifier = new DeathNotifier();
}
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 36bf24c..057ec99 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -16,6 +16,7 @@
// #define LOG_NDEBUG 0
#define LOG_TAG "CameraSessionStats"
+
#include <utils/Log.h>
#include <utils/String16.h>
@@ -414,6 +415,18 @@
return err;
}
+ bool usedUltraWide = false;
+ if ((err = parcel->readBool(&usedUltraWide)) != OK) {
+ ALOGE("%s: Failed to read ultrawide usage from parcel", __FUNCTION__);
+ return err;
+ }
+
+ bool usedZoomOverride = false;
+ if ((err = parcel->readBool(&usedZoomOverride)) != OK) {
+ ALOGE("%s: Failed to read zoom override usage from parcel", __FUNCTION__);
+ return err;
+ }
+
int32_t sessionIdx;
if ((err = parcel->readInt32(&sessionIdx)) != OK) {
ALOGE("%s: Failed to read session index from parcel", __FUNCTION__);
@@ -443,6 +456,8 @@
mStreamStats = std::move(streamStats);
mUserTag = toStdString(userTag);
mVideoStabilizationMode = videoStabilizationMode;
+ mUsedUltraWide = usedUltraWide;
+ mUsedZoomOverride = usedZoomOverride;
mSessionIndex = sessionIdx;
mCameraExtensionSessionStats = extStats;
@@ -542,6 +557,16 @@
return err;
}
+ if ((err = parcel->writeBool(mUsedUltraWide)) != OK) {
+ ALOGE("%s: Failed to write ultrawide usage!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeBool(mUsedZoomOverride)) != OK) {
+ ALOGE("%s: Failed to write zoom override usage!", __FUNCTION__);
+ return err;
+ }
+
if ((err = parcel->writeInt32(mSessionIndex)) != OK) {
ALOGE("%s: Failed to write session index!", __FUNCTION__);
return err;
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
new file mode 100644
index 0000000..5f4faba
--- /dev/null
+++ b/camera/camera_platform.aconfig
@@ -0,0 +1,43 @@
+package: "com.android.internal.camera.flags"
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_hsum_permission"
+ description: "Camera access by headless system user"
+ bug: "273539631"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "log_ultrawide_usage"
+ description: "Enable measuring how much usage there is for ultrawide-angle cameras"
+ bug: "300515796"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_manual_flash_strength_control"
+ description: "Flash brightness level control in manual flash mode"
+ bug: "238348881"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "lazy_aidl_wait_for_service"
+ description: "Use waitForService instead of getService with lazy AIDL HALs"
+ bug: "285546208"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "log_zoom_override_usage"
+    description: "Enable measuring how much usage there is for zoom settings override"
+ bug: "307409002"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "virtual_camera_service_discovery"
+ description: "Enable discovery of the Virtual Camera HAL without a VINTF entry"
+ bug: "305170199"
+}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8472562..13b705c 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -26,12 +26,15 @@
srcs: ["main_cameraserver.cpp"],
+ defaults: [
+ "libcameraservice_deps",
+ ],
+
header_libs: [
"libmedia_headers",
],
shared_libs: [
- "libcameraservice",
"liblog",
"libutils",
"libui",
@@ -40,15 +43,13 @@
"libbinder_ndk",
"libhidlbase",
"android.hardware.camera.common@1.0",
- "android.hardware.camera.provider@2.4",
- "android.hardware.camera.provider@2.5",
- "android.hardware.camera.provider@2.6",
- "android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
],
+ static_libs: [
+ "libcameraservice",
+ ],
compile_multilib: "first",
cflags: [
"-Wall",
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 70ca0b3..06c154d 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -161,6 +161,8 @@
std::vector<CameraStreamStats> mStreamStats;
std::string mUserTag;
int mVideoStabilizationMode;
+ bool mUsedUltraWide;
+ bool mUsedZoomOverride;
int mSessionIndex;
CameraExtensionSessionStats mCameraExtensionSessionStats;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 61c7551..b6b8012 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -540,6 +540,7 @@
case ACAMERA_CONTROL_AUTOFRAMING:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
+ case ACAMERA_FLASH_STRENGTH_LEVEL:
case ACAMERA_HOT_PIXEL_MODE:
case ACAMERA_JPEG_GPS_COORDINATES:
case ACAMERA_JPEG_GPS_PROCESSING_METHOD:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index fe0ef67..0899251 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2364,6 +2364,125 @@
*/
ACAMERA_FLASH_STATE = // byte (acamera_metadata_enum_android_flash_state_t)
ACAMERA_FLASH_START + 5,
+ /**
+ * <p>Flash strength level to be used when manual flash control is active.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Flash strength level to use in capture mode i.e. when the applications control
+ * flash with either SINGLE or TORCH mode.</p>
+ * <p>Use android.flash.info.singleStrengthMaxLevel and
+ * android.flash.info.torchStrengthMaxLevel to check whether the device supports
+ * flash strength control or not.
+ * If the values of android.flash.info.singleStrengthMaxLevel and
+ * android.flash.info.torchStrengthMaxLevel are greater than 1,
+ * then the device supports manual flash strength control.</p>
+ * <p>If the ACAMERA_FLASH_MODE <code>==</code> TORCH the value must be >= 1
+ * and <= android.flash.info.torchStrengthMaxLevel.
+ * If the application doesn't set the key and
+ * android.flash.info.torchStrengthMaxLevel > 1,
+ * then the flash will be fired at the default level set by HAL in
+ * android.flash.info.torchStrengthDefaultLevel.
+ * If the ACAMERA_FLASH_MODE <code>==</code> SINGLE, then the value must be >= 1
+ * and <= android.flash.info.singleStrengthMaxLevel.
+ * If the application does not set this key and
+ * android.flash.info.singleStrengthMaxLevel > 1,
+ * then the flash will be fired at the default level set by HAL
+ * in android.flash.info.singleStrengthDefaultLevel.
+ * If ACAMERA_CONTROL_AE_MODE is set to any of ON_AUTO_FLASH, ON_ALWAYS_FLASH,
+ * ON_AUTO_FLASH_REDEYE, ON_EXTERNAL_FLASH values, then the strengthLevel will be ignored.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_FLASH_MODE
+ */
+ ACAMERA_FLASH_STRENGTH_LEVEL = // int32
+ ACAMERA_FLASH_START + 6,
+ /**
+ * <p>Maximum flash brightness level for manual flash control in SINGLE mode.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Maximum flash brightness level in camera capture mode and
+ * ACAMERA_FLASH_MODE set to SINGLE.
+ * Value will be > 1 if the manual flash strength control feature is supported,
+ * otherwise the value will be equal to 1.
+ * Note that this level is just a number of supported levels (the granularity of control).
+ * There is no actual physical power units tied to this level.</p>
+ *
+ * @see ACAMERA_FLASH_MODE
+ */
+ ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL = // int32
+ ACAMERA_FLASH_START + 7,
+ /**
+ * <p>Default flash brightness level for manual flash control in SINGLE mode.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>If flash unit is available this will be greater than or equal to 1 and less than
+ * or equal to <code>android.flash.info.singleStrengthMaxLevel</code>.
+ * Note for devices that do not support the manual flash strength control
+ * feature, this level will always be equal to 1.</p>
+ */
+ ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL = // int32
+ ACAMERA_FLASH_START + 8,
+ /**
+ * <p>Maximum flash brightness level for manual flash control in TORCH mode</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Maximum flash brightness level in camera capture mode and
+ * ACAMERA_FLASH_MODE set to TORCH.
+ * Value will be > 1 if the manual flash strength control feature is supported,
+ * otherwise the value will be equal to 1.</p>
+ * <p>Note that this level is just a number of supported levels (the granularity of control).
+ * There is no actual physical power units tied to this level.
+ * There is no relation between android.flash.info.torchStrengthMaxLevel and
+ * android.flash.info.singleStrengthMaxLevel i.e. the ratio of
+ * android.flash.info.torchStrengthMaxLevel:android.flash.info.singleStrengthMaxLevel
+ * is not guaranteed to be the ratio of actual brightness.</p>
+ *
+ * @see ACAMERA_FLASH_MODE
+ */
+ ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL = // int32
+ ACAMERA_FLASH_START + 9,
+ /**
+ * <p>Default flash brightness level for manual flash control in TORCH mode</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>If flash unit is available this will be greater than or equal to 1 and less than
+ * or equal to android.flash.info.torchStrengthMaxLevel.
+ * Note for the devices that do not support the manual flash strength control feature,
+ * this level will always be equal to 1.</p>
+ */
+ ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL = // int32
+ ACAMERA_FLASH_START + 10,
ACAMERA_FLASH_END,
/**
@@ -4597,8 +4716,8 @@
ACAMERA_SENSOR_EXPOSURE_TIME = // int64
ACAMERA_SENSOR_START,
/**
- * <p>Duration from start of frame exposure to
- * start of next frame exposure.</p>
+ * <p>Duration from start of frame readout to
+ * start of next frame readout.</p>
*
* <p>Type: int64</p>
*
@@ -4668,6 +4787,10 @@
* <p>For more details about stalling, see {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
* OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><em>Note:</em> Prior to Android 13, this field was described as measuring the duration from
+ * start of frame exposure to start of next frame exposure, which doesn't reflect the
+ * definition from sensor manufacturer. A mobile sensor defines the frame duration as
+ * intervals between sensor readouts.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
* @see ACAMERA_CONTROL_MODE
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index bae8706..b74b7a1 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -26,8 +26,10 @@
name: "camera_defaults",
static_libs: [
"libcamera_client",
+ "libbinder_random_parcel",
],
shared_libs: [
+ "camera_platform_flags_c_lib",
"libbase",
"libcutils",
"libutils",
@@ -37,11 +39,22 @@
"libcamera_metadata",
"libnativewindow",
],
+ header_libs: [
+ "libbinder_headers",
+ ],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-camera-fwk-eng@google.com",
],
- componentid: 155276,
+ componentid: 41727,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libcamera_client",
+ vector: "local_no_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -148,3 +161,23 @@
"camera_defaults",
],
}
+
+cc_fuzz {
+ name: "camera_utils_fuzzer",
+ srcs: [
+ "camera_utils_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "camera_metadata_fuzzer",
+ srcs: [
+ "camera_metadata_fuzzer.cpp",
+ ],
+ defaults: [
+ "camera_defaults",
+ ],
+}
diff --git a/camera/tests/fuzzer/README.md b/camera/tests/fuzzer/README.md
index c07ac04..96557f8 100644
--- a/camera/tests/fuzzer/README.md
+++ b/camera/tests/fuzzer/README.md
@@ -52,6 +52,8 @@
$ mm -j$(nproc) camera_Parameters_fuzzer
$ mm -j$(nproc) camera_SessionStats_fuzzer
$ mm -j$(nproc) camera_captureResult_fuzzer
+ $ mm -j$(nproc) camera_utils_fuzzer
+ $ mm -j$(nproc) camera_metadata_fuzzer
```
#### Steps to run
To run on device
@@ -67,6 +69,8 @@
$ adb shell /data/fuzz/${TARGET_ARCH}/camera_Parameters_fuzzer/camera_Parameters_fuzzer
$ adb shell /data/fuzz/${TARGET_ARCH}/camera_SessionStats_fuzzer/camera_SessionStats_fuzzer
$ adb shell /data/fuzz/${TARGET_ARCH}/camera_captureResult_fuzzer/camera_captureResult_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_utils_fuzzer/camera_utils_fuzzer
+ $ adb shell /data/fuzz/${TARGET_ARCH}/camera_metadata_fuzzer/camera_metadata_fuzzer
```
## References:
diff --git a/camera/tests/fuzzer/camera2common.h b/camera/tests/fuzzer/camera2common.h
index 14a1b1b..c82e74d 100644
--- a/camera/tests/fuzzer/camera2common.h
+++ b/camera/tests/fuzzer/camera2common.h
@@ -16,10 +16,93 @@
#ifndef CAMERA2COMMON_H
#define CAMERA2COMMON_H
+#include <CameraSessionStats.h>
+#include <android-base/logging.h>
+#include <binder/IServiceManager.h>
#include <binder/Parcel.h>
+#include <fuzzbinder/random_binder.h>
+#include <fuzzbinder/random_fd.h>
+#include <fuzzbinder/random_parcel.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
using namespace android;
+const std::string kFetchCameraService = "media.camera";
+
+constexpr int8_t kMinIterations = 0;
+constexpr int8_t kMaxIterations = 20;
+constexpr int8_t kMinExtraFDs = 0;
+constexpr int8_t kMinExtraBinder = 0;
+constexpr int32_t kMaxFDs = 1000;
+constexpr int32_t kMinBytes = 0;
+constexpr int32_t kMaxBytes = 20;
+constexpr int32_t kMinCapacity = 1;
+constexpr int32_t kMaxCapacity = 1000;
+
+const int32_t kValidFacing[] = {android::hardware::CameraSessionStats::CAMERA_FACING_BACK,
+ android::hardware::CameraSessionStats::CAMERA_FACING_FRONT};
+const int32_t kValidOrientation[] = {0, 90, 180, 270};
+
+void randomizeParcel(Parcel* parcel, FuzzedDataProvider& provider) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16(kFetchCameraService.c_str()));
+ RandomParcelOptions options{
+ .extraBinders = {binder},
+ .extraFds = {},
+ };
+
+ auto retFds = parcel->debugReadAllFileDescriptors();
+ for (size_t i = 0; i < retFds.size(); ++i) {
+ options.extraFds.push_back(base::unique_fd(dup(retFds[i])));
+ }
+ int8_t iterations = provider.ConsumeIntegralInRange<int8_t>(kMinIterations, kMaxIterations);
+ while (--iterations >= 0) {
+ auto fillFunc = provider.PickValueInArray<const std::function<void()>>({
+ // write data
+ [&]() {
+ size_t toWrite = provider.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes);
+ std::vector<uint8_t> data = provider.ConsumeBytes<uint8_t>(toWrite);
+ CHECK(OK == parcel->write(data.data(), data.size()));
+ },
+ // write FD
+ [&]() {
+ if (options.extraFds.size() > 0 && provider.ConsumeBool()) {
+ const base::unique_fd& fd =
+ options.extraFds.at(provider.ConsumeIntegralInRange<size_t>(
+ kMinExtraFDs, options.extraFds.size() - 1));
+ CHECK(OK == parcel->writeFileDescriptor(fd.get(), false /*takeOwnership*/));
+ } else {
+ // b/260119717 - Adding more FDs can eventually lead to FD limit exhaustion
+ if (options.extraFds.size() > kMaxFDs) {
+ return;
+ }
+
+ std::vector<base::unique_fd> fds = getRandomFds(&provider);
+ CHECK(OK == parcel->writeFileDescriptor(fds.begin()->release(),
+ true /*takeOwnership*/));
+
+ options.extraFds.insert(options.extraFds.end(),
+ std::make_move_iterator(fds.begin() + 1),
+ std::make_move_iterator(fds.end()));
+ }
+ },
+ // write binder
+ [&]() {
+ sp<IBinder> binder;
+ if (options.extraBinders.size() > 0 && provider.ConsumeBool()) {
+ binder = options.extraBinders.at(provider.ConsumeIntegralInRange<size_t>(
+ kMinExtraBinder, options.extraBinders.size() - 1));
+ } else {
+ binder = getRandomBinder(&provider);
+ }
+ CHECK(OK == parcel->writeStrongBinder(binder));
+ },
+ });
+ fillFunc();
+ }
+}
+
template <class type>
void invokeReadWriteNullParcel(type* obj) {
Parcel* parcelNull = nullptr;
@@ -52,4 +135,24 @@
delete parcel;
}
+template <class type>
+void invokeNewReadWriteParcel(type* obj, FuzzedDataProvider& provider) {
+ Parcel* parcel = new Parcel();
+ obj->writeToParcel(parcel);
+ randomizeParcel(parcel, provider);
+ parcel->setDataPosition(0);
+ obj->readFromParcel(parcel);
+ delete parcel;
+}
+
+template <class type>
+void invokeNewReadWriteParcelsp(sp<type> obj, FuzzedDataProvider& provider) {
+ Parcel* parcel = new Parcel();
+ obj->writeToParcel(parcel);
+ randomizeParcel(parcel, provider);
+ parcel->setDataPosition(0);
+ obj->readFromParcel(parcel);
+ delete parcel;
+}
+
#endif // CAMERA2COMMON_H
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index d09a6dd..c2a7549 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -15,36 +15,24 @@
*/
#include <Camera.h>
-#include <CameraBase.h>
-#include <CameraMetadata.h>
#include <CameraParameters.h>
-#include <CameraUtils.h>
-#include <VendorTagDescriptor.h>
-#include <binder/IMemory.h>
#include <binder/MemoryDealer.h>
#include <fuzzer/FuzzedDataProvider.h>
-#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
-#include <utils/Log.h>
#include "camera2common.h"
-#include <android/hardware/ICameraService.h>
using namespace std;
using namespace android;
using namespace android::hardware;
constexpr int32_t kFrameRateMin = 1;
-constexpr int32_t kFrameRateMax = 120;
-constexpr int32_t kCamIdMin = 0;
-constexpr int32_t kCamIdMax = 1;
+constexpr int32_t kFrameRateMax = 1000;
constexpr int32_t kNumMin = 0;
constexpr int32_t kNumMax = 1024;
constexpr int32_t kMemoryDealerSize = 1000;
-constexpr int32_t kRangeMin = 0;
-constexpr int32_t kRangeMax = 1000;
-constexpr int32_t kSizeMin = 0;
-constexpr int32_t kSizeMax = 1000;
+constexpr int8_t kMinElements = 1;
+constexpr int8_t kMaxElements = 10;
constexpr int32_t kValidCMD[] = {CAMERA_CMD_START_SMOOTH_ZOOM,
CAMERA_CMD_STOP_SMOOTH_ZOOM,
@@ -68,11 +56,6 @@
CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER, CAMERA_FRAME_CALLBACK_FLAG_CAMERA,
CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER};
-constexpr int32_t kValidFacing[] = {android::hardware::CAMERA_FACING_BACK,
- android::hardware::CAMERA_FACING_FRONT};
-
-constexpr int32_t kValidOrientation[] = {0, 90, 180, 270};
-
class TestCameraListener : public CameraListener {
public:
virtual ~TestCameraListener() = default;
@@ -100,36 +83,12 @@
class CameraFuzzer : public ::android::hardware::BnCameraClient {
public:
void process(const uint8_t* data, size_t size);
- ~CameraFuzzer() {
- delete mCameraMetadata;
- mComposerClient.clear();
- mSurfaceControl.clear();
- mSurface.clear();
- mCamera.clear();
- mMemoryDealer.clear();
- mIMem.clear();
- mCameraListener.clear();
- mCameraService.clear();
- }
private:
bool initCamera();
- void initCameraMetadata();
void invokeCamera();
- void invokeCameraUtils();
- void invokeCameraBase();
- void invokeCameraMetadata();
void invokeSetParameters();
sp<Camera> mCamera = nullptr;
- CameraMetadata* mCameraMetadata = nullptr;
- sp<SurfaceComposerClient> mComposerClient = nullptr;
- sp<SurfaceControl> mSurfaceControl = nullptr;
- sp<Surface> mSurface = nullptr;
- sp<MemoryDealer> mMemoryDealer = nullptr;
- sp<IMemory> mIMem = nullptr;
- sp<TestCameraListener> mCameraListener = nullptr;
- sp<ICameraService> mCameraService = nullptr;
- sp<ICamera> cameraDevice = nullptr;
FuzzedDataProvider* mFDP = nullptr;
// CameraClient interface
@@ -147,12 +106,26 @@
ProcessState::self()->startThreadPool();
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera"));
- mCameraService = interface_cast<ICameraService>(binder);
- mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
- "CAMERAFUZZ", hardware::ICameraService::USE_CALLING_UID,
- hardware::ICameraService::USE_CALLING_PID,
- /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+ sp<ICameraService> cameraService = nullptr;
+ cameraService = interface_cast<ICameraService>(binder);
+ sp<ICamera> cameraDevice = nullptr;
+ if (mFDP->ConsumeBool()) {
+ cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
+ hardware::ICameraService::USE_CALLING_UID,
+ hardware::ICameraService::USE_CALLING_PID,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
+ &cameraDevice);
+ } else {
+ cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
+ mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
+ mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
+ mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
+ /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
+ /*overrideToPortrait*/ mFDP->ConsumeBool(),
+ /*forceSlowJpegMode*/ mFDP->ConsumeBool(), &cameraDevice);
+ }
+
mCamera = Camera::create(cameraDevice);
if (!mCamera) {
return false;
@@ -176,221 +149,195 @@
return;
}
- int32_t cameraId = mFDP->ConsumeIntegralInRange<int32_t>(kCamIdMin, kCamIdMax);
+ int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
Camera::getNumberOfCameras();
CameraInfo cameraInfo;
cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
- : mFDP->ConsumeIntegral<int>();
+ : mFDP->ConsumeIntegral<int32_t>();
cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
- : mFDP->ConsumeIntegral<int>();
+ : mFDP->ConsumeIntegral<int32_t>();
Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
mCamera->reconnect();
- mComposerClient = new SurfaceComposerClient;
- mSurfaceControl = mComposerClient->createSurface(
- static_cast<String8>(mFDP->ConsumeRandomLengthString().c_str()) /* name */,
- mFDP->ConsumeIntegral<uint32_t>() /* width */,
- mFDP->ConsumeIntegral<uint32_t>() /* height */,
- mFDP->ConsumeIntegral<int32_t>() /* format */,
- mFDP->ConsumeIntegral<int32_t>() /* flags */);
- if (mSurfaceControl) {
- mSurface = mSurfaceControl->getSurface();
- mCamera->setPreviewTarget(mSurface->getIGraphicBufferProducer());
- mCamera->startPreview();
- mCamera->stopPreview();
- mCamera->previewEnabled();
- mCamera->startRecording();
- mCamera->stopRecording();
- }
-
- mCamera->lock();
- mCamera->unlock();
- mCamera->autoFocus();
- mCamera->cancelAutoFocus();
-
- int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
- mCamera->takePicture(msgType);
- invokeSetParameters();
- int32_t cmd;
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ sp<SurfaceControl> surfaceControl = nullptr;
if (mFDP->ConsumeBool()) {
- cmd = mFDP->PickValueInArray(kValidCMD);
+ surfaceControl = composerClient->createSurface(String8("FUZZSURFACE"), 1280, 800,
+ HAL_PIXEL_FORMAT_YV12);
} else {
- cmd = mFDP->ConsumeIntegral<int32_t>();
+ surfaceControl = composerClient->createSurface(
+ static_cast<String8>(mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()) /* name */,
+ mFDP->ConsumeIntegral<uint32_t>() /* width */,
+ mFDP->ConsumeIntegral<uint32_t>() /* height */,
+ mFDP->ConsumeIntegral<int32_t>() /* format */,
+ mFDP->ConsumeIntegral<int32_t>() /* flags */);
}
- int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
- int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
- mCamera->sendCommand(cmd, arg1, arg2);
-
- int32_t videoBufferMode = mFDP->PickValueInArray(kValidVideoBufferMode);
- mCamera->setVideoBufferMode(videoBufferMode);
- if (mSurfaceControl) {
- mSurface = mSurfaceControl->getSurface();
- mCamera->setVideoTarget(mSurface->getIGraphicBufferProducer());
- }
- mCameraListener = sp<TestCameraListener>::make();
- mCamera->setListener(mCameraListener);
- int32_t previewCallbackFlag;
- if (mFDP->ConsumeBool()) {
- previewCallbackFlag = mFDP->PickValueInArray(kValidPreviewCallbackFlag);
- } else {
- previewCallbackFlag = mFDP->ConsumeIntegral<int32_t>();
- }
- mCamera->setPreviewCallbackFlags(previewCallbackFlag);
- if (mSurfaceControl) {
- mSurface = mSurfaceControl->getSurface();
- mCamera->setPreviewCallbackTarget(mSurface->getIGraphicBufferProducer());
- }
-
- mCamera->getRecordingProxy();
- int32_t mode = mFDP->ConsumeIntegral<int32_t>();
- mCamera->setAudioRestriction(mode);
- mCamera->getGlobalAudioRestriction();
- mCamera->recordingEnabled();
-
- mMemoryDealer = new MemoryDealer(kMemoryDealerSize);
- mIMem = mMemoryDealer->allocate(kMemoryDealerSize);
- mCamera->releaseRecordingFrame(mIMem);
-
- int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- native_handle_t* handle = native_handle_create(numFds, numInts);
- mCamera->releaseRecordingFrameHandle(handle);
-
- int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
- int32_t ext = mFDP->ConsumeIntegral<int32_t>();
- int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
- mCamera->notifyCallback(msgTypeNC, ext, ext2);
-
- int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
- mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, mIMem);
- mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-}
-
-void CameraFuzzer::invokeCameraUtils() {
- CameraMetadata staticMetadata;
- int32_t orientVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
- : mFDP->ConsumeIntegral<int32_t>();
- uint8_t facingVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
- : mFDP->ConsumeIntegral<uint8_t>();
- staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
- staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
- int32_t transform = 0;
- CameraUtils::getRotationTransform(
- staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */, &transform /*out*/);
- CameraUtils::isCameraServiceDisabled();
-}
-
-void CameraFuzzer::invokeCameraBase() {
- CameraInfo cameraInfo;
- cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
- : mFDP->ConsumeIntegral<int>();
- cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
- : mFDP->ConsumeIntegral<int>();
- invokeReadWriteParcel<CameraInfo>(&cameraInfo);
-
- CameraStatus* cameraStatus = nullptr;
if (mFDP->ConsumeBool()) {
- cameraStatus = new CameraStatus();
- } else {
- string cid = mFDP->ConsumeRandomLengthString();
- int32_t status = mFDP->ConsumeIntegral<int32_t>();
- size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
- vector<std::string> unavailSubIds;
- for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
- string subId = mFDP->ConsumeRandomLengthString();
- unavailSubIds.push_back(subId);
- }
- string clientPackage = mFDP->ConsumeRandomLengthString();
- cameraStatus = new CameraStatus(cid, status, unavailSubIds, clientPackage);
+ invokeSetParameters();
}
-
- invokeReadWriteParcel<CameraStatus>(cameraStatus);
- delete cameraStatus;
-}
-
-void CameraFuzzer::initCameraMetadata() {
- if (mFDP->ConsumeBool()) {
- mCameraMetadata = new CameraMetadata();
- } else {
- size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
- size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
- mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+ sp<Surface> surface = nullptr;
+ if (surfaceControl) {
+ surface = surfaceControl->getSurface();
}
-}
+ sp<MemoryDealer> memoryDealer = nullptr;
+ sp<IMemory> iMem = nullptr;
+ sp<CameraListener> cameraListener = nullptr;
-void CameraFuzzer::invokeCameraMetadata() {
- initCameraMetadata();
-
- const camera_metadata_t* metadataBuffer = nullptr;
- if (mFDP->ConsumeBool()) {
- metadataBuffer = mCameraMetadata->getAndLock();
+ while (mFDP->remaining_bytes()) {
+ auto callCameraAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+ [&]() {
+ if (surfaceControl) {
+ mCamera->setPreviewTarget(surface->getIGraphicBufferProducer());
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->startPreview();
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->stopPreview();
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->stopPreview();
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->previewEnabled();
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->startRecording();
+ }
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->stopRecording();
+ }
+ },
+ [&]() { mCamera->lock(); },
+ [&]() { mCamera->unlock(); },
+ [&]() { mCamera->autoFocus(); },
+ [&]() { mCamera->cancelAutoFocus(); },
+ [&]() {
+ int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->takePicture(msgType);
+ },
+ [&]() {
+ int32_t cmd;
+ cmd = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidCMD)
+ : mFDP->ConsumeIntegral<int32_t>();
+ int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+ int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->sendCommand(cmd, arg1, arg2);
+ },
+ [&]() {
+ int32_t videoBufferMode =
+ mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidVideoBufferMode)
+ : mFDP->ConsumeIntegral<int32_t>();
+ mCamera->setVideoBufferMode(videoBufferMode);
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->setVideoTarget(surface->getIGraphicBufferProducer());
+ }
+ },
+ [&]() {
+ cameraListener = sp<TestCameraListener>::make();
+ mCamera->setListener(cameraListener);
+ },
+ [&]() {
+ int32_t previewCallbackFlag;
+ previewCallbackFlag =
+ mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidPreviewCallbackFlag)
+ : mFDP->ConsumeIntegral<int32_t>();
+ mCamera->setPreviewCallbackFlags(previewCallbackFlag);
+ },
+ [&]() {
+ if (surfaceControl) {
+ mCamera->setPreviewCallbackTarget(surface->getIGraphicBufferProducer());
+ }
+ },
+ [&]() { mCamera->getRecordingProxy(); },
+ [&]() {
+ int32_t mode = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->setAudioRestriction(mode);
+ },
+ [&]() { mCamera->getGlobalAudioRestriction(); },
+ [&]() { mCamera->recordingEnabled(); },
+ [&]() {
+ memoryDealer = new MemoryDealer(kMemoryDealerSize);
+ iMem = memoryDealer->allocate(kMemoryDealerSize);
+ },
+ [&]() {
+ int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+ int32_t ext = mFDP->ConsumeIntegral<int32_t>();
+ int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
+ mCamera->notifyCallback(msgTypeNC, ext, ext2);
+ },
+ [&]() {
+ int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+ int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+ mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, iMem);
+ },
+ [&]() {
+ int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+ },
+ [&]() {
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ mCamera->releaseRecordingFrameHandle(handle);
+ },
+ [&]() { mCamera->releaseRecordingFrame(iMem); },
+ [&]() {
+ std::vector<native_handle_t*> handles;
+ for (int8_t i = 0;
+ i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
+ ++i) {
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ handles.push_back(handle);
+ }
+ mCamera->releaseRecordingFrameHandleBatch(handles);
+ },
+ [&]() {
+ std::vector<native_handle_t*> handles;
+ for (int8_t i = 0;
+ i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
+ ++i) {
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ handles.push_back(handle);
+ }
+ std::vector<nsecs_t> timestamps;
+ for (int8_t i = 0;
+ i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
+ ++i) {
+ timestamps.push_back(mFDP->ConsumeIntegral<int64_t>());
+ }
+ mCamera->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
+ },
+ });
+ callCameraAPIs();
}
-
- mCameraMetadata->entryCount();
- mCameraMetadata->isEmpty();
- mCameraMetadata->bufferSize();
- mCameraMetadata->sort();
-
- uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
- uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
- int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
- int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
- float dataFloat = mFDP->ConsumeFloatingPoint<float>();
- double dataDouble = mFDP->ConsumeFloatingPoint<double>();
- camera_metadata_rational dataRational;
- dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
- dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
- string dataStr = mFDP->ConsumeRandomLengthString();
- String8 dataString(dataStr.c_str());
- size_t data_count = 1;
- mCameraMetadata->update(tag, &dataUint8, data_count);
- mCameraMetadata->update(tag, &dataInt32, data_count);
- mCameraMetadata->update(tag, &dataFloat, data_count);
- mCameraMetadata->update(tag, &dataInt64, data_count);
- mCameraMetadata->update(tag, &dataRational, data_count);
- mCameraMetadata->update(tag, &dataDouble, data_count);
- mCameraMetadata->update(tag, dataString);
-
- uint32_t tagExists = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
- mCameraMetadata->exists(tagExists);
-
- uint32_t tagFind = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
- mCameraMetadata->find(tagFind);
-
- uint32_t tagErase = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
- mCameraMetadata->erase(tagErase);
-
- mCameraMetadata->unlock(metadataBuffer);
- std::vector<int32_t> tagsRemoved;
- uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
- mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
-
- string name = mFDP->ConsumeRandomLengthString();
- VendorTagDescriptor vTags;
- uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
- mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
-
- invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
- invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
-
- int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
- int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
- int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
- mCameraMetadata->dump(fd, verbosity, indentation);
-
- CameraMetadata metadataCopy(mCameraMetadata->release());
- CameraMetadata otherCameraMetadata;
- mCameraMetadata->swap(otherCameraMetadata);
- close(fd);
}
void CameraFuzzer::process(const uint8_t* data, size_t size) {
mFDP = new FuzzedDataProvider(data, size);
invokeCamera();
- invokeCameraUtils();
- invokeCameraBase();
- invokeCameraMetadata();
delete mFDP;
}
diff --git a/camera/tests/fuzzer/camera_metadata_fuzzer.cpp b/camera/tests/fuzzer/camera_metadata_fuzzer.cpp
new file mode 100644
index 0000000..a609e19
--- /dev/null
+++ b/camera/tests/fuzzer/camera_metadata_fuzzer.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraBase.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+constexpr int32_t kMinMetadataCapacity = 0;
+constexpr int32_t kMaxMetadataCapacity = 1000;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+class CameraMetadataFuzzer {
+ public:
+ void process(const uint8_t* data, size_t size);
+
+ private:
+ void initCameraMetadata();
+ void invokeCameraMetadata();
+ CameraMetadata* mCameraMetadata = nullptr;
+ FuzzedDataProvider* mFDP = nullptr;
+ camera_metadata* mMetaBuffer = nullptr;
+ bool mMetadataLocked = false;
+ template <typename T>
+ void callCameraMetadataUpdate(size_t dataCount, T data) {
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->update(tag, &data, dataCount);
+ }
+};
+
+void CameraMetadataFuzzer::initCameraMetadata() {
+ auto selectMetadataConstructor = mFDP->PickValueInArray<const std::function<void()>>({
+ [&]() {
+ mMetaBuffer = allocate_camera_metadata(
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity, kMaxMetadataCapacity) /* entry_capacity */,
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity, kMaxMetadataCapacity) /* data_capacity */);
+ mCameraMetadata = new CameraMetadata(mMetaBuffer);
+ },
+ [&]() {
+ mCameraMetadata = new CameraMetadata();
+ },
+ [&]() {
+ size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+ },
+ });
+ selectMetadataConstructor();
+}
+void CameraMetadataFuzzer::invokeCameraMetadata() {
+ initCameraMetadata();
+
+ const camera_metadata_t* metadataBuffer = nullptr;
+ mMetadataLocked = mFDP->ConsumeBool();
+ if (mMetadataLocked) {
+ metadataBuffer = mCameraMetadata->getAndLock();
+ }
+
+ size_t dataCount = 1;
+ while (mFDP->remaining_bytes()) {
+ auto callMetadataAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+
+ [&]() { mCameraMetadata->entryCount(); },
+ [&]() { mCameraMetadata->isEmpty(); },
+ [&]() { mCameraMetadata->bufferSize(); },
+ [&]() { mCameraMetadata->sort(); },
+ [&]() {
+ uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
+ callCameraMetadataUpdate(dataCount, dataUint8);
+ },
+ [&]() {
+ int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
+ callCameraMetadataUpdate(dataCount, dataInt32);
+ },
+ [&]() {
+ int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
+ callCameraMetadataUpdate(dataCount, dataInt64);
+ },
+ [&]() {
+ float dataFloat = mFDP->ConsumeFloatingPoint<float>();
+ callCameraMetadataUpdate(dataCount, dataFloat);
+ },
+ [&]() {
+ double dataDouble = mFDP->ConsumeFloatingPoint<double>();
+ callCameraMetadataUpdate(dataCount, dataDouble);
+ },
+ [&]() {
+ camera_metadata_rational dataRational;
+ dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
+ dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
+ callCameraMetadataUpdate(dataCount, dataRational);
+ },
+ [&]() {
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ string dataStr = mFDP->ConsumeRandomLengthString(kMaxBytes);
+ String8 dataString(dataStr.c_str());
+ mCameraMetadata->update(tag, dataString);
+ },
+ [&]() {
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t tagExists =
+ mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->exists(tagExists);
+ },
+ [&]() {
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t tagFind =
+ mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->find(tagFind);
+ },
+ [&]() {
+ uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+ uint32_t tagErase =
+ mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->erase(tagErase);
+ },
+ [&]() { mCameraMetadata->unlock(metadataBuffer); },
+ [&]() {
+ std::vector<int32_t> tagsRemoved;
+ uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
+ mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
+ },
+ [&]() {
+ string name = mFDP->ConsumeRandomLengthString(kMaxBytes);
+ VendorTagDescriptor vTags;
+ uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
+ mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
+ },
+ [&]() {
+ int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+ int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ int32_t indentation =
+ mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+ mCameraMetadata->dump(fd, verbosity, indentation);
+ close(fd);
+ },
+ [&]() { CameraMetadata metadataCopy(mCameraMetadata->release()); },
+ [&]() {
+ if (mFDP->ConsumeBool()) {
+ CameraMetadata otherCameraMetadata;
+ mCameraMetadata->swap(otherCameraMetadata);
+ } else {
+ std::vector<int8_t> entryCapacityVector =
+ mFDP->ConsumeBytes<int8_t>(kMaxBytes);
+ /**
+ * Resizing vector to a size between 1 to 1000 so that vector is not empty.
+ */
+ entryCapacityVector.resize(0, mFDP->ConsumeIntegralInRange<int32_t>(
+ kMinCapacity, kMaxCapacity));
+ CameraMetadata otherCameraMetadata(entryCapacityVector.size());
+ mCameraMetadata->swap(otherCameraMetadata);
+ }
+ },
+ [&]() {
+ if (!mMetadataLocked) {
+ camera_metadata* metaBuffer = allocate_camera_metadata(
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity,
+ kMaxMetadataCapacity) /* entry_capacity */,
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity,
+ kMaxMetadataCapacity) /* data_capacity */);
+ mCameraMetadata->acquire(metaBuffer);
+ }
+ },
+ [&]() {
+ if (!mMetadataLocked) {
+ camera_metadata* metaBuffer = allocate_camera_metadata(
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity,
+ kMaxMetadataCapacity) /* entry_capacity */,
+ mFDP->ConsumeIntegralInRange<size_t>(
+ kMinMetadataCapacity,
+ kMaxMetadataCapacity) /* data_capacity */);
+ mCameraMetadata->append(metaBuffer);
+ free_camera_metadata(metaBuffer);
+ }
+ },
+ });
+ callMetadataAPIs();
+
+ // Not keeping invokeReadWrite() APIs in while loop to avoid possible OOM.
+ invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
+ if (mFDP->ConsumeBool()) {
+ invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
+ } else {
+ invokeNewReadWriteParcel<CameraMetadata>(mCameraMetadata, *mFDP);
+ }
+ }
+ delete mCameraMetadata;
+}
+
+void CameraMetadataFuzzer::process(const uint8_t* data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ invokeCameraMetadata();
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ CameraMetadataFuzzer cameraMetadataFuzzer;
+ cameraMetadataFuzzer.process(data, size);
+ return 0;
+}
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
new file mode 100644
index 0000000..365305e
--- /dev/null
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraBase.h>
+#include <CameraUtils.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int8_t kMaxLoopIterations = 20;
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+class CameraUtilsFuzzer {
+ public:
+ void process(const uint8_t* data, size_t size);
+
+ private:
+ void invokeCameraUtils();
+ void invokeCameraBase();
+ FuzzedDataProvider* mFDP = nullptr;
+};
+
+void CameraUtilsFuzzer::invokeCameraUtils() {
+ int8_t count = kMaxLoopIterations;
+ while (--count > 0) {
+ int32_t transform = 0;
+ auto callCameraUtilsAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+ [&]() {
+ CameraMetadata staticMetadata;
+ if (mFDP->ConsumeBool()) {
+ int32_t orientVal = mFDP->ConsumeBool()
+ ? mFDP->PickValueInArray(kValidOrientation)
+ : mFDP->ConsumeIntegral<int32_t>();
+ uint8_t facingVal = mFDP->ConsumeBool()
+ ? mFDP->PickValueInArray(kValidFacing)
+ : mFDP->ConsumeIntegral<uint8_t>();
+ staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
+ staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
+ } else {
+ std::vector<int32_t> orientVal;
+ for (int8_t i = 0;
+ i <= mFDP->ConsumeIntegralInRange<int32_t>(kMinCapacity, kMaxCapacity);
+ ++i) {
+ orientVal.push_back(mFDP->ConsumeIntegral<int32_t>());
+ }
+ std::vector<uint8_t> facingVal = mFDP->ConsumeBytes<uint8_t>(kMaxBytes);
+ /**
+ * Resizing vector to a size between 1 to 1000 so that vector is not empty.
+ */
+ orientVal.resize(0, mFDP->ConsumeIntegralInRange<int32_t>(kMinCapacity,
+ kMaxCapacity));
+ facingVal.resize(0, mFDP->ConsumeIntegralInRange<int32_t>(kMinCapacity,
+ kMaxCapacity));
+ staticMetadata.update(ANDROID_SENSOR_ORIENTATION, orientVal.data(),
+ orientVal.size());
+ staticMetadata.update(ANDROID_LENS_FACING, facingVal.data(),
+ facingVal.size());
+ }
+
+ CameraUtils::getRotationTransform(
+ staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */,
+ &transform /*out*/);
+ },
+ [&]() { CameraUtils::isCameraServiceDisabled(); },
+ });
+ callCameraUtilsAPIs();
+ }
+}
+
+void CameraUtilsFuzzer::invokeCameraBase() {
+ int8_t count = kMaxLoopIterations;
+ while (--count > 0) {
+ CameraInfo cameraInfo;
+ cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+ : mFDP->ConsumeIntegral<int>();
+ cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+ : mFDP->ConsumeIntegral<int>();
+ if (mFDP->ConsumeBool()) {
+ invokeReadWriteParcel<CameraInfo>(&cameraInfo);
+ } else {
+ invokeNewReadWriteParcel<CameraInfo>(&cameraInfo, *mFDP);
+ }
+
+ CameraStatus* cameraStatus = nullptr;
+
+ if (mFDP->ConsumeBool()) {
+ cameraStatus = new CameraStatus();
+ } else {
+ string id = mFDP->ConsumeRandomLengthString(kMaxBytes);
+ int32_t status = mFDP->ConsumeIntegral<int32_t>();
+ size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+ vector<string> unavailSubIds;
+ for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
+ string unavailSubId = mFDP->ConsumeRandomLengthString(kMaxBytes);
+ unavailSubIds.push_back(unavailSubId);
+ }
+ string clientPackage = mFDP->ConsumeRandomLengthString(kMaxBytes);
+
+ cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+ }
+
+ if (mFDP->ConsumeBool()) {
+ invokeReadWriteParcel<CameraStatus>(cameraStatus);
+ } else {
+ invokeNewReadWriteParcel<CameraStatus>(cameraStatus, *mFDP);
+ }
+ delete cameraStatus;
+ }
+}
+
+void CameraUtilsFuzzer::process(const uint8_t* data, size_t size) {
+ mFDP = new FuzzedDataProvider(data, size);
+ if (mFDP->ConsumeBool()) {
+ invokeCameraUtils();
+ } else {
+ invokeCameraBase();
+ }
+ delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ CameraUtilsFuzzer cameraUtilsFuzzer;
+ cameraUtilsFuzzer.process(data, size);
+ return 0;
+}
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 17d7046..a19ef8e 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -90,9 +90,12 @@
status_t Overlay::stop() {
ALOGV("Overlay::stop");
- Mutex::Autolock _l(mMutex);
- mState = STOPPING;
- mEventCond.signal();
+ {
+ Mutex::Autolock _l(mMutex);
+ mState = STOPPING;
+ mEventCond.signal();
+ }
+ join();
return NO_ERROR;
}
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 55bfbd8..a6b20cf 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -892,9 +892,9 @@
gPhysicalDisplayId ? displayState.layerStackSpaceRect : getMaxDisplaySize();
if (gVerbose) {
printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
- layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
- displayMode.refreshRate, toCString(displayState.orientation),
- displayState.layerStack.id);
+ layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
+ displayMode.peakRefreshRate, toCString(displayState.orientation),
+ displayState.layerStack.id);
fflush(stdout);
}
@@ -911,7 +911,8 @@
sp<FrameOutput> frameOutput;
sp<IGraphicBufferProducer> encoderInputSurface;
if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
- err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder, &encoderInputSurface);
+ err = prepareEncoder(displayMode.peakRefreshRate, &recordingData.encoder,
+ &encoderInputSurface);
if (err != NO_ERROR && !gSizeSpecified) {
// fallback is defined for landscape; swap if we're in portrait
@@ -924,8 +925,8 @@
gVideoWidth, gVideoHeight, newWidth, newHeight);
gVideoWidth = newWidth;
gVideoHeight = newHeight;
- err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder,
- &encoderInputSurface);
+ err = prepareEncoder(displayMode.peakRefreshRate, &recordingData.encoder,
+ &encoderInputSurface);
}
}
if (err != NO_ERROR) return err;
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 11e1704..ec99fac 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -37,10 +37,12 @@
// will calculate frame buffer size if |hasData| is set to true.
VideoFrame(uint32_t width, uint32_t height,
uint32_t displayWidth, uint32_t displayHeight,
+ uint32_t displayLeft, uint32_t displayTop,
uint32_t tileWidth, uint32_t tileHeight,
uint32_t angle, uint32_t bpp, uint32_t bitDepth, bool hasData, size_t iccSize):
mWidth(width), mHeight(height),
mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
+ mDisplayLeft(displayLeft), mDisplayTop(displayTop),
mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
mBitDepth(bitDepth) {
@@ -82,6 +84,8 @@
uint32_t mHeight; // Decoded image height before rotation
uint32_t mDisplayWidth; // Display width before rotation
uint32_t mDisplayHeight; // Display height before rotation
+ uint32_t mDisplayLeft; // Display left (column coordinate) before rotation
+ uint32_t mDisplayTop; // Display top (row coordinate) before rotation
uint32_t mTileWidth; // Tile width (0 if image doesn't have grid)
uint32_t mTileHeight; // Tile height (0 if image doesn't have grid)
int64_t mDurationUs; // Frame duration in microseconds
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
new file mode 100644
index 0000000..ee1a2c8
--- /dev/null
+++ b/media/audio/aconfig/Android.bp
@@ -0,0 +1,97 @@
+// media_audio namespace flags
+
+cc_defaults {
+ name: "audio-aconfig-cc-defaults",
+ host_supported: true,
+}
+
+aconfig_declarations {
+ name: "com.android.media.audioserver-aconfig",
+ package: "com.android.media.audioserver",
+ srcs: ["audioserver.aconfig"],
+}
+
+aconfig_declarations {
+ name: "com.android.media.audio-aconfig",
+ package: "com.android.media.audio",
+ srcs: ["audio.aconfig"],
+}
+
+cc_aconfig_library {
+ name: "com.android.media.audioserver-aconfig-cc",
+ aconfig_declarations: "com.android.media.audioserver-aconfig",
+ defaults: ["audio-aconfig-cc-defaults"],
+ double_loadable: true,
+ host_supported: true,
+ product_available: true,
+ vendor_available: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ min_sdk_version: "29",
+}
+
+cc_aconfig_library {
+ name: "com.android.media.audio-aconfig-cc",
+ aconfig_declarations: "com.android.media.audio-aconfig",
+ defaults: ["audio-aconfig-cc-defaults"],
+}
+
+java_aconfig_library {
+ name: "com.android.media.audio-aconfig-java",
+ aconfig_declarations: "com.android.media.audio-aconfig",
+}
+
+// Framework available flags to follow
+// Care must be taken to avoid namespace conflicts.
+// These flags are accessible outside of the platform! Limit usage to @FlaggedApi wherever possible
+
+aconfig_declarations {
+ name: "android.media.audio-aconfig",
+ package: "android.media.audio",
+ srcs: ["audio_framework.aconfig"],
+ visibility: ["//visibility:private"],
+}
+
+aconfig_declarations {
+ name: "android.media.audiopolicy-aconfig",
+ package: "android.media.audiopolicy",
+ srcs: ["audiopolicy_framework.aconfig"],
+ visibility: ["//visibility:private"],
+}
+
+aconfig_declarations {
+ name: "android.media.midi-aconfig",
+ package: "android.media.midi",
+ srcs: ["midi_flags.aconfig"],
+ visibility: ["//visibility:private"],
+}
+
+java_aconfig_library {
+ name: "android.media.audio-aconfig-java",
+ aconfig_declarations: "android.media.audio-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+java_aconfig_library {
+ name: "android.media.audiopolicy-aconfig-java",
+ aconfig_declarations: "android.media.audiopolicy-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+java_aconfig_library {
+ name: "android.media.midi-aconfig-java",
+ aconfig_declarations: "android.media.midi-aconfig",
+ defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+filegroup {
+ name: "audio-framework-aconfig",
+ srcs: [
+ ":android.media.audio-aconfig-java{.generated_srcjars}",
+ ":android.media.audiopolicy-aconfig-java{.generated_srcjars}",
+ ":android.media.midi-aconfig-java{.generated_srcjars}",
+ ],
+}
diff --git a/media/audio/aconfig/README.md b/media/audio/aconfig/README.md
new file mode 100644
index 0000000..8ce1259
--- /dev/null
+++ b/media/audio/aconfig/README.md
@@ -0,0 +1,136 @@
+# Android Audio Flagging Guide
+
+This directory contains the aconfig flag definitions and the associated build files for Android
+Audio features.
+
+## Build Configuration
+
+### Namespaces
+
+There are several global namespaces involved with flagging which must be maintained:
+ - **Mendel project**: `media_audio`. Associated with mdb group and used by gantry.
+All aconfig flags in this namespace should be defined in this directory, and all flags defined in
+this directory should belong to this namespace.
+ - **Blueprint namespaces**: the name field of any module is unique over a particular build
+namespace (for platform development purposes there is one).
+ - **Aconfig package**: All flags generated by aconfig are converted by Halyard into Mendel flags
+of the form {PACKAGE}{FLAG}, where both fields are stripped of spaces, underscores and dashes. This
+combo **must** be globally unique (across Android).
+ - **Generated code**: Both java and cpp aconfig library rules generate code which pollutes the
+namespace of the backend. This means that if two aconfig java libraries exist which consume **any**
+`aconfig_declarations` with packages which intersect, these libraries are incompatible to be linked
+together (and equivalent in cc). If the library is included in the framework, it is on the
+bootclasspath, so no other library for that package can exist. Additionally, the cpp backend
+generates header files with the same name as the unqualified name of the package which means
+include path conflicts are possible.
+
+
+### Naming convention
+
+Given above, follow the following naming convention (given a `{PACKAGE}`).
+
+Generally, `aconfig_declarations` and `{backend}_aconfig_library` should be 1-1, except in cases
+where different package configurations are needed with pre-processing with different classpaths.
+This is because multiple libraries with the same aconfig package cannot compile together.
+
+```
+aconfig_declarations {
+ name: "{PACKAGE}-aconfig",
+ package: "{PACKAGE}",
+ srcs: ["{PACKAGE}.aconfig"]
+}
+
+{backend}_aconfig_library {
+ name: "{PACKAGE}-aconfig-{backend}",
+ aconfig_declarations: "{PACKAGE}-aconfig",
+ defaults: ["audio-aconfig-{backend}-defaults"],
+}
+
+```
+If the flags are required as part of the framework, the java backend is ingested into the framework
+via srcjar for cyclic dependency/simplicity reasons. Add the following
+
+```
+ ":{PACKAGE}-aconfig-java{.generated_srcjars}"
+```
+
+as a source in `audio-framework-aconfig`. This target is included in the file-list which compiles
+the framework. If a lib is included in this list, it is unnecessary to add it in any other blueprint
+file for platform code (for non-host). For tests, the library should be **statically** linked
+(which shadows the bootclasspath), to get around @hide restrictions (this approach may change).
+
+
+### Visibility
+
+Visibility should be controlled (private) so that flag cleanup remains maintainable by the team.
+This constrains the locations where extant flags need to be chased for removal.
+
+
+### Packaging preference
+As a rule, prefer the most constrained package appropriate for a particular flag. This limits viral
+dependencies on a flag library which restricts where/how testing can occur. It also speeds up
+incremental rebuilds. Packages which end up in the framework should be avoided as much as possible,
+since they require rebuilding the framework, pollute the bootclasspath, and require special build
+handling to operate (srcjar generation, jarjar to access in CTS).
+
+Utilize "java-like" package naming for all packages, to ensure packages are globally unique, and
+to avoid class conflicts if these flags are later included in java code.
+
+## Workflow
+
+### Usage Style
+In general, prefer flags at the integration level, rather than at the implementation level.
+This allows flags to be queried in as few locations as possible, such as by parametrizing functions,
+classes, types, etc. It also allows callers to make different decisions on when behavior can/should
+be changed (e.g. different flags could be used for different libraries). This follows dependency
+injection principles and ensures that global state has a smaller blast radius, and also makes it
+easier to test code by relying less on infrastructure. The more places a flag is queried, the more
+its implementation should be deferred upwards, either by the injection interfaces, or by explicit
+parametrization.
+
+### Flag scope
+In general, flags should be scoped as precisely as possible while retaining orthogonality. If
+flags are too specific (multiple flags interact together to encompass a single change), then
+reasoning about independently switching flags becomes difficult. Other than all-on and all-off
+configurations, there is limited soak for flag combinations, so it is critical that flags
+interact with each other minimally (although emergent properties are inevitable).
+However, if flags are too general, graduating/reverting flags can carry too much behavior change,
+and the build graph degenerates.
+
+### Instant Staging
+In general, it is recommended that the moment a flag is added to the tree, it should be moved to
+staging on gantry. There is no benefit to adding a flag but not moving into staging.
+This allows:
+1. Verification by treehugger in both pre/post-submit
+1. Automatic configuration for feature development for local engineering workflows
+1. Integration with other engineering features in development
+
+
+### FlaggedApi
+FlaggedApi operates differently than other types of flags since these flags only graduate at the
+next API bump. These flags also must be exposed through the framework. FlaggedApis should only
+operate over the same set of flags as an implementation if the implementation is entirely related
+to the feature. Given the constraints on the flag lifetime, it is preferable to use a "regular"
+flag for implementation, which can integrate/soak/ship/clean-up faster.
+
+Additionally, unlike "regular" flags, @FlaggedApis are not effectively soaked, so like non-trunk
+API development, they are heavily reliant on CTS to integrate.
+
+On non-next configs, @FlaggedApi has no runtime control, but it is undefined for callers to
+call a FlaggedApi without querying the status of the flag. The build itself is not changed on
+staging. Anything which builds against the platform can see all @FlaggedApis.
+
+Methods annotated FlaggedApis are changed for release finalization -- if an API is not in next,
+it is stripped from the Sdk (and left @hide). If an API graduated to next, it is fully
+included in the Sdk, and the @FlaggedApi annotation is stripped.
+
+
+### TestApis
+TestApis do not require flagging, since their existence in the tree implies that they should
+be accessible to callers (xTS not building on trunk enables this).
+
+
+### Api Changes
+Currently, the flag infra does not support any type of Api modification (arguments, annotation,
+renaming, deletion, etc.) In any of these cases (including for SystemApi), exceptions will need to
+be granted.
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
new file mode 100644
index 0000000..28e5752
--- /dev/null
+++ b/media/audio/aconfig/audio.aconfig
@@ -0,0 +1,22 @@
+# Audio flags intended for general consumption (cross-cutting), but NOT included in framework.
+#
+# Please add flags in alphabetical order.
+
+package: "com.android.media.audio"
+
+flag {
+ name: "bluetooth_mac_address_anonymization"
+ namespace: "media_audio"
+ description:
+ "Enable Bluetooth MAC address anonymization when reporting "
+ "audio device descriptors to non privileged apps."
+ bug: "285588444"
+}
+
+flag {
+ name: "disable_prescale_absolute_volume"
+ namespace: "media_audio"
+ description:
+ "Disable pre-scale absolute volume."
+ bug: "302553525"
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
new file mode 100644
index 0000000..c6d0036
--- /dev/null
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -0,0 +1,37 @@
+# Top level framework (android.media) flags
+# Only add flags here which must be included in framework.jar
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.audio"
+
+flag {
+ name: "auto_public_volume_api_hardening"
+ namespace: "media_audio"
+ description: "On AAOS, make volume and ringer SDK APIs in AudioManager no-ops."
+ bug: "302751899"
+}
+
+# TODO remove
+flag {
+ name: "focus_freeze_test_api"
+ namespace: "media_audio"
+ description: "\
+ AudioManager audio focus test APIs:\
+ AudioManager.enterAudioFocusFreezeForTest(java.util.List)\
+ AudioManager.exitAudioFocusFreezeForTest()\
+ AudioManager.getFocusDuckedUidsForTest()\
+ AudioManager.getFocusFadeOutDurationForTest()\
+ AudioManager.getFocusUnmuteDelayAfterFadeOutForTest()"
+ bug: "301713440"
+}
+
+flag {
+ name: "loudness_configurator_api"
+ namespace: "media_audio"
+ description: "\
+Enable the API for providing loudness metadata and CTA-2075 \
+support."
+ bug: "298463873"
+}
+
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
new file mode 100644
index 0000000..833730a
--- /dev/null
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -0,0 +1,13 @@
+# Flags for package android.media.audiopolicy.
+# Only add flags here which must be included in framework.jar
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.audiopolicy"
+
+flag {
+ name: "audio_policy_update_mixing_rules_api"
+ namespace: "media_audio"
+ description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
+ bug: "293874525"
+}
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
new file mode 100644
index 0000000..a25dd5f
--- /dev/null
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -0,0 +1,16 @@
+# Flags for the native audioserver
+#
+# Please add flags in alphabetical order.
+
+package: "com.android.media.audioserver"
+
+flag {
+ name: "mutex_priority_inheritance"
+ namespace: "media_audio"
+ description:
+ "Enable mutex priority inheritance in audioserver (std::mutex does not normally transfer "
+ "priority from the blocked thread to the blocking thread). "
+ "This feature helps reduce audio glitching caused by low priority blocking threads."
+ bug: "209491695"
+}
+
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
new file mode 100644
index 0000000..ff9238a
--- /dev/null
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -0,0 +1,13 @@
+# MIDI flags
+# Only add flags here which must be included in framework.jar
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.midi"
+
+flag {
+ name: "virtual_ump"
+ namespace: "media_audio"
+ description: "Enable virtual UMP MIDI."
+ bug: "291115176"
+}
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 947352f..ee80417 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -1070,6 +1070,13 @@
if (mac.size() != 6) return BAD_VALUE;
snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+ // special case for anonymized mac address:
+ // change anonymized bytes back from FD:FF:FF:FF to XX:XX:XX:XX
+ std::string address(addressBuffer);
+ if (address.compare(0, strlen("FD:FF:FF:FF"), "FD:FF:FF:FF") == 0) {
+ address.replace(0, strlen("FD:FF:FF:FF"), "XX:XX:XX:XX");
+ }
+ strcpy(addressBuffer, address.c_str());
} break;
case Tag::ipv4: {
const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1126,11 +1133,20 @@
if (!legacyAddress.empty()) {
switch (suggestDeviceAddressTag(aidl.type)) {
case Tag::mac: {
+ // special case for anonymized mac address:
+ // change anonymized bytes so that they can be scanned as HEX bytes
+ // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
+ // and universally administered
+ std::string address = legacyAddress;
+ if (address.compare(0, strlen("XX:XX:XX:XX"), "XX:XX:XX:XX") == 0) {
+ address.replace(0, strlen("XX:XX:XX:XX"), "FD:FF:FF:FF");
+ }
+
std::vector<uint8_t> mac(6);
- int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+ int status = sscanf(address.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
&mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
if (status != mac.size()) {
- ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
+ ALOGE("%s: malformed MAC address: \"%s\"", __func__, address.c_str());
return unexpected(BAD_VALUE);
}
aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 828d861..2030dc7 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,21 +25,31 @@
"libmediametrics_headers",
],
- shared_libs: [
- "packagemanager_aidl-cpp",
+ defaults: [
+ "libaaudioservice_dependencies",
+ "libaudioflinger_dependencies",
+ "libaudiopolicyservice_dependencies",
+ "latest_android_media_audio_common_types_cpp_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ ],
+
+ static_libs: [
"libaaudioservice",
- "libaudioclient",
"libaudioflinger",
"libaudiopolicyservice",
+ "libmedialogservice",
+ "libnbaio",
+ ],
+
+ shared_libs: [
+ "libaudioclient",
"libaudioprocessing",
"libbinder",
"libcutils",
"libhidlbase",
"liblog",
"libmedia",
- "libmedialogservice",
"libmediautils",
- "libnbaio",
"libnblog",
"libpowermanager",
"libutils",
@@ -59,9 +69,9 @@
"frameworks/av/services/audiopolicy/engine/interface",
"frameworks/av/services/audiopolicy/service",
"frameworks/av/services/medialog",
+ "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
- // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
- "frameworks/av/services/oboeservice",
+
],
init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index d865ab2..721a12a 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -69,7 +69,7 @@
addParameter(
DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
- .withFields({C2F(mChannelCount, value).inRange(1, 6)})
+ .withFields({C2F(mChannelCount, value).inRange(1, kMaxChannelCount)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
@@ -198,10 +198,17 @@
}
c2_status_t C2SoftAacEnc::onFlush_sm() {
+ if (mAACEncoder != nullptr) {
+ /* encoder's internal input buffer needs to be reset during flush */
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CONTROL_STATE, AACENC_INIT_ALL)) {
+ ALOGE("Failed to reset AAC encoder");
+ }
+ }
mSentCodecSpecificData = false;
mInputSize = 0u;
mNextFrameTimestampUs.reset();
mLastFrameEndTimestampUs.reset();
+ mRemainderLen = 0;
return C2_OK;
}
@@ -562,6 +569,11 @@
inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
inargs.numInSamples -= outargs.numInSamples;
}
+ } else {
+ // In case of error in encode call, discard remaining input bytes.
+ inBuffer[0] = nullptr;
+ inBufferSize[0] = 0;
+ inargs.numInSamples = 0;
}
ALOGV("encoderErr = %d mInputSize = %zu "
"inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
@@ -597,10 +609,19 @@
&outBufDesc,
&inargs,
&outargs);
+
+ // after flush, discard remaining input bytes.
+ inBuffer[0] = nullptr;
inBufferSize[0] = 0;
}
if (inBufferSize[0] > 0) {
+ if (inBufferSize[0] > kRemainderBufSize) {
+ ALOGE("Remaining bytes %d greater than remainder buffer size %zu", inBufferSize[0],
+ kRemainderBufSize);
+ work->result = C2_CORRUPTED;
+ return;
+ }
for (size_t i = 0; i < inBufferSize[0]; ++i) {
mRemainder[i] = static_cast<uint8_t *>(inBuffer[0])[i];
}
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index 9a28280..c79526c 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -47,6 +47,9 @@
const std::shared_ptr<C2BlockPool> &pool) override;
private:
+ static constexpr size_t kMaxChannelCount = 6;
+ static constexpr size_t kRemainderBufSize = kMaxChannelCount * sizeof(int16_t);
+
std::shared_ptr<IntfImpl> mIntf;
HANDLE_AACENCODER mAACEncoder;
@@ -63,7 +66,7 @@
std::atomic_uint64_t mOutIndex;
// We support max 6 channels
- uint8_t mRemainder[6 * sizeof(int16_t)];
+ uint8_t mRemainder[kRemainderBufSize];
size_t mRemainderLen;
status_t initEncoder();
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index f056759..5141d65 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -760,6 +760,19 @@
return true;
}
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
+ const size_t tmpSize = mWidth;
+ const bool needFill = tmpSize > mTmpFrameBufferSize;
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ return false;
+ }
+ if (needFill) {
+ std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+ }
+ return true;
+}
+
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work) {
if (!(work && pool)) return false;
@@ -781,6 +794,16 @@
return false;
}
+#if LIBYUV_VERSION < 1871
+ if (buffer->bitdepth > 10) {
+ ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+#endif
+
const int width = buffer->displayed_width[0];
const int height = buffer->displayed_height[0];
if (width != mWidth || height != mHeight) {
@@ -824,7 +847,7 @@
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
- if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+ if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
IntfImpl::Lock lock = mIntf->lock();
codedColorAspects = mIntf->getColorAspects_l();
bool allowRGBA1010102 = false;
@@ -836,8 +859,9 @@
format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
#if !HAVE_LIBYUV_I410_I210_TO_AB30
if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
- (buffer->image_format != libgav1::kImageFormatYuv420)) {
- ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+ (buffer->image_format != libgav1::kImageFormatYuv420) &&
+ (buffer->bitdepth == 10)) {
+ ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
mSignalledError = true;
work->result = C2_OMITTED;
work->workletsProcessed = 1u;
@@ -845,6 +869,18 @@
}
#endif
}
+ if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+ (buffer->image_format == libgav1::kImageFormatYuv422 ||
+ buffer->image_format == libgav1::kImageFormatYuv444)) {
+ // There are no 12-bit color conversion functions from YUV422/YUV444 to
+ // RGBA_1010102. Use 8-bit YV12 in this case.
+ format = HAL_PIXEL_FORMAT_YV12;
+ }
+ if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+ // in this case.
+ format = HAL_PIXEL_FORMAT_YV12;
+ }
if (mHalPixelFormat != format) {
C2StreamPixelFormatInfo::output pixelFormat(0u, format);
@@ -898,7 +934,41 @@
size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
- if (buffer->bitdepth == 10) {
+ if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+ const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+ const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+ const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+ size_t srcYStride = buffer->stride[0] / 2;
+ size_t srcUStride = buffer->stride[1] / 2;
+ size_t srcVStride = buffer->stride[2] / 2;
+ if (isMonochrome) {
+ if (!fillMonochromeRow(2048)) {
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ srcU = srcV = mTmpFrameBuffer.get();
+ srcUStride = srcVStride = 0;
+ }
+ if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, &libyuv::kYuvV2020Constants,
+ mWidth, mHeight);
+ } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+ libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else {
+ libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ }
+#endif // LIBYUV_VERSION >= 1871
+ } else if (buffer->bitdepth == 10) {
const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -923,18 +993,12 @@
#endif // HAVE_LIBYUV_I410_I210_TO_AB30
if (!processed) {
if (isMonochrome) {
- const size_t tmpSize = mWidth;
- const bool needFill = tmpSize > mTmpFrameBufferSize;
- if (!allocTmpFrameBuffer(tmpSize)) {
- ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ if (!fillMonochromeRow(512)) {
setError(work, C2_NO_MEMORY);
return false;
}
srcU = srcV = mTmpFrameBuffer.get();
srcUStride = srcVStride = 0;
- if (needFill) {
- std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
- }
}
convertYUV420Planar16ToY410OrRGBA1010102(
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
// Sets |work->result| and mSignalledError. Returns false.
void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
bool allocTmpFrameBuffer(size_t size);
+ bool fillMonochromeRow(int value);
bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work);
c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index c351b6d..775bbbf 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -62,8 +62,8 @@
void setCrypto(const sp<ICrypto> &crypto) override;
void setDescrambler(const sp<IDescrambler> &descrambler) override;
- virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
- virtual status_t queueSecureInputBuffer(
+ status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+ status_t queueSecureInputBuffer(
const sp<MediaCodecBuffer> &buffer,
bool secure,
const uint8_t *key,
@@ -73,10 +73,10 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) override;
- virtual status_t attachBuffer(
+ status_t attachBuffer(
const std::shared_ptr<C2Buffer> &c2Buffer,
const sp<MediaCodecBuffer> &buffer) override;
- virtual status_t attachEncryptedBuffer(
+ status_t attachEncryptedBuffer(
const sp<hardware::HidlMemory> &memory,
bool secure,
const uint8_t *key,
@@ -88,12 +88,12 @@
size_t numSubSamples,
const sp<MediaCodecBuffer> &buffer,
AString* errorDetailMsg) override;
- virtual status_t renderOutputBuffer(
+ status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
- virtual void pollForRenderedBuffers() override;
- virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
- virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
- virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+ void pollForRenderedBuffers() override;
+ status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+ void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+ void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
// Methods below are interface for CCodec to use.
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 7648c76..c3b32e6 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -539,6 +539,22 @@
* Available since API level 29.
*/
AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE = 10,
+
+ /**
+ * Use this preset for an echo canceller to capture the reference signal.
+ * Reserved for system components.
+ * Requires CAPTURE_AUDIO_OUTPUT permission
+ * Available since API level 35.
+ */
+ AAUDIO_INPUT_PRESET_SYSTEM_ECHO_REFERENCE = 1997,
+
+ /**
+ * Use this preset for preemptible, low-priority software hotword detection.
+ * Reserved for system components.
+ * Requires CAPTURE_AUDIO_HOTWORD permission.
+ * Available since API level 35.
+ */
+ AAUDIO_INPUT_PRESET_SYSTEM_HOTWORD = 1999,
};
typedef int32_t aaudio_input_preset_t;
@@ -623,6 +639,11 @@
* (e.g. a USB audio interface, a DAC connected to headphones) to
* specify allowable configurations of a particular device.
*
+ * Channel masks are for input only, output only, or both input and output.
+ * These channel masks are different than those defined in AudioFormat.java.
+ * If an app gets a channel mask from Java API and wants to use it in AAudio,
+ * conversion should be done by the app.
+ *
* Added in API level 32.
*/
enum {
@@ -630,10 +651,6 @@
* Invalid channel mask
*/
AAUDIO_CHANNEL_INVALID = -1,
-
- /**
- * Output audio channel mask
- */
AAUDIO_CHANNEL_FRONT_LEFT = 1 << 0,
AAUDIO_CHANNEL_FRONT_RIGHT = 1 << 1,
AAUDIO_CHANNEL_FRONT_CENTER = 1 << 2,
@@ -661,62 +678,112 @@
AAUDIO_CHANNEL_FRONT_WIDE_LEFT = 1 << 24,
AAUDIO_CHANNEL_FRONT_WIDE_RIGHT = 1 << 25,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_MONO = AAUDIO_CHANNEL_FRONT_LEFT,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_STEREO = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_2POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_LOW_FREQUENCY,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_TRI = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_TRI_BACK = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_BACK_CENTER,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_3POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_LOW_FREQUENCY,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_2POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_TOP_SIDE_LEFT |
AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_2POINT1POINT2 = AAUDIO_CHANNEL_2POINT0POINT2 |
AAUDIO_CHANNEL_LOW_FREQUENCY,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_3POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_TOP_SIDE_LEFT |
AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+ /**
+ * Supported for Input and Output
+ */
AAUDIO_CHANNEL_3POINT1POINT2 = AAUDIO_CHANNEL_3POINT0POINT2 |
AAUDIO_CHANNEL_LOW_FREQUENCY,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_QUAD = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_BACK_LEFT |
AAUDIO_CHANNEL_BACK_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_QUAD_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_SIDE_LEFT |
AAUDIO_CHANNEL_SIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_SURROUND = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_BACK_CENTER,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_PENTA = AAUDIO_CHANNEL_QUAD |
AAUDIO_CHANNEL_FRONT_CENTER,
- // aka 5POINT1_BACK
+ /**
+ * Supported for Input and Output. aka 5POINT1_BACK
+ */
AAUDIO_CHANNEL_5POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_LOW_FREQUENCY |
AAUDIO_CHANNEL_BACK_LEFT |
AAUDIO_CHANNEL_BACK_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_5POINT1_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_LOW_FREQUENCY |
AAUDIO_CHANNEL_SIDE_LEFT |
AAUDIO_CHANNEL_SIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_6POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
AAUDIO_CHANNEL_FRONT_RIGHT |
AAUDIO_CHANNEL_FRONT_CENTER |
@@ -724,32 +791,55 @@
AAUDIO_CHANNEL_BACK_LEFT |
AAUDIO_CHANNEL_BACK_RIGHT |
AAUDIO_CHANNEL_BACK_CENTER,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_7POINT1 = AAUDIO_CHANNEL_5POINT1 |
AAUDIO_CHANNEL_SIDE_LEFT |
AAUDIO_CHANNEL_SIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_5POINT1POINT2 = AAUDIO_CHANNEL_5POINT1 |
AAUDIO_CHANNEL_TOP_SIDE_LEFT |
AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_5POINT1POINT4 = AAUDIO_CHANNEL_5POINT1 |
AAUDIO_CHANNEL_TOP_FRONT_LEFT |
AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
AAUDIO_CHANNEL_TOP_BACK_LEFT |
AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_7POINT1POINT2 = AAUDIO_CHANNEL_7POINT1 |
AAUDIO_CHANNEL_TOP_SIDE_LEFT |
AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_7POINT1POINT4 = AAUDIO_CHANNEL_7POINT1 |
AAUDIO_CHANNEL_TOP_FRONT_LEFT |
AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
AAUDIO_CHANNEL_TOP_BACK_LEFT |
AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_9POINT1POINT4 = AAUDIO_CHANNEL_7POINT1POINT4 |
AAUDIO_CHANNEL_FRONT_WIDE_LEFT |
AAUDIO_CHANNEL_FRONT_WIDE_RIGHT,
+ /**
+ * Supported for only Output
+ */
AAUDIO_CHANNEL_9POINT1POINT6 = AAUDIO_CHANNEL_9POINT1POINT4 |
AAUDIO_CHANNEL_TOP_SIDE_LEFT |
AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
-
+ /**
+ * Supported for only Input
+ */
AAUDIO_CHANNEL_FRONT_BACK = AAUDIO_CHANNEL_FRONT_CENTER |
AAUDIO_CHANNEL_BACK_CENTER,
};
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 5444565..b7e0ae6 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -39,18 +39,21 @@
aaudio_result_t AAudioFlowGraph::configure(audio_format_t sourceFormat,
int32_t sourceChannelCount,
+ int32_t sourceSampleRate,
audio_format_t sinkFormat,
int32_t sinkChannelCount,
+ int32_t sinkSampleRate,
bool useMonoBlend,
+ bool useVolumeRamps,
float audioBalance,
- bool isExclusive) {
+ aaudio::resampler::MultiChannelResampler::Quality resamplerQuality) {
FlowGraphPortFloatOutput *lastOutput = nullptr;
- // TODO change back to ALOGD
- ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d, "
- "useMonoBlend = %d, audioBalance = %f, isExclusive %d",
- __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount,
- useMonoBlend, audioBalance, isExclusive);
+ ALOGD("%s() source format = 0x%08x, channels = %d, sample rate = %d, "
+ "sink format = 0x%08x, channels = %d, sample rate = %d, "
+ "useMonoBlend = %d, audioBalance = %f, useVolumeRamps %d",
+ __func__, sourceFormat, sourceChannelCount, sourceSampleRate, sinkFormat,
+ sinkChannelCount, sinkSampleRate, useMonoBlend, audioBalance, useVolumeRamps);
switch (sourceFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
@@ -85,6 +88,15 @@
lastOutput = &mLimiter->output;
}
+ if (sourceSampleRate != sinkSampleRate) {
+ mResampler.reset(aaudio::resampler::MultiChannelResampler::make(sourceChannelCount,
+ sourceSampleRate, sinkSampleRate, resamplerQuality));
+ mRateConverter = std::make_unique<SampleRateConverter>(sourceChannelCount,
+ *mResampler);
+ lastOutput->connect(&mRateConverter->input);
+ lastOutput = &mRateConverter->output;
+ }
+
// Expand the number of channels if required.
if (sourceChannelCount == 1 && sinkChannelCount > 1) {
mChannelConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
@@ -95,8 +107,7 @@
return AAUDIO_ERROR_UNIMPLEMENTED;
}
- // Apply volume ramps for only exclusive streams.
- if (isExclusive) {
+ if (useVolumeRamps) {
// Apply volume ramps to set the left/right audio balance and target volumes.
// The signals will be decoupled, volume ramps will be applied, before the signals are
// combined again.
@@ -137,9 +148,14 @@
return AAUDIO_OK;
}
-void AAudioFlowGraph::process(const void *source, void *destination, int32_t numFrames) {
- mSource->setData(source, numFrames);
- mSink->read(destination, numFrames);
+int32_t AAudioFlowGraph::pull(void *destination, int32_t targetFramesToRead) {
+ return mSink->read(destination, targetFramesToRead);
+}
+
+int32_t AAudioFlowGraph::process(const void *source, int32_t numFramesToWrite, void *destination,
+ int32_t targetFramesToRead) {
+ mSource->setData(source, numFramesToWrite);
+ return mSink->read(destination, targetFramesToRead);
}
/**
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 35fef37..e1d517e 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -30,6 +30,7 @@
#include <flowgraph/MonoToMultiConverter.h>
#include <flowgraph/MultiToManyConverter.h>
#include <flowgraph/RampLinear.h>
+#include <flowgraph/SampleRateConverter.h>
class AAudioFlowGraph {
public:
@@ -38,23 +39,57 @@
*
* @param sourceFormat
* @param sourceChannelCount
+ * @param sourceSampleRate
* @param sinkFormat
* @param sinkChannelCount
+ * @param sinkSampleRate
* @param useMonoBlend
+ * @param useVolumeRamps
* @param audioBalance
- * @param channelMask
- * @param isExclusive
+ * @param resamplerQuality
* @return
*/
aaudio_result_t configure(audio_format_t sourceFormat,
int32_t sourceChannelCount,
+ int32_t sourceSampleRate,
audio_format_t sinkFormat,
int32_t sinkChannelCount,
+ int32_t sinkSampleRate,
bool useMonoBlend,
+ bool useVolumeRamps,
float audioBalance,
- bool isExclusive);
+ aaudio::resampler::MultiChannelResampler::Quality resamplerQuality);
- void process(const void *source, void *destination, int32_t numFrames);
+ /**
+ * Attempt to read targetFramesToRead from the flowgraph.
+ * This function returns the number of frames actually read.
+ *
+ * This function does nothing if process() was not called before.
+ *
+ * @param destination
+ * @param targetFramesToRead
+ * @return numFramesRead
+ */
+ int32_t pull(void *destination, int32_t targetFramesToRead);
+
+ /**
+ * Set numFramesToWrite frames from the source into the flowgraph.
+ * Then, attempt to read targetFramesToRead from the flowgraph.
+ * This function returns the number of frames actually read.
+ *
+ * There may be data still in the flowgraph if targetFramesToRead is not large enough.
+ * Before calling process() again, pull() must be called until all the data is consumed.
+ *
+ * TODO: b/289510598 - Calculate the exact number of input frames needed for Y output frames.
+ *
+ * @param source
+ * @param numFramesToWrite
+ * @param destination
+ * @param targetFramesToRead
+ * @return numFramesRead
+ */
+ int32_t process(const void *source, int32_t numFramesToWrite, void *destination,
+ int32_t targetFramesToRead);
/**
* @param volume between 0.0 and 1.0
@@ -73,6 +108,8 @@
private:
std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
+ std::unique_ptr<RESAMPLER_OUTER_NAMESPACE::resampler::MultiChannelResampler> mResampler;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::SampleRateConverter> mRateConverter;
std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::Limiter> mLimiter;
std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 84c715f..8d9bf20 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -63,6 +63,8 @@
#define LOG_TIMESTAMPS 0
+#define ENABLE_SAMPLE_RATE_CONVERTER 1
+
AudioStreamInternal::AudioStreamInternal(AAudioServiceInterface &serviceInterface, bool inService)
: AudioStream()
, mClockModel()
@@ -132,12 +134,6 @@
request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
- request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
- request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
- request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
-
- mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
-
mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
if (getServiceHandle() < 0
&& (request.getConfiguration().getSamplesPerFrame() == 1
@@ -181,9 +177,6 @@
setChannelMask(configurationOutput.getChannelMask());
}
- mDeviceChannelCount = configurationOutput.getSamplesPerFrame();
-
- setSampleRate(configurationOutput.getSampleRate());
setDeviceId(configurationOutput.getDeviceId());
setSessionId(configurationOutput.getSessionId());
setSharingMode(configurationOutput.getSharingMode());
@@ -194,8 +187,21 @@
setIsContentSpatialized(configurationOutput.isContentSpatialized());
setInputPreset(configurationOutput.getInputPreset());
+ setDeviceSampleRate(configurationOutput.getSampleRate());
+
+ if (getSampleRate() == AAUDIO_UNSPECIFIED) {
+ setSampleRate(configurationOutput.getSampleRate());
+ }
+
+#if !ENABLE_SAMPLE_RATE_CONVERTER
+ if (getSampleRate() != getDeviceSampleRate()) {
+ goto error;
+ }
+#endif
+
// Save device format so we can do format conversion and volume scaling together.
setDeviceFormat(configurationOutput.getFormat());
+ setDeviceSamplesPerFrame(configurationOutput.getSamplesPerFrame());
setHardwareSamplesPerFrame(configurationOutput.getHardwareSamplesPerFrame());
setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
@@ -233,39 +239,50 @@
}
aaudio_result_t AudioStreamInternal::configureDataInformation(int32_t callbackFrames) {
- int32_t framesPerHardwareBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+ int32_t originalFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+ int32_t deviceFramesPerBurst = originalFramesPerBurst;
// Scale up the burst size to meet the minimum equivalent in microseconds.
// This is to avoid waking the CPU too often when the HW burst is very small
- // or at high sample rates.
- int32_t framesPerBurst = framesPerHardwareBurst;
+ // or at high sample rates. The actual number of frames that we call back to
+ // the app with will be 0 < N <= framesPerBurst so round up the division.
int32_t burstMicros = 0;
const int32_t burstMinMicros = android::AudioSystem::getAAudioHardwareBurstMinUsec();
do {
if (burstMicros > 0) { // skip first loop
- framesPerBurst *= 2;
+ deviceFramesPerBurst *= 2;
}
- burstMicros = framesPerBurst * static_cast<int64_t>(1000000) / getSampleRate();
+ burstMicros = deviceFramesPerBurst * static_cast<int64_t>(1000000) / getDeviceSampleRate();
} while (burstMicros < burstMinMicros);
ALOGD("%s() original HW burst = %d, minMicros = %d => SW burst = %d\n",
- __func__, framesPerHardwareBurst, burstMinMicros, framesPerBurst);
+ __func__, originalFramesPerBurst, burstMinMicros, deviceFramesPerBurst);
// Validate final burst size.
- if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
- ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
+ if (deviceFramesPerBurst < MIN_FRAMES_PER_BURST
+ || deviceFramesPerBurst > MAX_FRAMES_PER_BURST) {
+ ALOGE("%s - deviceFramesPerBurst out of range = %d", __func__, deviceFramesPerBurst);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
+
+ // Calculate the application framesPerBurst from the deviceFramesPerBurst
+ int32_t framesPerBurst = (static_cast<int64_t>(deviceFramesPerBurst) * getSampleRate() +
+ getDeviceSampleRate() - 1) / getDeviceSampleRate();
+
+ setDeviceFramesPerBurst(deviceFramesPerBurst);
setFramesPerBurst(framesPerBurst); // only save good value
- mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+ mDeviceBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+
+ mBufferCapacityInFrames = static_cast<int64_t>(mDeviceBufferCapacityInFrames)
+ * getSampleRate() / getDeviceSampleRate();
if (mBufferCapacityInFrames < getFramesPerBurst()
|| mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
- mClockModel.setSampleRate(getSampleRate());
- mClockModel.setFramesPerBurst(framesPerHardwareBurst);
+ mClockModel.setSampleRate(getDeviceSampleRate());
+ mClockModel.setFramesPerBurst(deviceFramesPerBurst);
if (isDataCallbackSet()) {
mCallbackFrames = callbackFrames;
@@ -315,7 +332,8 @@
mTimeOffsetNanos = offsetMicros * AAUDIO_NANOS_PER_MICROSECOND;
}
- setBufferSize(mBufferCapacityInFrames / 2); // Default buffer size to match Q
+ // Default buffer size to match Q
+ setBufferSize(mBufferCapacityInFrames / 2);
return AAUDIO_OK;
}
@@ -374,9 +392,9 @@
// Cache the buffer size which may be from client.
const int32_t previousBufferSize = mBufferSizeInFrames;
// Copy all available data from current data queue.
- uint8_t buffer[getBufferCapacity() * getBytesPerFrame()];
- android::fifo_frames_t fullFramesAvailable =
- mAudioEndpoint->read(buffer, getBufferCapacity());
+ uint8_t buffer[getDeviceBufferCapacity() * getBytesPerFrame()];
+ android::fifo_frames_t fullFramesAvailable = mAudioEndpoint->read(buffer,
+ getDeviceBufferCapacity());
mEndPointParcelable.closeDataFileDescriptor();
aaudio_result_t result = mServiceInterface.exitStandby(
mServiceStreamHandleInfo, endpointParcelable);
@@ -408,7 +426,7 @@
goto exit;
}
// Write data from previous data buffer to new endpoint.
- if (android::fifo_frames_t framesWritten =
+ if (const android::fifo_frames_t framesWritten =
mAudioEndpoint->write(buffer, fullFramesAvailable);
framesWritten != fullFramesAvailable) {
ALOGW("Some data lost after exiting standby, frames written: %d, "
@@ -448,7 +466,7 @@
ALOGD("requestStart() but DISCONNECTED");
return AAUDIO_ERROR_DISCONNECTED;
}
- aaudio_stream_state_t originalState = getState();
+ const aaudio_stream_state_t originalState = getState();
setState(AAUDIO_STREAM_STATE_STARTING);
// Clear any stale timestamps from the previous run.
@@ -605,7 +623,11 @@
// Generated in server and passed to client. Return latest.
if (mAtomicInternalTimestamp.isValid()) {
Timestamp timestamp = mAtomicInternalTimestamp.read();
- int64_t position = timestamp.getPosition() + mFramesOffsetFromService;
+ // This should not overflow as timestamp.getPosition() should be a position in a buffer and
+ // not the actual timestamp. timestamp.getNanoseconds() below uses the actual timestamp.
+ // At 48000 Hz we can run for over 100 years before overflowing the int64_t.
+ int64_t position = (timestamp.getPosition() + mFramesOffsetFromService) * getSampleRate() /
+ getDeviceSampleRate();
if (position >= 0) {
*framePosition = position;
*timeNanoseconds = timestamp.getNanoseconds();
@@ -876,44 +898,41 @@
}
aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
- int32_t adjustedFrames = requestedFrames;
const int32_t maximumSize = getBufferCapacity() - getFramesPerBurst();
- // Minimum size should be a multiple number of bursts.
- const int32_t minimumSize = 1 * getFramesPerBurst();
+ int32_t adjustedFrames = std::min(requestedFrames, maximumSize);
+ // Buffer sizes should always be a multiple of framesPerBurst.
+ int32_t numBursts = (static_cast<int64_t>(adjustedFrames) + getFramesPerBurst() - 1) /
+ getFramesPerBurst();
- // Clip to minimum size so that rounding up will work better.
- adjustedFrames = std::max(minimumSize, adjustedFrames);
-
- // Prevent arithmetic overflow by clipping before we round.
- if (adjustedFrames >= maximumSize) {
- adjustedFrames = maximumSize;
- } else {
- // Round to the next highest burst size.
- int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
- adjustedFrames = numBursts * getFramesPerBurst();
- // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
- adjustedFrames = std::min(maximumSize, adjustedFrames);
+ // Use at least one burst
+ if (numBursts == 0) {
+ numBursts = 1;
}
if (mAudioEndpoint) {
// Clip against the actual size from the endpoint.
- int32_t actualFrames = 0;
+ int32_t actualFramesDevice = 0;
+ int32_t maximumFramesDevice = getDeviceBufferCapacity() - getDeviceFramesPerBurst();
// Set to maximum size so we can write extra data when ready in order to reduce glitches.
// The amount we keep in the buffer is controlled by mBufferSizeInFrames.
- mAudioEndpoint->setBufferSizeInFrames(maximumSize, &actualFrames);
- // actualFrames should be <= actual maximum size of endpoint
- adjustedFrames = std::min(actualFrames, adjustedFrames);
+ mAudioEndpoint->setBufferSizeInFrames(maximumFramesDevice, &actualFramesDevice);
+ int32_t actualNumBursts = actualFramesDevice / getDeviceFramesPerBurst();
+ numBursts = std::min(numBursts, actualNumBursts);
}
- if (adjustedFrames != mBufferSizeInFrames) {
+ const int32_t bufferSizeInFrames = numBursts * getFramesPerBurst();
+ const int32_t deviceBufferSizeInFrames = numBursts * getDeviceFramesPerBurst();
+
+ if (deviceBufferSizeInFrames != mDeviceBufferSizeInFrames) {
android::mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE)
- .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, adjustedFrames)
+ .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, deviceBufferSizeInFrames)
.set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getXRunCount())
.record();
}
- mBufferSizeInFrames = adjustedFrames;
+ mBufferSizeInFrames = bufferSizeInFrames;
+ mDeviceBufferSizeInFrames = deviceBufferSizeInFrames;
ALOGV("%s(%d) returns %d", __func__, requestedFrames, adjustedFrames);
return (aaudio_result_t) adjustedFrames;
}
@@ -922,10 +941,18 @@
return mBufferSizeInFrames;
}
+int32_t AudioStreamInternal::getDeviceBufferSize() const {
+ return mDeviceBufferSizeInFrames;
+}
+
int32_t AudioStreamInternal::getBufferCapacity() const {
return mBufferCapacityInFrames;
}
+int32_t AudioStreamInternal::getDeviceBufferCapacity() const {
+ return mDeviceBufferCapacityInFrames;
+}
+
bool AudioStreamInternal::isClockModelInControl() const {
return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 9c06121..a5981b1 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -22,8 +22,9 @@
#include "binding/AudioEndpointParcelable.h"
#include "binding/AAudioServiceInterface.h"
-#include "client/IsochronousClockModel.h"
+#include "client/AAudioFlowGraph.h"
#include "client/AudioEndpoint.h"
+#include "client/IsochronousClockModel.h"
#include "core/AudioStream.h"
#include "utility/AudioClock.h"
@@ -56,8 +57,12 @@
int32_t getBufferSize() const override;
+ int32_t getDeviceBufferSize() const;
+
int32_t getBufferCapacity() const override;
+ int32_t getDeviceBufferCapacity() const override;
+
int32_t getXRunCount() const override {
return mXRunCount;
}
@@ -133,8 +138,6 @@
// Calculate timeout for an operation involving framesPerOperation.
int64_t calculateReasonableTimeout(int32_t framesPerOperation);
- int32_t getDeviceChannelCount() const { return mDeviceChannelCount; }
-
/**
* @return true if running in audio service, versus in app process
*/
@@ -177,6 +180,8 @@
int64_t mLastFramesWritten = 0;
int64_t mLastFramesRead = 0;
+ AAudioFlowGraph mFlowGraph;
+
private:
/*
* Asynchronous write with data conversion.
@@ -206,13 +211,10 @@
int64_t mServiceLatencyNanos = 0;
- // Sometimes the hardware is operating with a different channel count from the app.
- // Then we require conversion in AAudio.
- int32_t mDeviceChannelCount = 0;
-
int32_t mBufferSizeInFrames = 0; // local threshold to control latency
+ int32_t mDeviceBufferSizeInFrames = 0;
int32_t mBufferCapacityInFrames = 0;
-
+ int32_t mDeviceBufferCapacityInFrames = 0;
};
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index f5cc2be..d9b75da 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -47,6 +47,27 @@
}
+aaudio_result_t AudioStreamInternalCapture::open(const AudioStreamBuilder &builder) {
+ aaudio_result_t result = AudioStreamInternal::open(builder);
+ if (result == AAUDIO_OK) {
+ result = mFlowGraph.configure(getDeviceFormat(),
+ getDeviceSamplesPerFrame(),
+ getDeviceSampleRate(),
+ getFormat(),
+ getSamplesPerFrame(),
+ getSampleRate(),
+ getRequireMonoBlend(),
+ false /* useVolumeRamps */,
+ getAudioBalance(),
+ aaudio::resampler::MultiChannelResampler::Quality::Medium);
+
+ if (result != AAUDIO_OK) {
+ safeReleaseClose();
+ }
+ }
+ return result;
+}
+
void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
int64_t readCounter = mAudioEndpoint->getDataReadCounter();
int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
@@ -149,7 +170,8 @@
// Calculate frame position based off of the readCounter because
// the writeCounter might have just advanced in the background,
// causing us to sleep until a later burst.
- int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
+ const int64_t nextPosition = mAudioEndpoint->getDataReadCounter() +
+ getDeviceFramesPerBurst();
wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
}
break;
@@ -166,42 +188,75 @@
aaudio_result_t AudioStreamInternalCapture::readNowWithConversion(void *buffer,
int32_t numFrames) {
- // ALOGD("readNowWithConversion(%p, %d)",
- // buffer, numFrames);
WrappingBuffer wrappingBuffer;
- uint8_t *destination = (uint8_t *) buffer;
- int32_t framesLeft = numFrames;
+ uint8_t *byteBuffer = (uint8_t *) buffer;
+ int32_t framesLeftInByteBuffer = numFrames;
+
+ if (framesLeftInByteBuffer > 0) {
+ // Pull data from the flowgraph in case there is residual data.
+ const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.pull(
+ (void *)byteBuffer,
+ framesLeftInByteBuffer);
+
+ const int32_t numBytesActuallyWrittenToByteBuffer =
+ framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+ byteBuffer += numBytesActuallyWrittenToByteBuffer;
+ framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+ }
mAudioEndpoint->getFullFramesAvailable(&wrappingBuffer);
- // Read data in one or two parts.
- for (int partIndex = 0; framesLeft > 0 && partIndex < WrappingBuffer::SIZE; partIndex++) {
- int32_t framesToProcess = framesLeft;
- const int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
- if (framesAvailable <= 0) break;
+ // Write data in one or two parts.
+ int partIndex = 0;
+ int framesReadFromAudioEndpoint = 0;
+ while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+ const int32_t totalFramesInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+ int32_t framesAvailableInWrappingBuffer = totalFramesInWrappingBuffer;
+ uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
- if (framesToProcess > framesAvailable) {
- framesToProcess = framesAvailable;
+ if (framesAvailableInWrappingBuffer <= 0) break;
+
+ // Put data from the wrapping buffer into the flowgraph 8 frames at a time.
+ // Continuously pull as much data as possible from the flowgraph into the byte buffer.
+ // The return value of mFlowGraph.process is the number of frames actually pulled.
+ while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+ const int32_t framesToReadFromWrappingBuffer = std::min(flowgraph::kDefaultBufferSize,
+ framesAvailableInWrappingBuffer);
+
+ const int32_t numBytesToReadFromWrappingBuffer = getBytesPerDeviceFrame() *
+ framesToReadFromWrappingBuffer;
+
+ // If framesActuallyWrittenToByteBuffer < framesLeftInByteBuffer, it is guaranteed
+ // that all the data is pulled. If there is no more space in the byteBuffer, the
+ // remaining data will be pulled in the following readNowWithConversion().
+ const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.process(
+ (void *)currentWrappingBuffer,
+ framesToReadFromWrappingBuffer,
+ (void *)byteBuffer,
+ framesLeftInByteBuffer);
+
+ const int32_t numBytesActuallyWrittenToByteBuffer =
+ framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+ byteBuffer += numBytesActuallyWrittenToByteBuffer;
+ framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+ currentWrappingBuffer += numBytesToReadFromWrappingBuffer;
+ framesAvailableInWrappingBuffer -= framesToReadFromWrappingBuffer;
+
+ //ALOGD("%s() numBytesActuallyWrittenToByteBuffer %d, framesLeftInByteBuffer %d"
+ // "framesAvailableInWrappingBuffer %d, framesReadFromAudioEndpoint %d"
+ // , __func__, numBytesActuallyWrittenToByteBuffer, framesLeftInByteBuffer,
+ // framesAvailableInWrappingBuffer, framesReadFromAudioEndpoint);
}
-
- const int32_t numBytes = getBytesPerFrame() * framesToProcess;
- const int32_t numSamples = framesToProcess * getSamplesPerFrame();
-
- const audio_format_t sourceFormat = getDeviceFormat();
- const audio_format_t destinationFormat = getFormat();
-
- memcpy_by_audio_format(destination, destinationFormat,
- wrappingBuffer.data[partIndex], sourceFormat, numSamples);
-
- destination += numBytes;
- framesLeft -= framesToProcess;
+ framesReadFromAudioEndpoint += totalFramesInWrappingBuffer -
+ framesAvailableInWrappingBuffer;
+ partIndex++;
}
- int32_t framesProcessed = numFrames - framesLeft;
- mAudioEndpoint->advanceReadIndex(framesProcessed);
+ // The audio endpoint should reference the number of frames written to the wrapping buffer.
+ mAudioEndpoint->advanceReadIndex(framesReadFromAudioEndpoint);
- //ALOGD("readNowWithConversion() returns %d", framesProcessed);
- return framesProcessed;
+ // The internal code should use the number of frames read from the app.
+ return numFrames - framesLeftInByteBuffer;
}
int64_t AudioStreamInternalCapture::getFramesWritten() {
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 87017de..10e247d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -32,6 +32,8 @@
bool inService = false);
virtual ~AudioStreamInternalCapture() = default;
+ aaudio_result_t open(const AudioStreamBuilder &builder) override;
+
aaudio_result_t read(void *buffer,
int32_t numFrames,
int64_t timeoutNanoseconds) override;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 89dd8ff..3badb0b 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -48,14 +48,18 @@
aaudio_result_t AudioStreamInternalPlay::open(const AudioStreamBuilder &builder) {
aaudio_result_t result = AudioStreamInternal::open(builder);
+ const bool useVolumeRamps = (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE);
if (result == AAUDIO_OK) {
result = mFlowGraph.configure(getFormat(),
getSamplesPerFrame(),
+ getSampleRate(),
getDeviceFormat(),
- getDeviceChannelCount(),
+ getDeviceSamplesPerFrame(),
+ getDeviceSampleRate(),
getRequireMonoBlend(),
+ useVolumeRamps,
getAudioBalance(),
- (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE));
+ aaudio::resampler::MultiChannelResampler::Quality::Medium);
if (result != AAUDIO_OK) {
safeReleaseClose();
@@ -186,7 +190,7 @@
// Sleep if there is too much data in the buffer.
// Calculate an ideal time to wake up.
if (wakeTimePtr != nullptr
- && (mAudioEndpoint->getFullFramesAvailable() >= getBufferSize())) {
+ && (mAudioEndpoint->getFullFramesAvailable() >= getDeviceBufferSize())) {
// By default wake up a few milliseconds from now. // TODO review
int64_t wakeTime = currentNanoTime + (1 * AAUDIO_NANOS_PER_MILLISECOND);
aaudio_stream_state_t state = getState();
@@ -206,12 +210,12 @@
// If the appBufferSize is smaller than the endpointBufferSize then
// we will have room to write data beyond the appBufferSize.
// That is a technique used to reduce glitches without adding latency.
- const int32_t appBufferSize = getBufferSize();
+ const int64_t appBufferSize = getDeviceBufferSize();
// The endpoint buffer size is set to the maximum that can be written.
// If we use it then we must carve out some room to write data when we wake up.
- const int32_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
- - getFramesPerBurst();
- const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize);
+ const int64_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
+ - getDeviceFramesPerBurst();
+ const int64_t bestBufferSize = std::min(appBufferSize, endBufferSize);
int64_t targetReadPosition = mAudioEndpoint->getDataWriteCounter() - bestBufferSize;
wakeTime = mClockModel.convertPositionToTime(targetReadPosition);
}
@@ -232,37 +236,84 @@
int32_t numFrames) {
WrappingBuffer wrappingBuffer;
uint8_t *byteBuffer = (uint8_t *) buffer;
- int32_t framesLeft = numFrames;
+ int32_t framesLeftInByteBuffer = numFrames;
mAudioEndpoint->getEmptyFramesAvailable(&wrappingBuffer);
// Write data in one or two parts.
int partIndex = 0;
- while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
- int32_t framesToWrite = framesLeft;
- int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
- if (framesAvailable > 0) {
- if (framesToWrite > framesAvailable) {
- framesToWrite = framesAvailable;
- }
+ int framesWrittenToAudioEndpoint = 0;
+ while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+ int32_t framesAvailableInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+ uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
- int32_t numBytes = getBytesPerFrame() * framesToWrite;
+ if (framesAvailableInWrappingBuffer > 0) {
+ // Pull data from the flowgraph in case there is residual data.
+ const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.pull(
+ (void*) currentWrappingBuffer,
+ framesAvailableInWrappingBuffer);
- mFlowGraph.process((void *)byteBuffer,
- wrappingBuffer.data[partIndex],
- framesToWrite);
-
- byteBuffer += numBytes;
- framesLeft -= framesToWrite;
+ const int32_t numBytesActuallyWrittenToWrappingBuffer =
+ framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+ currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+ framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+ framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
} else {
break;
}
+
+ // Put data from byteBuffer into the flowgraph one buffer (8 frames) at a time.
+ // Continuously pull as much data as possible from the flowgraph into the wrapping buffer.
+ // The return value of mFlowGraph.process is the number of frames actually pulled.
+ while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+ int32_t framesToWriteFromByteBuffer = std::min(flowgraph::kDefaultBufferSize,
+ framesLeftInByteBuffer);
+ // If the wrapping buffer is running low, write one frame at a time.
+ if (framesAvailableInWrappingBuffer < flowgraph::kDefaultBufferSize) {
+ framesToWriteFromByteBuffer = 1;
+ }
+
+ const int32_t numBytesToWriteFromByteBuffer = getBytesPerFrame() *
+ framesToWriteFromByteBuffer;
+
+ //ALOGD("%s() framesLeftInByteBuffer %d, framesAvailableInWrappingBuffer %d"
+ // "framesToWriteFromByteBuffer %d, numBytesToWriteFromByteBuffer %d"
+ // , __func__, framesLeftInByteBuffer, framesAvailableInWrappingBuffer,
+ // framesToWriteFromByteBuffer, numBytesToWriteFromByteBuffer);
+
+ const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.process(
+ (void *)byteBuffer,
+ framesToWriteFromByteBuffer,
+ (void *)currentWrappingBuffer,
+ framesAvailableInWrappingBuffer);
+
+ byteBuffer += numBytesToWriteFromByteBuffer;
+ framesLeftInByteBuffer -= framesToWriteFromByteBuffer;
+ const int32_t numBytesActuallyWrittenToWrappingBuffer =
+ framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+ currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+ framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+ framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
+
+ //ALOGD("%s() numBytesActuallyWrittenToWrappingBuffer %d, framesLeftInByteBuffer %d"
+ // "framesActuallyWrittenToWrappingBuffer %d, numBytesToWriteFromByteBuffer %d"
+ // "framesWrittenToAudioEndpoint %d"
+ // , __func__, numBytesActuallyWrittenToWrappingBuffer, framesLeftInByteBuffer,
+ // framesActuallyWrittenToWrappingBuffer, numBytesToWriteFromByteBuffer,
+ // framesWrittenToAudioEndpoint);
+ }
partIndex++;
}
- int32_t framesWritten = numFrames - framesLeft;
- mAudioEndpoint->advanceWriteIndex(framesWritten);
+ //ALOGD("%s() framesWrittenToAudioEndpoint %d, numFrames %d"
+ // "framesLeftInByteBuffer %d"
+ // , __func__, framesWrittenToAudioEndpoint, numFrames,
+ // framesLeftInByteBuffer);
- return framesWritten;
+ // The audio endpoint should reference the number of frames written to the wrapping buffer.
+ mAudioEndpoint->advanceWriteIndex(framesWrittenToAudioEndpoint);
+
+ // The internal code should use the number of frames read from the app.
+ return numFrames - framesLeftInByteBuffer;
}
int64_t AudioStreamInternalPlay::getFramesRead() {
@@ -284,7 +335,6 @@
return mLastFramesWritten;
}
-
// Render audio in the application callback and then write the data to the stream.
void *AudioStreamInternalPlay::callbackLoop() {
ALOGD("%s() entering >>>>>>>>>>>>>>>", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index e761807..b51b5d0 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -21,7 +21,6 @@
#include <aaudio/AAudio.h>
#include "binding/AAudioServiceInterface.h"
-#include "client/AAudioFlowGraph.h"
#include "client/AudioStreamInternal.h"
using android::sp;
@@ -89,13 +88,11 @@
* Asynchronous write with data conversion.
* @param buffer
* @param numFrames
- * @return fdrames written or negative error
+ * @return frames written or negative error
*/
aaudio_result_t writeNowWithConversion(const void *buffer,
int32_t numFrames);
- AAudioFlowGraph mFlowGraph;
-
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8a13a6f..1e27a81 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -571,13 +571,15 @@
AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
- return audioStream->getFramesWritten();
+ return audioStream->getFramesWritten() * audioStream->getSampleRate() /
+ audioStream->getDeviceSampleRate();
}
AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
- return audioStream->getFramesRead();
+ return audioStream->getFramesRead() * audioStream->getSampleRate() /
+ audioStream->getDeviceSampleRate();
}
AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream,
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index f305e46..1db62f3 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -184,6 +184,8 @@
case AAUDIO_INPUT_PRESET_VOICE_RECOGNITION:
case AAUDIO_INPUT_PRESET_UNPROCESSED:
case AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE:
+ case AAUDIO_INPUT_PRESET_SYSTEM_ECHO_REFERENCE:
+ case AAUDIO_INPUT_PRESET_SYSTEM_HOTWORD:
break; // valid
default:
ALOGD("input preset not valid = %d", mInputPreset);
@@ -317,4 +319,4 @@
ALOGD("mHardwareSamplesPerFrame = %6d", mHardwareSamplesPerFrame);
ALOGD("mHardwareSampleRate = %6d", mHardwareSampleRate);
ALOGD("mHardwareAudioFormat = %6d", (int)mHardwareAudioFormat);
-}
\ No newline at end of file
+}
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 56ef1e6..e0fd325 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -134,7 +134,8 @@
.set(AMEDIAMETRICS_PROP_ENCODINGHARDWARE,
android::toString(getHardwareFormat()).c_str())
.set(AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, (int32_t)getHardwareSamplesPerFrame())
- .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate());
+ .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate())
+ .set(AMEDIAMETRICS_PROP_SAMPLERATECLIENT, (int32_t)getSampleRate());
if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 9b4b734..f2f5cac 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -204,10 +204,18 @@
return mBufferCapacity;
}
+ virtual int32_t getDeviceBufferCapacity() const {
+ return mDeviceBufferCapacity;
+ }
+
virtual int32_t getFramesPerBurst() const {
return mFramesPerBurst;
}
+ virtual int32_t getDeviceFramesPerBurst() const {
+ return mDeviceFramesPerBurst;
+ }
+
virtual int32_t getXRunCount() const {
return AAUDIO_ERROR_UNIMPLEMENTED;
}
@@ -224,6 +232,10 @@
return mSampleRate;
}
+ aaudio_result_t getDeviceSampleRate() const {
+ return mDeviceSampleRate;
+ }
+
aaudio_result_t getHardwareSampleRate() const {
return mHardwareSampleRate;
}
@@ -240,6 +252,10 @@
return mSamplesPerFrame;
}
+ aaudio_result_t getDeviceSamplesPerFrame() const {
+ return mDeviceSamplesPerFrame;
+ }
+
aaudio_result_t getHardwareSamplesPerFrame() const {
return mHardwareSamplesPerFrame;
}
@@ -322,10 +338,10 @@
}
/**
- * This is only valid after setChannelMask() and setDeviceFormat() have been called.
+ * This is only valid after setDeviceSamplesPerFrame() and setDeviceFormat() have been called.
*/
int32_t getBytesPerDeviceFrame() const {
- return getSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
+ return getDeviceSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
}
virtual int64_t getFramesWritten() = 0;
@@ -365,6 +381,11 @@
mSamplesPerFrame = AAudioConvert_channelMaskToCount(channelMask);
}
+ void setDeviceSamplesPerFrame(int32_t deviceSamplesPerFrame) {
+ mDeviceSamplesPerFrame = deviceSamplesPerFrame;
+ }
+
+
/**
* @return true if data callback has been specified
*/
@@ -542,6 +563,11 @@
}
// This should not be called after the open() call.
+ void setDeviceSampleRate(int32_t deviceSampleRate) {
+ mDeviceSampleRate = deviceSampleRate;
+ }
+
+ // This should not be called after the open() call.
void setHardwareSampleRate(int32_t hardwareSampleRate) {
mHardwareSampleRate = hardwareSampleRate;
}
@@ -552,11 +578,21 @@
}
// This should not be called after the open() call.
+ void setDeviceFramesPerBurst(int32_t deviceFramesPerBurst) {
+ mDeviceFramesPerBurst = deviceFramesPerBurst;
+ }
+
+ // This should not be called after the open() call.
void setBufferCapacity(int32_t bufferCapacity) {
mBufferCapacity = bufferCapacity;
}
// This should not be called after the open() call.
+ void setDeviceBufferCapacity(int32_t deviceBufferCapacity) {
+ mDeviceBufferCapacity = deviceBufferCapacity;
+ }
+
+ // This should not be called after the open() call.
void setSharingMode(aaudio_sharing_mode_t sharingMode) {
mSharingMode = sharingMode;
}
@@ -721,9 +757,11 @@
// These do not change after open().
int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+ int32_t mDeviceSamplesPerFrame = AAUDIO_UNSPECIFIED;
int32_t mHardwareSamplesPerFrame = AAUDIO_UNSPECIFIED;
aaudio_channel_mask_t mChannelMask = AAUDIO_UNSPECIFIED;
int32_t mSampleRate = AAUDIO_UNSPECIFIED;
+ int32_t mDeviceSampleRate = AAUDIO_UNSPECIFIED;
int32_t mHardwareSampleRate = AAUDIO_UNSPECIFIED;
int32_t mDeviceId = AAUDIO_UNSPECIFIED;
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
@@ -732,7 +770,9 @@
audio_format_t mHardwareFormat = AUDIO_FORMAT_DEFAULT;
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
int32_t mFramesPerBurst = 0;
+ int32_t mDeviceFramesPerBurst = 0;
int32_t mBufferCapacity = 0;
+ int32_t mDeviceBufferCapacity = 0;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/VersionExperiment.txt b/media/libaaudio/src/core/VersionExperiment.txt
deleted file mode 100644
index 071239b..0000000
--- a/media/libaaudio/src/core/VersionExperiment.txt
+++ /dev/null
@@ -1,55 +0,0 @@
-
-// TODO Experiment with versioning. This may be removed or changed dramatically.
-// Please ignore for now. Do not review.
-#define OBOE_VERSION_EXPERIMENT 0
-#if OBOE_VERSION_EXPERIMENT
-
-#define OBOE_EARLIEST_SUPPORTED_VERSION 1
-#define OBOE_CURRENT_VERSION 2
-
-typedef struct OboeInterface_s {
- int32_t size; // do not use size_t because its size can vary
- int32_t version;
- int32_t reserved1;
- void * reserved2;
- oboe_result_t (*createStreamBuilder)(OboeStreamBuilder *);
-} OboeInterface_t;
-
-OboeInterface_t s_oboe_template = {
- .size = sizeof(OboeInterface_t),
- .version = OBOE_CURRENT_VERSION,
- .reserved1 = 0,
- .reserved2 = NULL,
- .createStreamBuilder = Oboe_createStreamBuilder
-};
-
-oboe_result_t Oboe_Unimplemented(OboeInterface_t *oboe) {
- (void) oboe;
- return OBOE_ERROR_UNIMPLEMENTED;
-}
-
-typedef oboe_result_t (*OboeFunction_t)(OboeInterface_t *oboe);
-
-int32_t Oboe_Initialize(OboeInterface_t *oboe, uint32_t flags) {
- if (oboe->version < OBOE_EARLIEST_SUPPORTED_VERSION) {
- return OBOE_ERROR_INCOMPATIBLE;
- }
- // Fill in callers vector table.
- uint8_t *start = (uint8_t*)&oboe->reserved1;
- uint8_t *end;
- if (oboe->size <= s_oboe_template.size) {
- end = ((uint8_t *)oboe) + oboe->size;
- } else {
- end = ((uint8_t *)oboe) + s_oboe_template.size;
- // Assume the rest of the structure is vectors.
- // Point them all to OboeInternal_Unimplemented()
- // Point to first vector past end of the known structure.
- OboeFunction_t *next = (OboeFunction_t*)end;
- while ((((uint8_t *)next) - ((uint8_t *)oboe)) < oboe->size) {
- *next++ = Oboe_Unimplemented;
- }
- }
- memcpy(&oboe->reserved1, &s_oboe_template.reserved1, end - start);
- return OBOE_OK;
-}
-#endif /* OBOE_VERSION_EXPERIMENT -------------------------- */
diff --git a/media/libaaudio/src/fifo/FifoControllerBase.cpp b/media/libaaudio/src/fifo/FifoControllerBase.cpp
index ad6d041..e79bf96 100644
--- a/media/libaaudio/src/fifo/FifoControllerBase.cpp
+++ b/media/libaaudio/src/fifo/FifoControllerBase.cpp
@@ -21,7 +21,8 @@
#include <stdint.h>
#include "FifoControllerBase.h"
-using namespace android; // TODO just import names needed
+using android::FifoControllerBase;
+using android::fifo_frames_t;
FifoControllerBase::FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold)
: mCapacity(capacity)
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index a3ce58c..611ddcd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -135,10 +135,9 @@
int coefficientIndex = 0;
double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
// Stretch the sinc function for low pass filtering.
- const float cutoffScaler = normalizedCutoff *
- ((outputRate < inputRate)
- ? ((float)outputRate / inputRate)
- : ((float)inputRate / outputRate));
+ const float cutoffScaler = (outputRate < inputRate)
+ ? (normalizedCutoff * (float)outputRate / inputRate)
+ : 1.0f; // Do not filter when upsampling.
const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
const float numTapsHalfInverse = 1.0f / numTapsHalf;
for (int i = 0; i < numRows; i++) {
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index 717f3fd..9e47335 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -111,6 +111,9 @@
* Set lower to reduce aliasing.
* Default is 0.70.
*
+ * Note that this value is ignored when upsampling, which is when
+ * the outputRate is higher than the inputRate.
+ *
* @param normalizedCutoff anti-aliasing filter cutoff
* @return address of this builder for chaining calls
*/
@@ -227,6 +230,10 @@
/**
* Generate the filter coefficients in optimal order.
+ *
+ * Note that normalizedCutoff is ignored when upsampling, which is when
+ * the outputRate is higher than the inputRate.
+ *
* @param inputRate sample rate of the input stream
* @param outputRate sample rate of the output stream
* @param numRows number of rows in the array that contain a set of tap coefficients
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index e760dab..fe4bf2c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -208,6 +208,12 @@
setBufferCapacity(getBufferCapacityFromDevice());
setFramesPerBurst(getFramesPerBurstFromDevice());
+ // Use the same values for device values.
+ setDeviceSamplesPerFrame(getSamplesPerFrame());
+ setDeviceSampleRate(mAudioRecord->getSampleRate());
+ setDeviceBufferCapacity(getBufferCapacityFromDevice());
+ setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
setHardwareSamplesPerFrame(mAudioRecord->getHalChannelCount());
setHardwareSampleRate(mAudioRecord->getHalSampleRate());
setHardwareFormat(mAudioRecord->getHalFormat());
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 67ee42e..59fdabc 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -203,6 +203,12 @@
setBufferCapacity(getBufferCapacityFromDevice());
setFramesPerBurst(getFramesPerBurstFromDevice());
+ // Use the same values for device values.
+ setDeviceSamplesPerFrame(getSamplesPerFrame());
+ setDeviceSampleRate(mAudioTrack->getSampleRate());
+ setDeviceBufferCapacity(getBufferCapacityFromDevice());
+ setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
setHardwareSamplesPerFrame(mAudioTrack->getHalChannelCount());
setHardwareSampleRate(mAudioTrack->getHalSampleRate());
setHardwareFormat(mAudioTrack->getHalFormat());
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index e8324a8..0cbf79d 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -383,11 +383,10 @@
return AUDIO_CHANNEL_OUT_7POINT1POINT2;
case AAUDIO_CHANNEL_7POINT1POINT4:
return AUDIO_CHANNEL_OUT_7POINT1POINT4;
- // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
- // case AAUDIO_CHANNEL_9POINT1POINT4:
- // return AUDIO_CHANNEL_OUT_9POINT1POINT4;
- // case AAUDIO_CHANNEL_9POINT1POINT6:
- // return AUDIO_CHANNEL_OUT_9POINT1POINT6;
+ case AAUDIO_CHANNEL_9POINT1POINT4:
+ return AUDIO_CHANNEL_OUT_9POINT1POINT4;
+ case AAUDIO_CHANNEL_9POINT1POINT6:
+ return AUDIO_CHANNEL_OUT_9POINT1POINT6;
default:
ALOGE("%s() %#x unrecognized", __func__, channelMask);
return AUDIO_CHANNEL_INVALID;
@@ -465,11 +464,10 @@
return AAUDIO_CHANNEL_7POINT1POINT2;
case AUDIO_CHANNEL_OUT_7POINT1POINT4:
return AAUDIO_CHANNEL_7POINT1POINT4;
- // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
- // case AUDIO_CHANNEL_OUT_9POINT1POINT4:
- // return AAUDIO_CHANNEL_9POINT1POINT4;
- // case AUDIO_CHANNEL_OUT_9POINT1POINT6:
- // return AAUDIO_CHANNEL_9POINT1POINT6;
+ case AUDIO_CHANNEL_OUT_9POINT1POINT4:
+ return AAUDIO_CHANNEL_9POINT1POINT4;
+ case AUDIO_CHANNEL_OUT_9POINT1POINT6:
+ return AAUDIO_CHANNEL_9POINT1POINT6;
default:
ALOGE("%s() %#x unrecognized", __func__, channelMask);
return AAUDIO_CHANNEL_INVALID;
diff --git a/media/libaaudio/src/utility/AudioClock.h b/media/libaaudio/src/utility/AudioClock.h
index d5d4ef4..37f5b39 100644
--- a/media/libaaudio/src/utility/AudioClock.h
+++ b/media/libaaudio/src/utility/AudioClock.h
@@ -33,7 +33,7 @@
public:
static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
struct timespec time;
- int result = clock_gettime(clockId, &time);
+ const int result = clock_gettime(clockId, &time);
if (result < 0) {
return -errno;
}
@@ -56,7 +56,7 @@
time.tv_sec = nanoTime / AAUDIO_NANOS_PER_SECOND;
// Calculate the fractional nanoseconds. Avoids expensive % operation.
time.tv_nsec = nanoTime - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
- int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
+ const int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
switch (err) {
case EINTR:
return 1;
@@ -86,7 +86,7 @@
// Calculate the fractional nanoseconds. Avoids expensive % operation.
time.tv_nsec = nanoseconds - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
const int flags = 0; // documented as relative sleep
- int err = clock_nanosleep(clockId, flags, &time, nullptr);
+ const int err = clock_nanosleep(clockId, flags, &time, nullptr);
switch (err) {
case EINTR:
return 1;
diff --git a/media/libaaudio/src/utility/MonotonicCounter.h b/media/libaaudio/src/utility/MonotonicCounter.h
index 51eb69b..b58634f 100644
--- a/media/libaaudio/src/utility/MonotonicCounter.h
+++ b/media/libaaudio/src/utility/MonotonicCounter.h
@@ -104,7 +104,7 @@
*/
void roundUp64(int32_t period) {
if (period > 0) {
- int64_t numPeriods = (mCounter64 + period - 1) / period;
+ const int64_t numPeriods = (mCounter64 + period - 1) / period;
mCounter64 = numPeriods * period;
}
}
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 24041bc..1129ced 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -145,6 +145,7 @@
srcs: ["test_flowgraph.cpp"],
shared_libs: [
"libaaudio_internal",
+ "libaudioutils",
"libbinder",
"libcutils",
"libutils",
@@ -181,7 +182,10 @@
name: "test_full_queue",
defaults: ["libaaudio_tests_defaults"],
srcs: ["test_full_queue.cpp"],
- shared_libs: ["libaaudio"],
+ shared_libs: [
+ "libaaudio",
+ "liblog"
+ ],
}
cc_test {
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 6f75f5a..7eb8b0d 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -25,6 +25,8 @@
#include <gtest/gtest.h>
+#include <aaudio/AAudio.h>
+#include "client/AAudioFlowGraph.h"
#include "flowgraph/ClipToRange.h"
#include "flowgraph/Limiter.h"
#include "flowgraph/MonoBlend.h"
@@ -37,8 +39,18 @@
#include "flowgraph/SinkI32.h"
#include "flowgraph/SourceI16.h"
#include "flowgraph/SourceI24.h"
+#include "flowgraph/resampler/IntegerRatio.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+using TestFlowgraphResamplerParams = std::tuple<int32_t, int32_t, MultiChannelResampler::Quality>;
+
+enum {
+ PARAM_SOURCE_SAMPLE_RATE = 0,
+ PARAM_SINK_SAMPLE_RATE,
+ PARAM_RESAMPLER_QUALITY
+};
constexpr int kBytesPerI24Packed = 3;
@@ -394,3 +406,240 @@
EXPECT_NEAR(expected[i], output[i], tolerance);
}
}
+
+TEST(test_flowgraph, module_sinki16_multiple_reads) {
+ static constexpr int kNumSamples = 8;
+ std::array<int16_t, kNumSamples + 10> output; // larger than input
+
+ SourceFloat sourceFloat{1};
+ SinkI16 sinkI16{1};
+
+ sourceFloat.setData(kInputFloat.data(), kNumSamples);
+ sourceFloat.output.connect(&sinkI16.input);
+
+ output.fill(777);
+
+ // Read the first half of the data
+ int32_t numRead = sinkI16.read(output.data(), kNumSamples / 2);
+ ASSERT_EQ(kNumSamples / 2, numRead);
+ for (int i = 0; i < numRead; i++) {
+ EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+ }
+
+ // Read the rest of the data
+ numRead = sinkI16.read(output.data(), output.size());
+ ASSERT_EQ(kNumSamples / 2, numRead);
+ for (int i = 0; i < numRead; i++) {
+ EXPECT_EQ(kExpectedI16.at(i + kNumSamples / 2), output.at(i)) << ", i = " << i;
+ }
+}
+
+void checkSampleRateConversionVariedSizes(int32_t sourceSampleRate,
+ int32_t sinkSampleRate,
+ MultiChannelResampler::Quality resamplerQuality) {
+ AAudioFlowGraph flowgraph;
+ aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+ 1 /* sourceChannelCount */,
+ sourceSampleRate,
+ AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+ 1 /* sinkChannelCount */,
+ sinkSampleRate,
+ false /* useMonoBlend */,
+ false /* useVolumeRamps */,
+ 0.0f /* audioBalance */,
+ resamplerQuality);
+
+ IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+ ratio.reduce();
+
+ ASSERT_EQ(AAUDIO_OK, result);
+
+ const int inputSize = ratio.getNumerator();
+ const int outputSize = ratio.getDenominator();
+ float input[inputSize];
+ float output[outputSize];
+
+ for (int i = 0; i < inputSize; i++) {
+ input[i] = i * 1.0f / inputSize;
+ }
+
+ int inputUsed = 0;
+ int outputRead = 0;
+ int curInputSize = 1;
+
+ // Process the data with larger and larger input buffer sizes.
+ while (inputUsed < inputSize) {
+ outputRead += flowgraph.process((void *) (input + inputUsed),
+ curInputSize,
+ (void *) (output + outputRead),
+ outputSize - outputRead);
+ inputUsed += curInputSize;
+ curInputSize = std::min(curInputSize + 5, inputSize - inputUsed);
+ }
+
+ ASSERT_EQ(outputSize, outputRead);
+
+ for (int i = 1; i < outputSize; i++) {
+ // The first values of the flowgraph will be close to zero.
+ // Besides those, the values should be strictly increasing.
+ if (output[i - 1] > 0.01f) {
+ EXPECT_GT(output[i], output[i - 1]);
+ }
+ }
+}
+
+TEST(test_flowgraph, flowgraph_varied_sizes_all) {
+ const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+ const MultiChannelResampler::Quality qualities[] =
+ {
+ MultiChannelResampler::Quality::Fastest,
+ MultiChannelResampler::Quality::Low,
+ MultiChannelResampler::Quality::Medium,
+ MultiChannelResampler::Quality::High,
+ MultiChannelResampler::Quality::Best
+ };
+ for (int srcRate : rates) {
+ for (int destRate : rates) {
+ for (auto quality : qualities) {
+ if (srcRate != destRate) {
+ checkSampleRateConversionVariedSizes(srcRate, destRate, quality);
+ }
+ }
+ }
+ }
+}
+
+void checkSampleRateConversionPullLater(int32_t sourceSampleRate,
+ int32_t sinkSampleRate,
+ MultiChannelResampler::Quality resamplerQuality) {
+ AAudioFlowGraph flowgraph;
+ aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+ 1 /* sourceChannelCount */,
+ sourceSampleRate,
+ AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+ 1 /* sinkChannelCount */,
+ sinkSampleRate,
+ false /* useMonoBlend */,
+ false /* useVolumeRamps */,
+ 0.0f /* audioBalance */,
+ resamplerQuality);
+
+ IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+ ratio.reduce();
+
+ ASSERT_EQ(AAUDIO_OK, result);
+
+ const int inputSize = ratio.getNumerator();
+ const int outputSize = ratio.getDenominator();
+ float input[inputSize];
+ float output[outputSize];
+
+ for (int i = 0; i < inputSize; i++) {
+ input[i] = i * 1.0f / inputSize;
+ }
+
+ // Read half the data with process.
+ int outputRead = flowgraph.process((void *) input,
+ inputSize,
+ (void *) output,
+ outputSize / 2);
+
+ ASSERT_EQ(outputSize / 2, outputRead);
+
+ // Now read the other half of the data with pull.
+ outputRead += flowgraph.pull(
+ (void *) (output + outputRead),
+ outputSize - outputRead);
+
+ ASSERT_EQ(outputSize, outputRead);
+ for (int i = 1; i < outputSize; i++) {
+ // The first values of the flowgraph will be close to zero.
+ // Besides those, the values should be strictly increasing.
+ if (output[i - 1] > 0.01f) {
+ EXPECT_GT(output[i], output[i - 1]);
+ }
+ }
+}
+
+// TODO: b/289508408 - Remove non-parameterized tests if they get noisy.
+TEST(test_flowgraph, flowgraph_pull_later_all) {
+ const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+ const MultiChannelResampler::Quality qualities[] =
+ {
+ MultiChannelResampler::Quality::Fastest,
+ MultiChannelResampler::Quality::Low,
+ MultiChannelResampler::Quality::Medium,
+ MultiChannelResampler::Quality::High,
+ MultiChannelResampler::Quality::Best
+ };
+ for (int srcRate : rates) {
+ for (int destRate : rates) {
+ for (auto quality : qualities) {
+ if (srcRate != destRate) {
+ checkSampleRateConversionPullLater(srcRate, destRate, quality);
+ }
+ }
+ }
+ }
+}
+
+class TestFlowgraphSampleRateConversion : public ::testing::Test,
+ public ::testing::WithParamInterface<TestFlowgraphResamplerParams> {
+};
+
+const char* resamplerQualityToString(MultiChannelResampler::Quality quality) {
+ switch (quality) {
+ case MultiChannelResampler::Quality::Fastest: return "FASTEST";
+ case MultiChannelResampler::Quality::Low: return "LOW";
+ case MultiChannelResampler::Quality::Medium: return "MEDIUM";
+ case MultiChannelResampler::Quality::High: return "HIGH";
+ case MultiChannelResampler::Quality::Best: return "BEST";
+ }
+ return "UNKNOWN";
+}
+
+static std::string getTestName(
+ const ::testing::TestParamInfo<TestFlowgraphResamplerParams>& info) {
+ return std::string()
+ + std::to_string(std::get<PARAM_SOURCE_SAMPLE_RATE>(info.param))
+ + "__" + std::to_string(std::get<PARAM_SINK_SAMPLE_RATE>(info.param))
+ + "__" + resamplerQualityToString(std::get<PARAM_RESAMPLER_QUALITY>(info.param));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_pull_later) {
+ checkSampleRateConversionPullLater(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+ std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+ std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_varied_sizes) {
+ checkSampleRateConversionVariedSizes(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+ std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+ std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ test_flowgraph,
+ TestFlowgraphSampleRateConversion,
+ ::testing::Values(
+ TestFlowgraphResamplerParams({8000, 11025, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({8000, 48000, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({8000, 44100, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({11025, 24000, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({11025, 48000,
+ MultiChannelResampler::Quality::Fastest}),
+ TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Low}),
+ TestFlowgraphResamplerParams({11025, 48000,
+ MultiChannelResampler::Quality::Medium}),
+ TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::High}),
+ TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({11025, 44100, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({11025, 88200, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({16000, 48000, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Low}),
+ TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({48000, 11025, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({48000, 44100, MultiChannelResampler::Quality::Best}),
+ TestFlowgraphResamplerParams({44100, 11025, MultiChannelResampler::Quality::Best})),
+ &getTestName
+);
diff --git a/media/libaaudio/tests/test_full_queue.cpp b/media/libaaudio/tests/test_full_queue.cpp
index 12d4fa3..8944d89 100644
--- a/media/libaaudio/tests/test_full_queue.cpp
+++ b/media/libaaudio/tests/test_full_queue.cpp
@@ -17,31 +17,43 @@
// Test whether a stream dies if it is written to after a delay.
// Maybe because the message queue from the AAudio service fills up.
+#define LOG_TAG "test_full_queue"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
#include <stdio.h>
#include <unistd.h>
#include <aaudio/AAudio.h>
#include <gtest/gtest.h>
+#include <cstdlib>
+#include <algorithm>
-constexpr int64_t kNanosPerSecond = 1000000000;
-constexpr int64_t kTimeoutNanos = kNanosPerSecond / 2;
+constexpr int64_t kNanosPerMillisecond = 1e6;
+constexpr int64_t kMicrosPerMillisecond = 1000;
+constexpr int64_t kTimeoutNanos = 50 * kNanosPerMillisecond;
constexpr int kNumFrames = 256;
constexpr int kChannelCount = 2;
+constexpr int kNumSamples = kChannelCount * kNumFrames;
static void checkFullQueue(aaudio_performance_mode_t perfMode,
+ aaudio_sharing_mode_t sharingMode,
int32_t sleepMillis) {
- std::unique_ptr<float[]> buffer = std::make_unique<float[]>(
- kNumFrames * kChannelCount);
+ aaudio_result_t result;
+ std::unique_ptr<float[]> buffer = std::make_unique<float[]>(kNumSamples);
+ for (int i = 0; i < kNumSamples; i++) {
+ buffer[i] = (drand48() - 0.5) * 0.05; // random buzzy waveform
+ }
AAudioStreamBuilder *aaudioBuilder = nullptr;
// Use an AAudioStreamBuilder to contain requested parameters.
ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
- AAudioStreamBuilder_setChannelCount(aaudioBuilder, kChannelCount);
-
// Request stream properties.
+ AAudioStreamBuilder_setChannelCount(aaudioBuilder, kChannelCount);
AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+ AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
// Create an AAudioStream using the Builder.
AAudioStream *aaudioStream = nullptr;
@@ -49,13 +61,26 @@
&aaudioStream));
AAudioStreamBuilder_delete(aaudioBuilder);
+ int bufferSize = std::max(
+ 2 * AAudioStream_getFramesPerBurst(aaudioStream),
+ 2 * kNumFrames
+ );
+ AAudioStream_setBufferSizeInFrames(aaudioStream, bufferSize);
+
EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
- // Sleep for awhile. This might kill the stream.
- usleep(sleepMillis * 1000); // 1000 millis in a microsecond
+#if 0
+ int32_t capacity = AAudioStream_getBufferCapacityInFrames(aaudioStream);
+ ASSERT_LT(20, capacity);
+ int numWrites = 30 * capacity / kNumFrames;
+#else
+ int32_t sampleRate = AAudioStream_getSampleRate(aaudioStream);
+ EXPECT_LT(7000, sampleRate);
+ int numWrites = 1 * sampleRate / kNumFrames;
+#endif
- for (int i = 0; i < 10; i++) {
- const aaudio_result_t result = AAudioStream_write(aaudioStream,
+ for (int i = 0; i < numWrites/2; i++) {
+ result = AAudioStream_write(aaudioStream,
buffer.get(),
kNumFrames,
kTimeoutNanos);
@@ -63,31 +88,100 @@
if (kNumFrames != result) break;
}
+ // Sleep for awhile. This might kill the stream.
+ ALOGD("%s() start sleeping %d millis", __func__, sleepMillis);
+ usleep(sleepMillis * kMicrosPerMillisecond);
+ ALOGD("%s() start writing", __func__);
+
+ // Let CPU catch up with the hardware.
+ int64_t framesRead = AAudioStream_getFramesRead(aaudioStream);
+ int64_t framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+
+ ALOGD("%s() after hang, read = %jd, written = %jd, w-r = %jd",
+ __func__, (intmax_t) framesRead, (intmax_t) framesWritten,
+ (intmax_t)(framesWritten - framesRead));
+ int countDown = 2 * sleepMillis * sampleRate / (kNumFrames * 1000);
+ do {
+ result = AAudioStream_write(aaudioStream,
+ buffer.get(),
+ kNumFrames,
+ kTimeoutNanos);
+
+ ALOGD("%s() catching up, wrote %d frames", __func__, result);
+ framesRead = AAudioStream_getFramesRead(aaudioStream);
+ framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+ countDown--;
+ } while ((framesRead > framesWritten)
+ && (countDown > 0)
+ && (kNumFrames == result));
+ EXPECT_LE(framesRead, framesWritten);
+ EXPECT_GT(countDown, 0);
+ EXPECT_EQ(kNumFrames, result);
+ ALOGD("%s() after catch up, read = %jd, written = %jd, w-r = %jd",
+ __func__, (intmax_t) framesRead, (intmax_t) framesWritten,
+ (intmax_t)(framesWritten - framesRead));
+
+ // Try to keep the stream full.
+ for (int i = 0; i < numWrites; i++) {
+ ALOGD("%s() try to write", __func__);
+ result = AAudioStream_write(aaudioStream,
+ buffer.get(),
+ kNumFrames,
+ kTimeoutNanos);
+ ALOGD("%s() wrote %d frames", __func__, result);
+ EXPECT_EQ(kNumFrames, result);
+ if (kNumFrames != result) break;
+ }
+
EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
}
-TEST(test_full_queue, aaudio_full_queue_perf_none_50) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE, 50 /* sleepMillis */);
+// ==== Default Latency, SHARED ===========
+TEST(test_full_queue, aaudio_full_queue_perf_none_sh_50) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE,
+ AAUDIO_SHARING_MODE_SHARED, 50 /* sleepMillis */);
}
-TEST(test_full_queue, aaudio_full_queue_perf_none_200) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE, 200 /* sleepMillis */);
+TEST(test_full_queue, aaudio_full_queue_perf_none_sh_400) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE,
+ AAUDIO_SHARING_MODE_SHARED, 400 /* sleepMillis */);
}
-TEST(test_full_queue, aaudio_full_queue_perf_none_1000) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE, 1000 /* sleepMillis */);
+TEST(test_full_queue, aaudio_full_queue_perf_none_sh_1000) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_NONE,
+ AAUDIO_SHARING_MODE_SHARED, 1000 /* sleepMillis */);
}
-TEST(test_full_queue, aaudio_full_queue_low_latency_50) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, 50 /* sleepMillis */);
+// ==== Low Latency, SHARED ===========
+TEST(test_full_queue, aaudio_full_queue_low_latency_sh_50) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_SHARED, 50 /* sleepMillis */);
}
-TEST(test_full_queue, aaudio_full_queue_low_latency_200) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, 200 /* sleepMillis */);
+TEST(test_full_queue, aaudio_full_queue_low_latency_sh_400) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_SHARED, 400 /* sleepMillis */);
}
-TEST(test_full_queue, aaudio_full_queue_low_latency_1000) {
- checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, 1000 /* sleepMillis */);
+TEST(test_full_queue, aaudio_full_queue_low_latency_sh_1000) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_SHARED, 1000 /* sleepMillis */);
+}
+
+// ==== Low Latency, EXCLUSIVE ===========
+TEST(test_full_queue, aaudio_full_queue_low_latency_excl_50) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE, 50 /* sleepMillis */);
+}
+
+TEST(test_full_queue, aaudio_full_queue_low_latency_excl_400) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE, 400 /* sleepMillis */);
+}
+
+TEST(test_full_queue, aaudio_full_queue_low_latency_excl_1000) {
+ checkFullQueue(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE, 1000 /* sleepMillis */);
}
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
index 1e4f59c..13e4a20 100644
--- a/media/libaaudio/tests/test_resampler.cpp
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -101,14 +101,20 @@
}
}
+ // Flush out remaining frames from the flowgraph
+ while (!mcResampler->isWriteNeeded()) {
+ mcResampler->readNextFrame(output);
+ output++;
+ numRead++;
+ }
+
ASSERT_LE(numRead, kNumOutputSamples);
// Some frames are lost priming the FIR filter.
- const int kMaxAlgorithmicFrameLoss = 16;
+ const int kMaxAlgorithmicFrameLoss = 5;
EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
- // Some cycles may get chopped off at the end.
- const int kMaxZeroCrossingDelta = 3;
+ const int kMaxZeroCrossingDelta = std::max(sinkRate / sourceRate / 2, 1);
EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
// Detect glitches by looking for spikes in the second derivative.
@@ -136,8 +142,7 @@
TEST(test_resampler, resampler_scan_all) {
- // TODO Add 64000, 88200, 96000 when they work. Failing now.
- const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+ const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
const MultiChannelResampler::Quality qualities[] =
{
MultiChannelResampler::Quality::Fastest,
@@ -193,10 +198,9 @@
checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
}
-// TODO This fails because the output is very low.
-//TEST(test_resampler, resampler_11025_88200_best) {
-// checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
-//}
+TEST(test_resampler, resampler_11025_88200_best) {
+ checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+}
TEST(test_resampler, resampler_16000_48000_best) {
checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 2c9e173..658bf63 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -340,6 +340,7 @@
srcs: [
"aidl/android/media/AudioAttributesEx.aidl",
"aidl/android/media/AudioMix.aidl",
+ "aidl/android/media/AudioMixUpdate.aidl",
"aidl/android/media/AudioMixerAttributesInternal.aidl",
"aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioMixCallbackFlag.aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 5b94845..5bfdd5f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -20,6 +20,7 @@
#include <utils/Log.h>
#include <android/media/IAudioPolicyService.h>
+#include <android/media/AudioMixUpdate.h>
#include <android/media/BnCaptureStateListener.h>
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
@@ -132,12 +133,10 @@
binder = gAudioFlingerBinder;
} else {
sp<IServiceManager> sm = defaultServiceManager();
- do {
- binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
- if (binder != nullptr) break;
- ALOGW("AudioFlinger not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
+ binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+ if (binder == nullptr) {
+ return nullptr;
+ }
}
binder->linkToDeath(gAudioFlingerClient);
const auto afs = interface_cast<media::IAudioFlingerService>(binder);
@@ -880,14 +879,10 @@
Mutex::Autolock _l(gLockAPS);
if (gAudioPolicyService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.audio_policy"));
- if (binder != 0)
- break;
- ALOGW("AudioPolicyService not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
+ sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
+ if (binder == nullptr) {
+ return nullptr;
+ }
if (gAudioPolicyServiceClient == NULL) {
gAudioPolicyServiceClient = new AudioPolicyServiceClient();
}
@@ -1848,6 +1843,27 @@
return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
}
+status_t AudioSystem::updatePolicyMixes(
+ const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
+ mixesWithUpdates) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+
+ std::vector<media::AudioMixUpdate> updatesAidl;
+ updatesAidl.reserve(mixesWithUpdates.size());
+
+ for (const auto& update : mixesWithUpdates) {
+ media::AudioMixUpdate updateAidl;
+ updateAidl.audioMix = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioMix(update.first));
+ RETURN_STATUS_IF_ERROR(convertRange(update.second.begin(), update.second.end(),
+ std::back_inserter(updateAidl.newCriteria),
+ legacy2aidl_AudioMixMatchCriterion));
+ updatesAidl.emplace_back(updateAidl);
+ }
+
+ return statusTFromBinderStatus(aps->updatePolicyMixes(updatesAidl));
+}
+
status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -2103,8 +2119,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>
- & aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<AudioFormatDescription> formatsAidl;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index f050a20..58e0486 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -812,7 +812,7 @@
(void) updateAndGetPosition_l();
// save start timestamp
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
if (getTimestamp_l(mStartTs) != OK) {
mStartTs.mPosition = 0;
}
@@ -833,7 +833,7 @@
mTimestampStaleTimeReported = false;
mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
- if (!isOffloadedOrDirect_l()
+ if (!isAfTrackOffloadedOrDirect_l()
&& mStartEts.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] > 0) {
// Server side has consumed something, but is it finished consuming?
// It is possible since flush and stop are asynchronous that the server
@@ -1912,6 +1912,7 @@
mAfChannelCount = audio_channel_count_from_out_mask(output.afChannelMask);
mAfFormat = output.afFormat;
mAfLatency = output.afLatencyMs;
+ mAfTrackFlags = output.afTrackFlags;
mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
@@ -3177,7 +3178,7 @@
// To avoid a race, read the presented frames first. This ensures that presented <= consumed.
status_t status;
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
// use Binder to get timestamp
media::AudioTimestampInternal ts;
mAudioTrack->getTimestamp(&ts, &status);
@@ -3289,7 +3290,7 @@
ALOGV_IF(status != WOULD_BLOCK, "%s(%d): getTimestamp error:%#x", __func__, mPortId, status);
return status;
}
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
if (isOffloaded_l() && (mState == STATE_PAUSED || mState == STATE_PAUSED_STOPPING)) {
// use cached paused position in case another offloaded track is running.
timestamp.mPosition = mPausedPosition;
@@ -3735,7 +3736,7 @@
// This is conservatively figured - if we encounter an unexpected error
// then we will not wait.
bool wait = false;
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
AudioTimestamp ts;
status_t status = getTimestamp_l(ts);
if (status == WOULD_BLOCK) {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 01edf72..48f8992 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -116,6 +116,8 @@
legacy2aidl_audio_channel_mask_t_AudioChannelLayout(afChannelMask, false /*isInput*/));
aidl.afFormat = VALUE_OR_RETURN(
legacy2aidl_audio_format_t_AudioFormatDescription(afFormat));
+ aidl.afTrackFlags = VALUE_OR_RETURN(
+ legacy2aidl_audio_output_flags_t_int32_t_mask(afTrackFlags));
aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
aidl.audioTrack = audioTrack;
@@ -144,6 +146,8 @@
false /*isInput*/));
legacy.afFormat = VALUE_OR_RETURN(
aidl2legacy_AudioFormatDescription_audio_format_t(aidl.afFormat));
+ legacy.afTrackFlags = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.afTrackFlags));
legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
legacy.audioTrack = aidl.audioTrack;
diff --git a/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl b/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl
new file mode 100644
index 0000000..d481b1c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMix;
+import android.media.AudioMixMatchCriterion;
+
+
+/**
+ * {@hide}
+ */
+parcelable AudioMixUpdate {
+ // Audio mix to update.
+ AudioMix audioMix;
+ // Updated audio mixing rule.
+ AudioMixMatchCriterion[] newCriteria;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index 42e0bb4..ab60461 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -43,6 +43,7 @@
AudioChannelLayout afChannelMask;
AudioFormatDescription afFormat;
int afLatencyMs;
+ int afTrackFlags;
/** Interpreted as audio_io_handle_t. */
int outputId;
/** Interpreted as audio_port_handle_t. */
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 3e9b27f..52c8da0 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -20,6 +20,7 @@
import android.media.AudioDirectMode;
import android.media.AudioMix;
+import android.media.AudioMixUpdate;
import android.media.AudioMixerAttributesInternal;
import android.media.AudioOffloadMode;
import android.media.AudioPatchFw;
@@ -262,6 +263,8 @@
void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
+ void updatePolicyMixes(in AudioMixUpdate[] updates);
+
void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
void removeUidDeviceAffinities(int /* uid_t */ uid);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 0215f3c..a1f7941 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -462,6 +462,10 @@
static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
+ static status_t updatePolicyMixes(
+ const std::vector<
+ std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
+
static status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
static status_t removeUidDeviceAffinities(uid_t uid);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 8f712db..523383f 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1238,6 +1238,11 @@
bool isDirect_l() const
{ return (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0; }
+ bool isAfTrackOffloadedOrDirect_l() const
+ { return isOffloadedOrDirect_l() ||
+ (mAfTrackFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD|
+ AUDIO_OUTPUT_FLAG_DIRECT)) != 0; }
+
// pure pcm data is mixable (which excludes HW_AV_SYNC, with embedded timing)
bool isPurePcmData_l() const
{ return audio_is_linear_pcm(mFormat)
@@ -1295,6 +1300,7 @@
uint32_t mAfSampleRate; // AudioFlinger sample rate
uint32_t mAfChannelCount; // AudioFlinger channel count
audio_format_t mAfFormat; // AudioFlinger format
+ audio_output_flags_t mAfTrackFlags; // AudioFlinger track flags
// constant after constructor or set()
audio_format_t mFormat; // as requested by client, not forced to 16-bit
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index eb27e25..5a1e037 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -119,6 +119,7 @@
uint32_t afLatencyMs;
audio_channel_mask_t afChannelMask;
audio_format_t afFormat;
+ audio_output_flags_t afTrackFlags;
audio_io_handle_t outputId;
audio_port_handle_t portId;
sp<media::IAudioTrack> audioTrack;
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 9a46b20..dc37785 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -481,8 +481,28 @@
AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
std::vector<int32_t>{1, 2}))));
+TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
+ const std::vector<uint8_t> sAnonymizedAidlAddress =
+ std::vector<uint8_t>{0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
+ const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
+ auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ sAnonymizedLegacyAddress);
+ ASSERT_TRUE(device.ok());
+ ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
+ ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
+
+ audio_devices_t legacyType;
+ std::string legacyAddress;
+ status_t status =
+ aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
+ ASSERT_EQ(OK, status);
+ EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
+ EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
+}
+
class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
};
+
TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
const auto initial = GetParam();
auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index fa990b5..c101f00 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -56,7 +56,7 @@
ASSERT_NE(nullptr, ap);
ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
<< "Unable to open Resource";
- EXPECT_EQ(OK, ap->create()) << "track creation failed";
+ ASSERT_EQ(OK, ap->create()) << "track creation failed";
sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cb));
EXPECT_EQ(OK, ap->start()) << "audio track start failed";
@@ -94,7 +94,7 @@
sp<AudioCapture> capture = sp<AudioCapture>::make(
AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
ASSERT_NE(nullptr, capture);
- EXPECT_EQ(OK, capture->create()) << "record creation failed";
+ ASSERT_EQ(OK, capture->create()) << "record creation failed";
sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, capture->getAudioRecordHandle()->addAudioDeviceCallback(cbCapture));
@@ -105,7 +105,7 @@
ASSERT_NE(nullptr, playback);
ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
<< "Unable to open Resource";
- EXPECT_EQ(OK, playback->create()) << "track creation failed";
+ ASSERT_EQ(OK, playback->create()) << "track creation failed";
sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
@@ -187,7 +187,7 @@
ASSERT_NE(nullptr, playback);
ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
<< "Unable to open Resource";
- EXPECT_EQ(OK, playback->create()) << "track creation failed";
+ ASSERT_EQ(OK, playback->create()) << "track creation failed";
sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
@@ -195,7 +195,7 @@
sp<AudioCapture> captureA = sp<AudioCapture>::make(
AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
ASSERT_NE(nullptr, captureA);
- EXPECT_EQ(OK, captureA->create()) << "record creation failed";
+ ASSERT_EQ(OK, captureA->create()) << "record creation failed";
sp<OnAudioDeviceUpdateNotifier> cbCaptureA = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, captureA->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureA));
@@ -206,7 +206,7 @@
AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
AUDIO_INPUT_FLAG_NONE, AUDIO_SESSION_ALLOCATE, AudioRecord::TRANSFER_CALLBACK, &attr);
ASSERT_NE(nullptr, captureB);
- EXPECT_EQ(OK, captureB->create()) << "record creation failed";
+ ASSERT_EQ(OK, captureB->create()) << "record creation failed";
sp<OnAudioDeviceUpdateNotifier> cbCaptureB = sp<OnAudioDeviceUpdateNotifier>::make();
EXPECT_EQ(OK, captureB->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureB));
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 5bc25ae..eecb4bc 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -1116,6 +1116,14 @@
return BAD_VALUE;
}
}
+
+ if (mSoundDose == nullptr) {
+ ALOGE("%s failed to return the sound dose interface for module %s: not implemented",
+ __func__,
+ module.c_str());
+ return NO_INIT;
+ }
+
*soundDoseBinder = mSoundDose->asBinder();
ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 2ba1fc3..085a7e4 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -41,8 +41,8 @@
namespace android {
void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
- info->mWidth = videoFrame->mWidth;
- info->mHeight = videoFrame->mHeight;
+ info->mWidth = videoFrame->mDisplayWidth;
+ info->mHeight = videoFrame->mDisplayHeight;
info->mRotationAngle = videoFrame->mRotationAngle;
info->mBytesPerPixel = videoFrame->mBytesPerPixel;
info->mDurationUs = videoFrame->mDurationUs;
@@ -476,35 +476,37 @@
}
bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
- if (heifColor == (HeifColorFormat)mOutputColor) {
- return true;
- }
-
+ android_pixel_format_t outputColor;
switch(heifColor) {
case kHeifColorFormat_RGB565:
{
- mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+ outputColor = HAL_PIXEL_FORMAT_RGB_565;
break;
}
case kHeifColorFormat_RGBA_8888:
{
- mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+ outputColor = HAL_PIXEL_FORMAT_RGBA_8888;
break;
}
case kHeifColorFormat_BGRA_8888:
{
- mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+ outputColor = HAL_PIXEL_FORMAT_BGRA_8888;
break;
}
case kHeifColorFormat_RGBA_1010102:
{
- mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+ outputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
break;
}
default:
ALOGE("Unsupported output color format %d", heifColor);
return false;
}
+ if (outputColor == mOutputColor) {
+ return true;
+ }
+
+ mOutputColor = outputColor;
if (mFrameDecoded) {
return reinit(nullptr);
@@ -740,8 +742,11 @@
// Either document why it is safe in this case or address the
// issue (e.g. by copying).
VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->unsecurePointer());
- uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
- memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mWidth);
+ uint8_t* src = videoFrame->getFlattenedData() +
+ (videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
+ (videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
+ mCurScanline++;
+ memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
return true;
}
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index c43ef66..f498453 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -38,16 +38,10 @@
Mutex::Autolock _l(sServiceLock);
if (sMediaPlayerService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.player"));
- if (binder != 0) {
- break;
- }
- ALOGW("Media player service not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
-
+ sp<IBinder> binder = sm->waitForService(String16("media.player"));
+ if (binder == nullptr) {
+ return nullptr;
+ }
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 2ae76b3..40fd022 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -41,14 +41,10 @@
if (sService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.player"));
- if (binder != 0) {
- break;
- }
- ALOGW("MediaPlayerService not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
+ binder = sm->waitForService(String16("media.player"));
+ if (binder == nullptr) {
+ return nullptr;
+ }
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index 2240223..26fe306 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -87,7 +87,7 @@
}
void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
- const std::string &string) {
+ const std::string &string) {
mediametrics_setCString(handle, attr, string.c_str());
}
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index f80a467..26aa375 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -184,6 +184,7 @@
#define AMEDIAMETRICS_PROP_PLAYERIID "playerIId" // int32 (-1 invalid/unset IID)
#define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
#define AMEDIAMETRICS_PROP_SAMPLERATE "sampleRate" // int32
+#define AMEDIAMETRICS_PROP_SAMPLERATECLIENT "sampleRateClient" // int32
#define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
#define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
#define AMEDIAMETRICS_PROP_SELECTEDMICDIRECTION "selectedMicDirection" // int32
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 84d772d..605e659 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -672,6 +672,18 @@
if (trackMeta->findInt32(kKeyWidth, &imageWidth)
&& trackMeta->findInt32(kKeyHeight, &imageHeight)) {
imagePrimary = imageCount;
+ int32_t displayLeft;
+ int32_t displayTop;
+ int32_t displayRight;
+ int32_t displayBottom;
+ if (trackMeta->findRect(kKeyCropRect, &displayLeft, &displayTop,
+ &displayRight, &displayBottom)
+ && displayLeft >= 0 && displayTop >= 0 && displayRight < imageWidth
+ && displayBottom < imageHeight && displayLeft <= displayRight
+ && displayTop <= displayBottom) {
+ imageWidth = displayRight - displayLeft + 1;
+ imageHeight = displayBottom - displayTop + 1;
+ }
} else {
ALOGE("primary image track ignored for missing dimensions");
}
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index c3d6c89..b511372 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -174,6 +174,7 @@
"libnetd_client",
"libpowermanager",
"libstagefright_httplive",
+ "libaudiohal@7.0",
],
}
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 89e9806..434ae00 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -49,7 +49,7 @@
defaults: ["libnbaio_mono_defaults"],
}
-cc_library_shared {
+cc_library {
name: "libnbaio",
defaults: ["libnbaio_mono_defaults"],
srcs: [
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a26fcbe..0af9d12 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -270,10 +270,10 @@
"SurfaceUtils.cpp",
"ThrottledSource.cpp",
"Utils.cpp",
- "VideoRenderQualityTracker.cpp",
"VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
- ],
+ "VideoRenderQualityTracker.cpp",
+ ],
shared_libs: [
"libstagefright_framecapture_utils",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 57937f9..1a0bb7f 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -86,6 +86,22 @@
displayWidth = width;
displayHeight = height;
}
+ int32_t displayLeft = 0;
+ int32_t displayTop = 0;
+ int32_t displayRight;
+ int32_t displayBottom;
+ if (trackMeta->findRect(kKeyCropRect, &displayLeft, &displayTop, &displayRight,
+ &displayBottom)) {
+ if (displayLeft >= 0 && displayTop >= 0 && displayRight < width && displayBottom < height &&
+ displayLeft <= displayRight && displayTop <= displayBottom) {
+ displayWidth = displayRight - displayLeft + 1;
+ displayHeight = displayBottom - displayTop + 1;
+ } else {
+ // Crop rectangle is invalid, use the whole frame.
+ displayLeft = 0;
+ displayTop = 0;
+ }
+ }
if (allocRotated) {
if (rotationAngle == 90 || rotationAngle == 270) {
@@ -108,8 +124,8 @@
}
}
- VideoFrame frame(width, height, displayWidth, displayHeight,
- tileWidth, tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
+ VideoFrame frame(width, height, displayWidth, displayHeight, displayLeft, displayTop, tileWidth,
+ tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
size_t size = frame.getFlattenedSize();
sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 9e7eac9..4a44eb0 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,7 +31,6 @@
#include "include/SoftwareRenderer.h"
#include <android/api-level.h>
-#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -74,7 +73,6 @@
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
-#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
@@ -245,7 +243,7 @@
"android.media.mediacodec.judder-score-histogram-buckets";
// Freeze event
static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
-static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventKeyName = "videofreeze";
static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
@@ -257,7 +255,7 @@
"android.media.mediacodec.freeze.details-distance-ms";
// Judder event
static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
-static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventKeyName = "videojudder";
static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index e920bd1..aca20a4 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -15,7 +15,11 @@
*/
#define LOG_TAG "VideoRenderQualityTracker"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+
#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Mutex.h>
#include <media/stagefright/VideoRenderQualityTracker.h>
@@ -24,9 +28,12 @@
#include <cmath>
#include <stdio.h>
#include <sys/time.h>
+#include <sys/wait.h>
+#include <android-base/macros.h>
#include <android-base/parsebool.h>
#include <android-base/parseint.h>
+#include <android-base/properties.h>
namespace android {
@@ -38,6 +45,7 @@
typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
GetServerConfigurableFlagFn;
+typedef VideoRenderQualityTracker::TraceTriggerFn TraceTriggerFn;
static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
char const *flagNameSuffix, bool *value) {
@@ -149,6 +157,10 @@
getFlag(judderEventMax, "judder_event_max");
getFlag(judderEventDetailsMax, "judder_event_details_max");
getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+ getFlag(traceTriggerEnabled, "trace_trigger_enabled");
+ getFlag(traceTriggerThrottleMs, "trace_trigger_throttle_ms");
+ getFlag(traceMinFreezeDurationMs, "trace_minimum_freeze_duration_ms");
+ getFlag(traceMaxFreezeDurationMs, "trace_maximum_freeze_duration_ms");
#undef getFlag
return c;
}
@@ -186,15 +198,25 @@
judderEventMax = 0; // enabled only when debugging
judderEventDetailsMax = 20;
judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
+
+ // Perfetto trigger configuration.
+ traceTriggerEnabled = android::base::GetProperty(
+ "ro.build.type", "user") != "user"; // Enabled for non-user builds for debugging.
+ traceTriggerThrottleMs = 5 * 60 * 1000; // 5 mins.
+ traceMinFreezeDurationMs = 400;
+ traceMaxFreezeDurationMs = 1500;
}
-VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
+VideoRenderQualityTracker::VideoRenderQualityTracker()
+ : mConfiguration(Configuration()), mTraceTriggerFn(triggerTrace) {
configureHistograms(mMetrics, mConfiguration);
clear();
}
-VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
- mConfiguration(configuration) {
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration,
+ const TraceTriggerFn traceTriggerFn)
+ : mConfiguration(configuration),
+ mTraceTriggerFn(traceTriggerFn == nullptr ? triggerTrace : traceTriggerFn) {
configureHistograms(mMetrics, mConfiguration);
clear();
}
@@ -231,6 +253,11 @@
resetIfDiscontinuity(contentTimeUs, -1);
+ if (mTraceFrameSkippedToken == -1) {
+ mTraceFrameSkippedToken = contentTimeUs;
+ ATRACE_ASYNC_BEGIN("Video frame(s) skipped", mTraceFrameSkippedToken);
+ }
+
// Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
// app could be terminating the playback. The pending count will be added to the metrics if and
// when the next frame is rendered.
@@ -261,11 +288,25 @@
return;
}
+ if (mTraceFrameSkippedToken != -1) {
+ ATRACE_ASYNC_END("Video frame(s) skipped", mTraceFrameSkippedToken);
+ mTraceFrameSkippedToken = -1;
+ }
+
int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
if (mLastRenderTimeUs != -1) {
- mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
+ int64_t frameRenderDurationMs = (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
+ mRenderDurationMs += frameRenderDurationMs;
+ if (mConfiguration.traceTriggerEnabled
+ // Threshold for visible video freeze.
+ && frameRenderDurationMs >= mConfiguration.traceMinFreezeDurationMs
+ // Threshold for removing long render durations which could be video pause.
+ && frameRenderDurationMs < mConfiguration.traceMaxFreezeDurationMs) {
+ triggerTraceWithThrottle(mTraceTriggerFn, mConfiguration, actualRenderTimeUs);
+ }
}
+
// Now that a frame has been rendered, the previously skipped frames can be processed as skipped
// frames since the app is not skipping them to terminate playback.
for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
@@ -738,4 +779,51 @@
return false;
}
+void VideoRenderQualityTracker::triggerTraceWithThrottle(const TraceTriggerFn traceTriggerFn,
+ const Configuration &c,
+ const int64_t triggerTimeUs) {
+ static int64_t lastTriggerUs = -1;
+ static Mutex updateLastTriggerLock;
+
+ Mutex::Autolock autoLock(updateLastTriggerLock);
+ if (lastTriggerUs != -1) {
+ int32_t sinceLastTriggerMs = int32_t((triggerTimeUs - lastTriggerUs) / 1000);
+ // Throttle the trace trigger calls to reduce continuous PID fork calls in a short time
+ // to impact device performance, and reduce spamming trace reports.
+ if (sinceLastTriggerMs < c.traceTriggerThrottleMs) {
+ ALOGI("Not triggering trace - not enough time since last trigger");
+ return;
+ }
+ }
+ lastTriggerUs = triggerTimeUs;
+ (*traceTriggerFn)();
+}
+
+void VideoRenderQualityTracker::triggerTrace() {
+ // Trigger perfetto to stop always-on-tracing (AOT) to collect trace into a file for video
+ // freeze event, the collected trace categories are configured by AOT.
+ static const char* args[] = {"/system/bin/trigger_perfetto",
+ "com.android.codec-video-freeze", NULL};
+
+ pid_t pid = fork();
+ if (pid < 0) {
+ ALOGI("Failed to fork for triggering trace");
+ } else if (pid == 0) {
+ // Child process.
+ ALOGI("Trigger trace %s", args[1]);
+ execvp(args[0], const_cast<char**>(args));
+ ALOGW("Failed to trigger trace %s", args[1]);
+ _exit(1);
+ } else {
+ // Parent process.
+ int status;
+ // Wait for the child process (pid) gets terminated, and allow the system to release
+ // the resource associated with the child. Or the child process will remain in a
+ // zombie state and get killed by llkd to cause foreground app crash.
+ if (waitpid(pid, &status, 0) < 0) {
+ ALOGW("Failed to waitpid for triggering trace");
+ }
+ }
+}
+
} // namespace android
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index a4b3e2f..e091cb8 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -286,9 +286,11 @@
return;
}
- // decoder deals in ms, OMX in us.
- outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp);
- mPvToOmxTimeMap.removeItem(timestamp);
+ if (mPvToOmxTimeMap.indexOfKey(timestamp) >= 0) {
+ // decoder deals in ms, OMX in us.
+ outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp);
+ mPvToOmxTimeMap.removeItem(timestamp);
+ }
inHeader->nOffset += bufferSize;
inHeader->nFilledLen = 0;
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index a464504..946d533 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -72,8 +72,8 @@
void setCrypto(const sp<ICrypto> &crypto) override;
void setDescrambler(const sp<IDescrambler> &descrambler) override;
- virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
- virtual status_t queueSecureInputBuffer(
+ status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+ status_t queueSecureInputBuffer(
const sp<MediaCodecBuffer> &buffer,
bool secure,
const uint8_t *key,
@@ -83,10 +83,10 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) override;
- virtual status_t attachBuffer(
+ status_t attachBuffer(
const std::shared_ptr<C2Buffer> &c2Buffer,
const sp<MediaCodecBuffer> &buffer) override;
- virtual status_t attachEncryptedBuffer(
+ status_t attachEncryptedBuffer(
const sp<hardware::HidlMemory> &memory,
bool secure,
const uint8_t *key,
@@ -98,12 +98,12 @@
size_t numSubSamples,
const sp<MediaCodecBuffer> &buffer,
AString* errorDetailMsg) override;
- virtual status_t renderOutputBuffer(
+ status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
- virtual void pollForRenderedBuffers() override;
- virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
- virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
- virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+ void pollForRenderedBuffers() override;
+ status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+ void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+ void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
// Methods below are interface for ACodec to use.
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
index 50fa258..46ee288 100644
--- a/media/libstagefright/include/media/stagefright/MediaHistogram.h
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -29,11 +29,11 @@
public:
MediaHistogram();
void clear();
- bool setup(int bucketCount, T width, T floor = 0);
+ bool setup(size_t bucketCount, T width, T floor = 0);
bool setup(const std::vector<T> &bucketLimits);
void insert(T sample);
- size_t size();
- int64_t operator[](int);
+ size_t size() const;
+ int64_t operator[](int) const;
T getMin() const { return mMin; }
T getMax() const { return mMax; }
T getCount() const { return mCount; }
@@ -45,7 +45,7 @@
private:
MediaHistogram(const MediaHistogram &); // disallow
- bool allocate(int bucketCount, bool withBucketLimits);
+ void allocate(size_t bucketCount, bool withBucketLimits);
T mFloor, mCeiling, mWidth;
T mMin, mMax, mSum;
@@ -73,13 +73,12 @@
}
template<typename T>
-bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+bool MediaHistogram<T>::setup(size_t bucketCount, T width, T floor) {
if (bucketCount <= 0 || width <= 0) {
return false;
}
- if (!allocate(bucketCount, false)) {
- return false;
- }
+ allocate(bucketCount, false);
+
mWidth = width;
mFloor = floor;
mCeiling = floor + bucketCount * width;
@@ -92,14 +91,14 @@
if (bucketLimits.size() <= 1) {
return false;
}
- int bucketCount = bucketLimits.size() - 1;
- if (!allocate(bucketCount, true)) {
- return false;
- }
+ // The floor is the first bucket limit value, so offset by 1
+ size_t bucketCount = bucketLimits.size() - 1;
+ allocate(bucketCount, true);
mWidth = -1;
mFloor = bucketLimits[0];
- for (int i = 0; i < bucketCount; ++i) {
+ for (size_t i = 0; i < bucketCount; ++i) {
+ // The floor is the first bucket, so offset by 1
mBucketLimits[i] = bucketLimits[i + 1];
}
mCeiling = bucketLimits[bucketCount];
@@ -108,7 +107,7 @@
}
template<typename T>
-bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+void MediaHistogram<T>::allocate(size_t bucketCount, bool withBucketLimits) {
assert(bucketCount > 0);
if (bucketCount != mBuckets.size()) {
mBuckets = std::vector<T>(bucketCount, 0);
@@ -116,7 +115,6 @@
if (withBucketLimits && mBucketLimits.size() != bucketCount) {
mBucketLimits = std::vector<T>(bucketCount, 0);
}
- return true;
}
template<typename T>
@@ -128,8 +126,8 @@
mCount++;
mSum += sample;
- if (mMin > sample) mMin = sample;
- if (mMax < sample) mMax = sample;
+ mMin = std::min(mMin, sample);
+ mMax = std::max(mMax, sample);
if (sample < mFloor) {
mBelow++;
@@ -138,7 +136,7 @@
} else if (mWidth == -1) {
// A binary search might be more efficient for large number of buckets, but it is expected
// that there will never be a large amount of buckets, so keep the code simple.
- for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+ for (size_t slot = 0; slot < mBucketLimits.size(); ++slot) {
if (sample < mBucketLimits[slot]) {
mBuckets[slot]++;
break;
@@ -153,12 +151,12 @@
}
template<typename T>
-size_t MediaHistogram<T>::size() {
+size_t MediaHistogram<T>::size() const {
return mBuckets.size() + 1;
}
template<typename T>
-int64_t MediaHistogram<T>::operator[](int i) {
+int64_t MediaHistogram<T>::operator[](int i) const {
assert(i >= 0);
assert(i <= mBuckets.size());
if (i == mBuckets.size()) {
@@ -179,7 +177,7 @@
} else {
ss << mFloor << "," << mWidth << "," << mBelow << "{";
}
- for (int i = 0; i < mBuckets.size(); i++) {
+ for (size_t i = 0; i < mBuckets.size(); i++) {
if (i != 0) {
ss << ",";
}
@@ -194,12 +192,12 @@
std::stringstream ss("");
if (mWidth == -1) {
ss << mFloor;
- for (int i = 0; i < mBucketLimits.size(); ++i) {
+ for (size_t i = 0; i < mBucketLimits.size(); ++i) {
ss << ',' << mBucketLimits[i];
}
} else {
ss << mFloor;
- for (int i = 1; i <= mBuckets.size(); ++i) {
+ for (size_t i = 1; i <= mBuckets.size(); ++i) {
ss << ',' << (mFloor + i * mWidth);
}
}
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index a656e6e..cf53f27 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -200,6 +200,21 @@
// The maximum distance in time between two judder occurrences such that both will be
// lumped into the same judder event.
int32_t judderEventDistanceToleranceMs;
+ //
+ // Whether or not Perfetto trace trigger is enabled.
+ bool traceTriggerEnabled;
+ //
+ // The throttle time for Perfetto trace trigger to avoid triggering multiple traces for
+ // the same event in a short time.
+ int32_t traceTriggerThrottleMs;
+ //
+ // The minimum frame render duration to recognize video freeze event to collect trace.
+ int32_t traceMinFreezeDurationMs;
+ //
+ // The maximum frame render duration to recognize video freeze event. A frame render
+ // duration that is larger than the max duration would not trigger trace collection for
+ // video freeze because it's highly possible a video pause.
+ int32_t traceMaxFreezeDurationMs;
};
struct FreezeEvent {
@@ -256,8 +271,11 @@
Details details;
};
+ typedef void (*TraceTriggerFn)();
+
VideoRenderQualityTracker();
- VideoRenderQualityTracker(const Configuration &configuration);
+ VideoRenderQualityTracker(const Configuration &configuration,
+ const TraceTriggerFn traceTriggerFn = nullptr);
// Called when a tunnel mode frame has been queued.
void onTunnelFrameQueued(int64_t contentTimeUs);
@@ -376,6 +394,14 @@
JudderEvent &e, const VideoRenderQualityMetrics & m,
const Configuration &c, JudderEvent *judderEventOut);
+ // Trigger trace collection for video freeze.
+ static void triggerTrace();
+
+ // Trigger collection of a Perfetto Always-On-Tracing (AOT) trace file for video freeze,
+ // triggerTimeUs is used as a throttle to avoid triggering multiple traces in a short time.
+ static void triggerTraceWithThrottle(TraceTriggerFn traceTriggerFn,
+ const Configuration &c, const int64_t triggerTimeUs);
+
// Check to see if a discontinuity has occurred by examining the content time and the
// app-desired render time. If so, reset some internal state.
bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
@@ -394,6 +420,9 @@
// Configurable elements of the metrics algorithms.
const Configuration mConfiguration;
+ // The function for triggering trace collection for video freeze.
+ const TraceTriggerFn mTraceTriggerFn;
+
// Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
VideoRenderQualityMetrics mMetrics;
@@ -445,6 +474,9 @@
// Frame durations derived from timestamps captured by the display subsystem, indicating the
// wall clock atime at which the frame is actually rendered.
FrameDurationUs mActualFrameDurationUs;
+
+ // Token of async atrace for video frame dropped/skipped by the app.
+ int64_t mTraceFrameSkippedToken= -1;
};
} // namespace android
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c82a303..959f43e 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -102,6 +102,21 @@
namespace android {
+static bool isValidOmxParamSize(const void *params, OMX_U32 size) {
+ // expect the vector to contain at least the size and version, two OMX_U32 entries.
+ if (size < 2 * sizeof(OMX_U32)) {
+ return false;
+ }
+
+ // expect the vector to be as large as the declared size
+ OMX_U32 *buf = (OMX_U32 *)params;
+ OMX_U32 declaredSize = *(OMX_U32*)buf;
+ if (declaredSize > size) {
+ return false;
+ }
+ return true;
+}
+
struct BufferMeta {
explicit BufferMeta(
const sp<IMemory> &mem, const sp<IHidlMemory> &hidlMemory,
@@ -688,6 +703,18 @@
status_t OMXNodeInstance::getParameter(
OMX_INDEXTYPE index, void *params, size_t size) {
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ if (extIndex == OMX_IndexParamConsumerUsageBits) {
+ // expect the size to be 4 bytes for OMX_IndexParamConsumerUsageBits
+ if (size != sizeof(OMX_U32)) {
+ return BAD_VALUE;
+ }
+ } else {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -699,7 +726,6 @@
}
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
- OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
// some errors are expected for getParameter
if (err != OMX_ErrorNoMore) {
CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
@@ -710,6 +736,10 @@
status_t OMXNodeInstance::setParameter(
OMX_INDEXTYPE index, const void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -736,6 +766,9 @@
status_t OMXNodeInstance::getConfig(
OMX_INDEXTYPE index, void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -759,6 +792,10 @@
status_t OMXNodeInstance::setConfig(
OMX_INDEXTYPE index, const void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index e853da9..4183023 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -616,6 +616,10 @@
DescribeHDR10PlusInfoParams* outParams =
(DescribeHDR10PlusInfoParams *)params;
+ if (!isValidOMXParam(outParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
outParams->nParamSizeUsed = info->size();
// If the buffer provided by the client does not have enough
@@ -694,6 +698,10 @@
const DescribeHDR10PlusInfoParams* inParams =
(DescribeHDR10PlusInfoParams *)params;
+ if (!isValidOMXParam(inParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (*frameConfig) {
// This is a request to append to the current frame config set.
// For now, we only support kDescribeHdr10PlusInfoIndex, which
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
new file mode 100644
index 0000000..a2791ba
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -0,0 +1,89 @@
+/*
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at:
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+cc_defaults {
+ name: "libstagefright_rtsp_fuzzer_defaults",
+ shared_libs: [
+ "liblog",
+ "libmedia",
+ "libutils",
+ "libstagefright_foundation",
+ ],
+ static_libs: [
+ "libdatasource",
+ "libstagefright_rtsp",
+ ],
+ header_libs: [
+ "libstagefright_rtsp_headers",
+ ],
+ fuzz_config:{
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_fuzz {
+ name: "sdploader_fuzzer",
+ srcs: [
+ "sdploader_fuzzer.cpp",
+ ],
+ defaults: [
+ "libstagefright_rtsp_fuzzer_defaults",
+ ]
+}
+
+cc_fuzz {
+ name: "rtp_writer_fuzzer",
+ srcs: [
+ "rtp_writer_fuzzer.cpp",
+ ],
+ defaults: [
+ "libstagefright_rtsp_fuzzer_defaults",
+ ],
+ shared_libs:[
+ "libandroid_net",
+ "libbase",
+ "libstagefright",
+ "libcutils",
+ ],
+}
+
+cc_fuzz {
+ name: "packet_source_fuzzer",
+ srcs: [
+ "packet_source_fuzzer.cpp",
+ ],
+ defaults: [
+ "libstagefright_rtsp_fuzzer_defaults",
+ ],
+}
+
+cc_fuzz {
+ name: "rtsp_connection_fuzzer",
+ srcs: [
+ "rtsp_connection_fuzzer.cpp",
+ ],
+ shared_libs: [
+ "libcrypto",
+ "libcutils",
+ "libnetd_client",
+ ],
+ defaults: [
+ "libstagefright_rtsp_fuzzer_defaults",
+ ],
+}
diff --git a/media/libstagefright/rtsp/fuzzer/README.md b/media/libstagefright/rtsp/fuzzer/README.md
new file mode 100644
index 0000000..bc7be29
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/README.md
@@ -0,0 +1,117 @@
+# Fuzzers for libstagefright_rtsp
+
+## Table of contents
++ [sdploader_fuzzer](#SDPLoader)
++ [rtp_writer_fuzzer](#ARTPWriter)
++ [packet_source_fuzzer](#packetSource)
++ [rtsp_connection_fuzzer](#ARTSPConnection)
+
+# <a name="SDPLoader"></a> Fuzzer for SDPLoader
+
+SDPLoader supports the following parameters:
+1. Flag (parameter name: "flags")
+2. URL (parameter name: "url")
+3. Header (parameter name: "headers")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`flags`| `UINT32_MIN` to `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`url`| `String` |Value obtained from FuzzedDataProvider|
+|`headers`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) sdploader_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/sdploader_fuzzer/sdploader_fuzzer
+```
+
+# <a name="ARTPWriter"></a> Fuzzer for ARTPWriter
+
+ARTPWriter supports the following parameters:
+1. File descriptor (parameter name: "fd")
+2. Local Ip (parameter name: "localIp")
+3. Local Port (parameter name: "localPort")
+4. Remote Ip (parameter name: "remoteIp")
+5. Remote Port (parameter name: "remotePort")
+6. Sequence No (parameter name: "seqNo")
+7. OpponentID (parameter name: "opponentID")
+8. Bit Rate (parameter name: "bitrate")
+9. kKeyMIMETypeArray (parameter name: "mimeType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`localIp`| `String` |Value obtained from FuzzedDataProvider|
+|`localPort`| `UINT32_MIN` to `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`remoteIp`| `String` |Value obtained from FuzzedDataProvider|
+|`remotePort`| `UINT32_MIN` to `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`seqNo`| `0` to `10000000` |Value obtained from FuzzedDataProvider|
+|`opponentID`| `UINT32_MIN` to `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`bitrate`| `UINT32_MIN` to `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`mimeType`| 0. `MEDIA_MIMETYPE_VIDEO_AVC`<br> 1. `MEDIA_MIMETYPE_VIDEO_HEVC`<br> 2. `MEDIA_MIMETYPE_VIDEO_H263`<br> 3. `MEDIA_MIMETYPE_AUDIO_AMR_NB`<br> 4. `MEDIA_MIMETYPE_AUDIO_AMR_WB`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) rtp_writer_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/rtp_writer_fuzzer/rtp_writer_fuzzer
+```
+
+# <a name="packetSource"></a> Fuzzer for PacketSource
+
+PacketSource supports the following parameters:
+1. Codec (parameter name: "kCodecs")
+2. Format (parameter name: "kFmtp")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kCodecs`| 0. `opus`<br/>1. `ISAC`<br/>2. `VP8`<br/>3. `google-data`<br/>4. `G722`<br/>5. `PCMU`<br/>6. `PCMA`<br/>7. `CN`<br/>8. `telephone-event`<br/>9. `VP9`<br/>10. `red`<br/>11. `ulpfec`<br/>12. `rtx`<br/>13. `H264`<br/>14. `iLBC`<br/>15. `H261`<br/>16. `MPV`<br/>17. `H263`<br/>18. `AMR`<br/>19. `AC3`<br/>20. `G723`<br/>21. `G729A`<br/>22. `MP4V-ES`<br/>23. `H265`<br/>24. `H263-2000`<br/>25. `H263-1998`<br/>26. `AMR-WB`<br/>27. `MP4A-LATM`<br/>28. `MP2T`<br/>29. `mpeg4-generic` |Value obtained from FuzzedDataProvider|
+|`kFmtp`| <br/>0. `br=`<br/>1. `bw=`<br/>2. `ch-aw-recv=`<br/>3. `mode-change-capability=`<br/>4. `max-red =`<br/>5. `octet-align=`<br/>6. `mode-change-capability=`<br/>7. `profile-level-id=`<br/>8. `packetization-mode=`<br/>9. `profile=`<br/>10. `level=` <br/>11. `apt=`<br/>12. `annexb=`<br/>13. `protocol=`<br/>14. `config=`<br/>15. `streamtype=`<br/>16. `mode=`<br/>17. `sizelength=`<br/>18. `indexlength=`<br/>19. `indexdeltalength=`<br/>20. `minptime=`<br/>21. `useinbandfec=`<br/>22. `maxplaybackrate=`<br/>23. `stereo=`<br/>24. `level-asymmetry-allowed=`<br/>25. `max-fs=`<br/>26. `max-fr=`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) packet_source_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/packet_source_fuzzer/packet_source_fuzzer
+```
+
+# <a name="ARTSPConnection"></a> Fuzzer for ARTSPConnection
+
+## Design Considerations
+This fuzzer aims at covering ARTSPConnection.cpp. A server is implemented in the fuzzer. The server accepts incoming connections and handles each of them in a separate thread. The threads are maintained in a ThreadPool which limits the maximum number of threads alive at a time. When the fuzzer process ends, all the threads in the ThreadPool are joined to the main thread.
+The inputs to the server are generated using FuzzedDataProvider and stored in a variable 'mFuzzData'. As this variable is shared among multiple threads, mutex is used to ensure synchronization.
+### Fuzzer Inputs:
+The inputs generated in the fuzzer using FuzzedDataProvider have been randomized as much as possible. Due to the constraints in the module source code, the inputs have to be limited and arranged in some specific format.
+
+ARTSPConnection supports the following parameters:
+1. Authentication Type (parameter name: "kAuthType")
+2. FuzzData (parameter name: "mFuzzData")
+3. RequestData (parameter name: "mFuzzRequestData")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kAuthType`| 0.`Basic`<br/>1.`Digest`|Value obtained from FuzzedDataProvider|
+|`mFuzzData`| `String` |Value obtained from FuzzedDataProvider|
+|`mFuzzRequestData`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) rtsp_connection_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/rtsp_connection_fuzzer/rtsp_connection_fuzzer
diff --git a/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
new file mode 100644
index 0000000..a3d7535
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
+
+using namespace android;
+
+static constexpr int32_t kMinValue = 0;
+static constexpr int32_t kMaxIPAddress = 255;
+static constexpr int32_t kMaxFmt = 255;
+static constexpr int32_t kMinAPICase = 0;
+static constexpr int32_t kMaxPacketSourceAPI = 5;
+static constexpr size_t kMinIndex = 1;
+static constexpr size_t kMaxCodecConfigs = 4;
+
+std::string kCodecs[] = {"opus", "ISAC", "VP8",
+ "google-data", "G722", "PCMU",
+ "PCMA", "CN", "telephone-event",
+ "VP9", "red", "ulpfec",
+ "rtx", "H264", "iLBC",
+ "H261", "MPV", "H263",
+ "AMR", "AC3", "G723",
+ "G729A", "H264", "MP4V-ES",
+ "H265", "H263-2000", "H263-1998",
+ "AMR", "AMR-WB", "MP4A-LATM",
+ "MP2T", "mpeg4-generic"};
+
+std::string kFmtp[] = {"br=",
+ "bw=",
+ "ch-aw-recv=",
+ "mode-change-capability=",
+ "max-red =",
+ "octet-align=",
+ "mode-change-capability=",
+ "max-red=",
+ "profile-level-id=",
+ "packetization-mode=",
+ "profile=",
+ "level=",
+ "apt=",
+ "annexb=",
+ "protocol=",
+ "streamtype=",
+ "mode=",
+ "sizelength=",
+ "indexlength=",
+ "indexdeltalength=",
+ "minptime=",
+ "useinbandfec=",
+ "maxplaybackrate=",
+ "stereo=",
+ "level-asymmetry-allowed=",
+ "max-fs=",
+ "max-fr="};
+
+std::string kCodecConfigString[kMaxCodecConfigs][2] = {{"H264", "profile-level-id="},
+ {"MP4A-LATM", "config="},
+ {"MP4V-ES", "config="},
+ {"mpeg4-generic", "mode="}};
+
+class ASessionPacketFuzzer {
+ public:
+ ASessionPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+ void process();
+
+ private:
+ FuzzedDataProvider mFdp;
+};
+
+bool checkFormatSupport(const std::string& codec, const std::string& format) {
+ for (int i = 0; i < kMaxCodecConfigs; ++i) {
+ if (codec == kCodecConfigString[i][0]) {
+ if (format == kCodecConfigString[i][1]) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+void ASessionPacketFuzzer::process() {
+ AString inputString;
+ const sp<ASessionDescription> sessionPacket = sp<ASessionDescription>::make();
+ std::string codec = mFdp.PickValueInArray(kCodecs);
+ std::string ipAddress =
+ std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+ std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+ std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." + "0";
+ std::string format = mFdp.PickValueInArray(kFmtp);
+ std::string fmptStr = format + std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt)) +
+ ";" + mFdp.PickValueInArray(kFmtp) +
+ std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt));
+ sessionPacket->SDPStringFactory(
+ inputString, ipAddress.c_str() /* ip */, mFdp.ConsumeBool() /* isAudio */,
+ mFdp.ConsumeIntegral<unsigned int>() /* port */,
+ mFdp.ConsumeIntegral<unsigned int>() /* payloadType */,
+ mFdp.ConsumeIntegral<unsigned int>() /* as */, codec.c_str(), /* codec */
+ fmptStr.c_str() /* fmtp */, mFdp.ConsumeIntegral<int32_t>() /* width */,
+ mFdp.ConsumeIntegral<int32_t>() /* height */,
+ mFdp.ConsumeIntegral<int32_t>() /* cvoExtMap */);
+ sessionPacket->setTo(inputString.c_str(), inputString.size());
+ size_t trackSize = sessionPacket->countTracks();
+ AString desc = nullptr;
+ while (mFdp.remaining_bytes()) {
+ int32_t packetSourceAPI =
+ mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxPacketSourceAPI);
+ switch (packetSourceAPI) {
+ case 0: {
+ unsigned long payload = 0;
+ AString params = nullptr;
+ sessionPacket->getFormatType(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+ &payload, &desc, ¶ms);
+ break;
+ }
+ case 1: {
+ int32_t width, height;
+ unsigned long payload = mFdp.ConsumeIntegral<unsigned long>();
+ sessionPacket->getDimensions(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+ payload, &width, &height);
+ break;
+ }
+ case 2: {
+ int32_t cvoExtMap = mFdp.ConsumeIntegral<int32_t>();
+ sessionPacket->getCvoExtMap(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+ &cvoExtMap);
+ break;
+ }
+ case 3: {
+ int64_t durationUs = mFdp.ConsumeIntegral<int64_t>();
+ sessionPacket->getDurationUs(&durationUs);
+ break;
+ }
+ case 4: {
+ int32_t timeScale, numChannels;
+ if (desc != nullptr) {
+ sessionPacket->ParseFormatDesc(desc.c_str(), &timeScale, &numChannels);
+ }
+ break;
+ }
+ case 5: {
+ if (checkFormatSupport(codec, format)) {
+ sp<APacketSource> packetSource = sp<APacketSource>::make(
+ sessionPacket, mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1));
+ }
+ break;
+ }
+ }
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ ASessionPacketFuzzer packetSourceFuzzer(data, size);
+ packetSourceFuzzer.process();
+ return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
new file mode 100644
index 0000000..8d9f923
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/ARTPWriter.h>
+
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 65536;
+constexpr int32_t kMaxTime = 1000;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kAMRNBFrameSizes[] = {13, 14, 16, 18, 20, 21, 27, 32};
+constexpr int32_t kAMRWBFrameSizes[] = {18, 24, 33, 37, 41, 47, 51, 59, 61};
+constexpr int32_t kAMRIndexOffset = 8;
+
+using namespace android;
+
+const char* kKeyMimeTypeArray[] = {MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_HEVC,
+ MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AMR_NB,
+ MEDIA_MIMETYPE_AUDIO_AMR_WB};
+
+struct TestMediaSource : public MediaSource {
+ public:
+ TestMediaSource(FuzzedDataProvider& mFdp) : mTestMetaData(new MetaData) {
+ int32_t vectorSize = 0;
+ mAllowRead = mFdp.ConsumeBool();
+ mKeySps = mFdp.ConsumeIntegral<int32_t>();
+ mKeyVps = mFdp.ConsumeIntegral<int32_t>();
+ mKeyPps = mFdp.ConsumeIntegral<int32_t>();
+ mKeyTime = mFdp.ConsumeIntegralInRange<int64_t>(kMinSize, kMaxTime);
+
+ mMimeType = mFdp.PickValueInArray(kKeyMimeTypeArray);
+ mTestMetaData->setCString(kKeyMIMEType, mMimeType);
+ if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+ int32_t index =
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRNBFrameSizes) - 1);
+ vectorSize = kAMRNBFrameSizes[index];
+ mData.push_back(kAMRIndexOffset * index);
+ } else if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+ int32_t index =
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRWBFrameSizes) - 1);
+ vectorSize = kAMRWBFrameSizes[index];
+ mData.push_back(kAMRIndexOffset * index);
+ } else if (mMimeType == MEDIA_MIMETYPE_VIDEO_H263) {
+ // Required format for H263 media data
+ mData.push_back(0);
+ mData.push_back(0);
+ vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+ } else {
+ vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+ }
+ for (size_t idx = mData.size(); idx < vectorSize; ++idx) {
+ mData.push_back(mFdp.ConsumeIntegral<uint8_t>());
+ }
+ }
+ virtual status_t start(MetaData* /*params*/) { return OK; }
+ virtual status_t stop() { return OK; }
+ virtual sp<MetaData> getFormat() { return mTestMetaData; }
+ virtual status_t read(MediaBufferBase** buffer, const ReadOptions* /*options*/) {
+ if (!mAllowRead) {
+ return -1;
+ }
+ *buffer = new MediaBuffer(mData.data() /*data*/, mData.size() /*size*/);
+ if (mKeySps) {
+ (*buffer)->meta_data().setInt32(kKeySps, mKeySps);
+ }
+ if (mKeyVps) {
+ (*buffer)->meta_data().setInt32(kKeyVps, mKeyVps);
+ }
+ if (mKeyPps) {
+ (*buffer)->meta_data().setInt32(kKeyPps, mKeyPps);
+ }
+ (*buffer)->meta_data().setInt64(kKeyTime, mKeyTime);
+ return OK;
+ }
+
+ private:
+ int32_t mKeySps;
+ int32_t mKeyVps;
+ int32_t mKeyPps;
+ int64_t mKeyTime;
+ bool mAllowRead;
+ const char* mMimeType;
+ sp<MetaData> mTestMetaData;
+ std::vector<uint8_t> mData;
+};
+
+class ARTPWriterFuzzer {
+ public:
+ ARTPWriterFuzzer(const uint8_t* data, size_t size)
+ : mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)), mFdp(data, size) {}
+ ~ARTPWriterFuzzer() { close(mDataSourceFd); }
+ void process();
+
+ private:
+ void createARTPWriter();
+ const int32_t mDataSourceFd;
+ FuzzedDataProvider mFdp;
+ sp<ARTPWriter> mArtpWriter;
+};
+
+void ARTPWriterFuzzer::createARTPWriter() {
+ String8 localIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+ String8 remoteIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+ mArtpWriter = sp<ARTPWriter>::make(
+ mDataSourceFd, localIp, mFdp.ConsumeIntegral<uint16_t>() /* localPort */, remoteIp,
+ mFdp.ConsumeIntegral<uint16_t>() /* remotePort */,
+ mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize) /* seqNo */);
+}
+
+void ARTPWriterFuzzer::process() {
+ if (mFdp.ConsumeBool()) {
+ mArtpWriter = sp<ARTPWriter>::make(mDataSourceFd);
+ if (mArtpWriter->getSequenceNum() > kMaxSize) {
+ createARTPWriter();
+ }
+ } else {
+ createARTPWriter();
+ }
+
+ mArtpWriter->addSource(sp<TestMediaSource>::make(mFdp) /* source */);
+
+ while (mFdp.remaining_bytes()) {
+ auto invokeRTPWriterFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() {
+ sp<MetaData> metaData = sp<MetaData>::make();
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt32(kKeySelfID, mFdp.ConsumeIntegral<int32_t>());
+ }
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt32(kKeyPayloadType, mFdp.ConsumeIntegral<int32_t>());
+ }
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt32(kKeyRtpExtMap, mFdp.ConsumeIntegral<int32_t>());
+ }
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt32(kKeyRtpCvoDegrees, mFdp.ConsumeIntegral<int32_t>());
+ }
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt32(kKeyRtpDscp, mFdp.ConsumeIntegral<int32_t>());
+ }
+ if (mFdp.ConsumeBool()) {
+ metaData->setInt64(kKeySocketNetwork, mFdp.ConsumeIntegral<int64_t>());
+ }
+ mArtpWriter->start(metaData.get() /*param*/);
+ },
+ [&]() {
+ mArtpWriter->setTMMBNInfo(mFdp.ConsumeIntegral<uint32_t>() /* opponentID */,
+ mFdp.ConsumeIntegral<uint32_t>() /* bitrate */);
+ },
+ [&]() { mArtpWriter->stop(); },
+ [&]() {
+ mArtpWriter->updateCVODegrees(mFdp.ConsumeIntegral<int32_t>() /* cvoDegrees */);
+ },
+ [&]() {
+ mArtpWriter->updatePayloadType(
+ mFdp.ConsumeIntegral<int32_t>() /* payloadType */);
+ },
+
+ });
+ invokeRTPWriterFuzzer();
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ ARTPWriterFuzzer artpWriterFuzzer(data, size);
+ artpWriterFuzzer.process();
+ return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
new file mode 100644
index 0000000..51c423e
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
@@ -0,0 +1,397 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arpa/inet.h>
+#include <signal.h>
+#include <sys/select.h>
+#include <sys/socket.h>
+#include <unistd.h>
+
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/rtsp/ARTSPConnection.h>
+
+using namespace android;
+
+const std::string kAuthType[] = {"Basic", "Digest"};
+const std::string kTab = "\t";
+const std::string kCSeq = "CSeq: ";
+const std::string kSpace = " ";
+const std::string kNewLine = "\n";
+const std::string kBinaryHeader = "$";
+const std::string kNonce = " nonce=\"\"";
+const std::string kRealm = " realm=\"\"";
+const std::string kHeaderBoundary = "\r\n\r\n";
+const std::string kContentLength = "content-length: ";
+const std::string kDefaultRequestValue = "INVALID_FORMAT";
+const std::string kUrlPrefix = "rtsp://root:pass@127.0.0.1:";
+const std::string kRequestMarker = "REQUEST_SENT";
+const std::string kQuitResponse = "\n\n\n\n";
+const std::string kRTSPVersion = "RTSP/1.0";
+const std::string kValidResponse = kRTSPVersion + " 200 \n";
+const std::string kAuthString = kRTSPVersion + " 401 \nwww-authenticate: ";
+constexpr char kNullValue = '\0';
+constexpr char kDefaultValue = '0';
+constexpr int32_t kWhat = 'resp';
+constexpr int32_t kMinPort = 100;
+constexpr int32_t kMaxPort = 999;
+constexpr int32_t kMinASCIIValue = 32;
+constexpr int32_t kMaxASCIIValue = 126;
+constexpr int32_t kMinContentLength = 0;
+constexpr int32_t kMaxContentLength = 1000;
+constexpr int32_t kBinaryVectorSize = 3;
+constexpr int32_t kDefaultCseqValue = 1;
+constexpr int32_t kBufferSize = 1024;
+constexpr int32_t kMaxLoopRuns = 5;
+constexpr int32_t kPort = 554;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kMaxThreads = 1024;
+
+// Minimal AHandler that runs |signalEosFunction| whenever the kWhat ('resp')
+// reply message is delivered, unblocking the fuzz loop's wait.
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction)
+        : mSignalEosFunction(std::move(signalEosFunction)) {}
+    ~FuzzAHandler() = default;
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        // Only the reply message is interesting; everything else is dropped.
+        if (msg->what() == kWhat) {
+            mSignalEosFunction();
+        }
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+// Drives ARTSPConnection against an in-process TCP server on 127.0.0.1:kPort.
+// The server side (startServer/acceptConnection/handleConnection) runs on
+// worker threads in mThreadPool; each round's payload is staged in
+// mFuzzData/mRequestData/mFuzzRequestData under mFuzzDataMutex.
+class RTSPConnectionFuzzer {
+  public:
+    RTSPConnectionFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    ~RTSPConnectionFuzzer() {
+        // wait for all the threads to join the main thread
+        for (auto& thread : mThreadPool) {
+            if (thread.joinable()) {
+                thread.join();
+            }
+        }
+        close(mServerFd);
+    }
+    void process();
+
+  private:
+    void signalEos();
+    void startServer();
+    void createFuzzData();
+    void acceptConnection();
+    void handleConnection(int32_t);
+    void handleClientResponse(int32_t);
+    void sendValidResponse(int32_t, int32_t);
+    int32_t checkSocket(int32_t);
+    size_t generateBinaryDataSize(std::string);
+    bool checkValidRequestData(const AString&);
+    bool mEosReached = false;              // set by signalEos() when a reply message lands
+    bool mServerFailure = false;           // startServer() failed; process() bails out
+    bool mNotifyResponseListener = false;  // server reply should carry auth headers
+    int32_t mServerFd;                     // listening socket; valid only after startServer()
+    std::string mFuzzData = "";            // raw bytes the server pushes after accept
+    std::string mFuzzRequestData = "";     // canned server reply to a client request
+    std::string mRequestData = kDefaultRequestValue;  // request sent via sendRequest()
+    std::mutex mFuzzDataMutex;             // guards the three payload strings above
+    std::mutex mMsgPostCompleteMutex;      // pairs with mConditionalVariable
+    std::condition_variable mConditionalVariable;
+    std::vector<std::thread> mThreadPool;
+    FuzzedDataProvider mFdp;
+};
+
+// Mirrors ARTSPConnection.cpp's binary-frame parsing: the two bytes after the
+// '$' marker form a big-endian 16-bit payload length.
+size_t RTSPConnectionFuzzer::generateBinaryDataSize(std::string values) {
+    const uint8_t hi = values[0];
+    const uint8_t lo = values[1];
+    return (static_cast<size_t>(hi) << 8) | lo;
+}
+
+// A request is usable only if it carries a header boundary and two spaces past
+// position zero, mirroring ARTSPConnection's request-line parsing.
+bool RTSPConnectionFuzzer::checkValidRequestData(const AString& request) {
+    if (request.find(kHeaderBoundary.c_str()) <= 0) {
+        return false;
+    }
+    ssize_t firstSpace = request.find(kSpace.c_str());
+    return firstSpace > 0 && request.find(kSpace.c_str(), firstSpace + 1) > 0;
+}
+
+// Builds one round of server->client payload (mFuzzData) and, optionally, a
+// client request (mRequestData) plus the canned server reply for it
+// (mFuzzRequestData). The exact ConsumeBool()/Consume* call order defines the
+// fuzz-input encoding, so it must not be reordered.
+void RTSPConnectionFuzzer::createFuzzData() {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    mFuzzData = "";
+    mFuzzRequestData = "";
+    int32_t contentLength = 0;
+    if (mFdp.ConsumeBool()) {
+        if (mFdp.ConsumeBool()) {
+            // if we want to handle server request
+            mFuzzData.append(kSpace + kSpace + kRTSPVersion);
+        } else {
+            // if we want to notify response listener
+            mFuzzData.append(
+                    kRTSPVersion + kSpace +
+                    std::to_string(mFdp.ConsumeIntegralInRange<uint16_t>(kMinPort, kMaxPort)) +
+                    kSpace);
+        }
+        mFuzzData.append(kNewLine);
+        if (mFdp.ConsumeBool()) {
+            contentLength =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinContentLength, kMaxContentLength);
+            mFuzzData.append(kContentLength + std::to_string(contentLength) + kNewLine);
+            if (mFdp.ConsumeBool()) {
+                // exercise header line folding (continuation with space or tab)
+                mFdp.ConsumeBool() ? mFuzzData.append(kSpace + kNewLine)
+                                   : mFuzzData.append(kTab + kNewLine);
+            }
+        }
+        // new line to break out of infinite for loop
+        mFuzzData.append(kNewLine);
+        if (contentLength) {
+            // pad/truncate so the body is exactly contentLength bytes long
+            std::string contentData = mFdp.ConsumeBytesAsString(contentLength);
+            contentData.resize(contentLength, kDefaultValue);
+            mFuzzData.append(contentData);
+        }
+    } else {
+        // for binary data
+        std::string randomValues(kBinaryVectorSize, kNullValue);
+        for (size_t idx = 0; idx < kBinaryVectorSize; ++idx) {
+            randomValues[idx] =
+                    (char)mFdp.ConsumeIntegralInRange<uint8_t>(kMinASCIIValue, kMaxASCIIValue);
+        }
+        // the two bytes after '$' encode the big-endian payload size
+        size_t binaryDataSize = generateBinaryDataSize(randomValues);
+        std::string data = mFdp.ConsumeBytesAsString(binaryDataSize);
+        data.resize(binaryDataSize, kDefaultValue);
+        mFuzzData.append(kBinaryHeader + randomValues + data);
+    }
+    if (mFdp.ConsumeBool()) {
+        mRequestData = mFdp.ConsumeRandomLengthString(kMaxBytes) + kSpace + kSpace +
+                       kHeaderBoundary + mFdp.ConsumeRandomLengthString(kMaxBytes);
+        // Check if Request data is valid
+        if (checkValidRequestData(mRequestData.c_str())) {
+            if (mFdp.ConsumeBool()) {
+                if (mFdp.ConsumeBool()) {
+                    // if we want to handle server request
+                    mFuzzRequestData.append(kSpace + kSpace + kRTSPVersion + kNewLine);
+                } else {
+                    // if we want to add authentication headers
+                    mNotifyResponseListener = true;
+                    mFuzzRequestData.append(kAuthString);
+                    if (mFdp.ConsumeBool()) {
+                        // for Authentication type: Basic
+                        mFuzzRequestData.append(kAuthType[0]);
+                    } else {
+                        // for Authentication type: Digest
+                        mFuzzRequestData.append(kAuthType[1]);
+                        mFuzzRequestData.append(kNonce);
+                        mFuzzRequestData.append(kRealm);
+                    }
+                    mFuzzRequestData.append(kNewLine);
+                }
+            } else {
+                mNotifyResponseListener = false;
+                mFuzzRequestData.append(kValidResponse);
+            }
+        } else {
+            mRequestData = kDefaultRequestValue;
+        }
+    } else {
+        mRequestData = kDefaultRequestValue;
+        mFuzzData.append(kNewLine);
+    }
+}
+
+// Marks the awaited reply as delivered and wakes the fuzz loop.
+// Fix: the flag must be written while holding the waiter's mutex. The original
+// set mEosReached unlocked, so the notify could slip between the waiter's
+// predicate check and its sleep and be lost (condition_variable lost-wakeup).
+void RTSPConnectionFuzzer::signalEos() {
+    {
+        std::unique_lock lock(mMsgPostCompleteMutex);
+        mEosReached = true;
+    }
+    mConditionalVariable.notify_all();
+}
+
+// Waits at most one second for |newSocket| to become readable. Returns the
+// raw select() result: 1 = readable, 0 = timeout, -1 = error.
+int32_t RTSPConnectionFuzzer::checkSocket(int32_t newSocket) {
+    struct timeval timeout = {};
+    timeout.tv_sec = 1;
+
+    fd_set readSet;
+    FD_ZERO(&readSet);
+    FD_SET(newSocket, &readSet);
+
+    return select(newSocket + 1, &readSet, nullptr, nullptr, &timeout);
+}
+
+// Sends an RTSP "200" reply, echoing the CSeq when the caller extracted one.
+// (The default argument lives on this definition deliberately: only calls that
+// appear after it in this translation unit use it.)
+void RTSPConnectionFuzzer::sendValidResponse(int32_t newSocket, int32_t cseq = -1) {
+    std::string reply = kValidResponse;
+    if (cseq == -1) {
+        reply.append(kNewLine);
+    } else {
+        reply.append(kCSeq + std::to_string(cseq));
+        reply.append(kNewLine + kNewLine);
+    }
+    send(newSocket, reply.c_str(), reply.size(), 0);
+}
+
+// Drains whatever the client wrote, if the socket becomes readable in time.
+// The contents are discarded; this only keeps the peer's send from blocking.
+void RTSPConnectionFuzzer::handleClientResponse(int32_t newSocket) {
+    if (checkSocket(newSocket) != 1) {
+        return;
+    }
+    char buffer[kBufferSize] = {0};
+    read(newSocket, buffer, kBufferSize);
+}
+
+// Server side of one fuzz round: push the staged payload, then (optionally)
+// answer the client's request, echoing its CSeq, and finally tell the
+// connection loop to quit.
+void RTSPConnectionFuzzer::handleConnection(int32_t newSocket) {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    // Extracts the trailing CSeq digit from a request header, falling back to
+    // kDefaultCseqValue when absent. Fix: the original indexed
+    // header[rfind(kCSeq) + len] without checking rfind() for npos, which is
+    // out-of-range UB on a CSeq-less request.
+    auto extractCseq = [](const char* buffer) {
+        std::string clientResponse(buffer);
+        std::string header = clientResponse.substr(0, clientResponse.find(kHeaderBoundary));
+        size_t pos = header.rfind(kCSeq);
+        if (pos == std::string::npos || pos + kCSeq.length() >= header.size()) {
+            return kDefaultCseqValue;
+        }
+        char cseq = header[pos + kCSeq.length()];
+        return cseq ? cseq - '0' : kDefaultCseqValue;
+    };
+
+    send(newSocket, mFuzzData.c_str(), mFuzzData.size(), 0);
+    if (mFuzzData[0] == kSpace[0]) {
+        handleClientResponse(newSocket);
+    }
+
+    if (mFuzzRequestData != "") {
+        char buffer[kBufferSize] = {0};
+        if (checkSocket(newSocket) == 1 && recv(newSocket, buffer, kBufferSize, MSG_DONTWAIT) > 0) {
+            // Answer the client's request, echoing its CSeq.
+            std::string response = mFuzzRequestData;
+            response.append(kCSeq + std::to_string(extractCseq(buffer)));
+            response.append(kNewLine + kNewLine);
+            send(newSocket, response.data(), response.length(), 0);
+
+            if (!mNotifyResponseListener) {
+                // The client answers our request in turn; reply 200 with its CSeq.
+                char reply[kBufferSize] = {0};
+                if (checkSocket(newSocket) == 1) {
+                    if (recv(newSocket, reply, kBufferSize, MSG_DONTWAIT) > 0) {
+                        sendValidResponse(newSocket, extractCseq(reply));
+                    } else {
+                        sendValidResponse(newSocket);
+                    }
+                }
+            }
+        } else {
+            // If no data to read, then send a valid response
+            // to release the mutex lock in fuzzer
+            sendValidResponse(newSocket);
+        }
+    }
+    send(newSocket, kQuitResponse.c_str(), kQuitResponse.size(), 0);
+}
+
+// Creates the listening socket on 127.0.0.1:kPort. Fix: check the socket()
+// return value and stop at the first failure — the original set
+// mServerFailure but kept calling setsockopt/bind/listen on a dead fd.
+void RTSPConnectionFuzzer::startServer() {
+    // A peer that disappears mid-send must not kill the fuzzer with SIGPIPE.
+    signal(SIGPIPE, SIG_IGN);
+    mServerFd = socket(AF_INET, SOCK_STREAM | SOCK_CLOEXEC, 0);
+    if (mServerFd < 0) {
+        mServerFailure = true;
+        return;
+    }
+    struct sockaddr_in serverAddress;
+    serverAddress.sin_family = AF_INET;
+    serverAddress.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+    serverAddress.sin_port = htons(kPort);
+
+    // Get rid of "Address in use" error
+    int32_t opt = 1;
+    if (setsockopt(mServerFd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt))) {
+        mServerFailure = true;
+        return;
+    }
+
+    // Bind the socket and set for listening.
+    if (bind(mServerFd, (struct sockaddr*)(&serverAddress), sizeof(serverAddress)) < 0) {
+        mServerFailure = true;
+        return;
+    }
+
+    if (listen(mServerFd, 5) < 0) {
+        mServerFailure = true;
+    }
+}
+
+// Accepts one client and serves it. Fix: accept4() returns -1 on failure
+// (e.g. the looper side never connected); don't hand an invalid fd to
+// handleConnection()/close().
+void RTSPConnectionFuzzer::acceptConnection() {
+    int32_t clientFd = accept4(mServerFd, nullptr, nullptr, SOCK_CLOEXEC);
+    if (clientFd < 0) {
+        return;
+    }
+    handleConnection(clientFd);
+    close(clientFd);
+}
+
+// Main fuzz loop: starts the local server, then repeatedly connects an
+// ARTSPConnection to it, optionally sends a request, and optionally
+// disconnects. Each step blocks on mConditionalVariable until FuzzAHandler
+// reports the posted reply, keeping the server thread and the looper in step.
+void RTSPConnectionFuzzer::process() {
+    startServer();
+    if (mServerFailure) {
+        return;
+    }
+    sp<ALooper> looper = sp<ALooper>::make();
+    sp<FuzzAHandler> handler =
+            sp<FuzzAHandler>::make(std::bind(&RTSPConnectionFuzzer::signalEos, this));
+    sp<ARTSPConnection> rtspConnection =
+            sp<ARTSPConnection>::make(mFdp.ConsumeBool(), mFdp.ConsumeIntegral<uint64_t>());
+    looper->start();
+    looper->registerHandler(rtspConnection);
+    looper->registerHandler(handler);
+    sp<AMessage> replymsg = sp<AMessage>::make(kWhat, handler);
+    std::string url = kUrlPrefix + std::to_string(kPort) + "/";
+
+    while (mFdp.remaining_bytes() && mThreadPool.size() < kMaxThreads) {
+        createFuzzData();
+        // One server thread per round; all are joined in the destructor.
+        mThreadPool.push_back(std::thread(&RTSPConnectionFuzzer::acceptConnection, this));
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->observeBinaryData(replymsg);
+        }
+
+        {
+            rtspConnection->connect(url.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            // Wait for the reply, consuming (resetting) the EOS flag for reuse.
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mRequestData != kDefaultRequestValue) {
+            rtspConnection->sendRequest(mRequestData.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->disconnect(replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+    }
+}
+
+// libFuzzer entry point: one fuzz iteration per call; the fuzzer's destructor
+// joins the server threads before returning.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    RTSPConnectionFuzzer fuzzer(data, size);
+    fuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
new file mode 100644
index 0000000..748e5b6
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <datasource/HTTPBase.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+
+using namespace android;
+
+constexpr int32_t kMinCapacity = 0;
+constexpr int32_t kMaxCapacity = 1000;
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 128;
+enum { kWhatLoad = 'load' };
+
+// Minimal AHandler that runs |signalEosFunction| when SDPLoader posts its
+// kWhatLoad completion message.
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction)
+        : mSignalEosFunction(std::move(signalEosFunction)) {}
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        // Only the load-complete message matters; everything else is ignored.
+        if (msg->what() == kWhatLoad) {
+            mSignalEosFunction();
+        }
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+// MediaHTTPConnection stub backed by fuzzer bytes: serves a random buffer of
+// up to kMaxCapacity bytes and fuzz-chosen status codes.
+struct FuzzMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    FuzzMediaHTTPConnection(FuzzedDataProvider* fdp) : mFdp(fdp) {
+        mSize = mFdp->ConsumeIntegralInRange(kMinCapacity, kMaxCapacity);
+        mData = mFdp->ConsumeBytes<uint8_t>(mSize);
+        // ConsumeBytes may return fewer bytes than requested near the end of
+        // the input, so re-read the actual size.
+        mSize = mData.size();
+    }
+    virtual bool connect(const char* /* uri */,
+                         const KeyedVector<String8, String8>* /* headers */) {
+        return mFdp->ConsumeBool();
+    }
+    virtual void disconnect() { return; }
+    // All-or-nothing read: returns |size| only when [offset, offset+size) lies
+    // entirely inside the buffer; otherwise 0 (no partial reads).
+    virtual ssize_t readAt(off64_t offset, void* data, size_t size) {
+        if ((size + offset <= mData.size()) && (offset >= 0)) {
+            memcpy(data, mData.data() + offset, size);
+            return size;
+        }
+        return 0;
+    }
+    virtual off64_t getSize() { return mSize; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) {return mFdp->ConsumeIntegral<status_t>();}
+    virtual status_t getUri(String8* /*uri*/) {return mFdp->ConsumeIntegral<status_t>();}
+
+  private:
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mData;  // fake HTTP body served by readAt()
+    size_t mSize = 0;
+};
+
+// MediaHTTPService stub that hands out fuzz-backed HTTP connections.
+struct FuzzMediaHTTPService : public MediaHTTPService {
+  public:
+    FuzzMediaHTTPService(FuzzedDataProvider* fdp) : mFdp(fdp) {}
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        // Keep a reference so the connection outlives the caller's use of it.
+        mConnection = sp<FuzzMediaHTTPConnection>::make(mFdp);
+        return mConnection;
+    }
+
+  private:
+    sp<FuzzMediaHTTPConnection> mConnection = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
+
+// Exercises SDPLoader::load() through a fuzz-backed HTTP stack; process()
+// blocks until the loader posts its kWhatLoad completion message.
+class SDPLoaderFuzzer {
+  public:
+    SDPLoaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    void signalEos();
+
+    bool mEosReached = false;  // set once the kWhatLoad reply is delivered
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    FuzzedDataProvider mFdp;
+};
+
+// Marks the load as complete and wakes process().
+// Fix: the flag must be written under the waiter's mutex. The original set
+// mEosReached unlocked, so the notify could slip between the waiter's
+// predicate check and its sleep and be lost, hanging process() forever.
+// (std::unique_lock is already in use elsewhere in this file.)
+void SDPLoaderFuzzer::signalEos() {
+    {
+        std::unique_lock lock(mMsgPostCompleteMutex);
+        mEosReached = true;
+    }
+    mConditionalVariable.notify_one();
+}
+
+// Builds an SDPLoader over the fuzz-backed HTTP service, issues one load()
+// with fuzzed headers, and waits for the completion message.
+void SDPLoaderFuzzer::process() {
+    sp<FuzzAHandler> handler = sp<FuzzAHandler>::make(std::bind(&SDPLoaderFuzzer::signalEos, this));
+    sp<ALooper> looper = sp<ALooper>::make();
+    looper->start();
+    looper->registerHandler(handler);
+    const sp<AMessage> notify = sp<AMessage>::make(kWhatLoad, handler);
+    sp<SDPLoader> sdpLoader =
+            sp<SDPLoader>::make(notify, mFdp.ConsumeIntegral<uint32_t>() /* flags */,
+                                sp<FuzzMediaHTTPService>::make(&mFdp) /* httpService */);
+
+    // Fix: draw the header count once. The original evaluated
+    // ConsumeIntegralInRange in the loop condition, consuming fuzzer bytes on
+    // every iteration check, so the bound drifted as the loop body itself
+    // consumed input.
+    const size_t numHeaders = mFdp.ConsumeIntegralInRange<size_t>(kMinCapacity, kMaxCapacity);
+    KeyedVector<String8, String8> headers;
+    for (size_t idx = 0; idx < numHeaders; ++idx) {
+        headers.add(String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* key */,
+                    String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* value */);
+    }
+
+    sdpLoader->load(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* url */, &headers);
+
+    // Block until FuzzAHandler observes the kWhatLoad completion message.
+    std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+    mConditionalVariable.wait(waitForMsgPostComplete, [this] { return mEosReached; });
+}
+
+// libFuzzer entry point: one SDPLoader fuzz iteration per call.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    SDPLoaderFuzzer fuzzer(data, size);
+    fuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
index ee76a67..78140dd 100644
--- a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -36,10 +36,11 @@
class Helper {
public:
Helper(double contentFrameDurationMs, const Configuration &configuration) :
- mVideoRenderQualityTracker(configuration) {
+ mVideoRenderQualityTracker(configuration, testTraceTrigger) {
mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
mMediaTimeUs = 0;
mClockTimeNs = 0;
+ sTraceTriggeredCount = 0;
}
void changeContentFrameDuration(double contentFrameDurationMs) {
@@ -100,6 +101,10 @@
return e;
}
+ int getTraceTriggeredCount() {
+ return sTraceTriggeredCount;
+ }
+
private:
VideoRenderQualityTracker mVideoRenderQualityTracker;
int64_t mContentFrameDurationUs;
@@ -107,8 +112,16 @@
int64_t mClockTimeNs;
VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
VideoRenderQualityTracker::JudderEvent mJudderEvent;
+
+ static int sTraceTriggeredCount;
+
+ static void testTraceTrigger() {
+ sTraceTriggeredCount++;
+ };
};
+int Helper::sTraceTriggeredCount = 0;
+
class VideoRenderQualityTrackerTest : public ::testing::Test {
public:
VideoRenderQualityTrackerTest() {}
@@ -139,6 +152,10 @@
EXPECT_EQ(c.judderEventMax, d.judderEventMax);
EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+ EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+ EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+ EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
+ EXPECT_EQ(c.traceMaxFreezeDurationMs, d.traceMaxFreezeDurationMs);
}
TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
@@ -166,6 +183,10 @@
EXPECT_EQ(c.judderEventMax, d.judderEventMax);
EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+ EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+ EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+ EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
+ EXPECT_EQ(c.traceMaxFreezeDurationMs, d.traceMaxFreezeDurationMs);
}
TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
@@ -193,6 +214,10 @@
EXPECT_EQ(c.judderEventMax, d.judderEventMax);
EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+ EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+ EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+ EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
+ EXPECT_EQ(c.traceMaxFreezeDurationMs, d.traceMaxFreezeDurationMs);
}
TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
@@ -232,6 +257,14 @@
return "10*10";
} else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
return "140-a";
+ } else if (flag == "render_metrics_trace_trigger_enabled") {
+ return "fals";
+ } else if (flag == "render_metrics_trace_trigger_throttle_ms") {
+ return "12345678901234";
+ } else if (flag == "render_metrics_trace_minimum_freeze_duration_ms") {
+ return "10b0";
+ } else if (flag == "render_metrics_trace_maximum_freeze_duration_ms") {
+ return "100a";
}
return "";
}
@@ -255,6 +288,10 @@
EXPECT_EQ(c.judderEventMax, d.judderEventMax);
EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+ EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+ EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+ EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
+ EXPECT_EQ(c.traceMaxFreezeDurationMs, d.traceMaxFreezeDurationMs);
}
TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
@@ -294,6 +331,14 @@
return "10000";
} else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
return "11000";
+ } else if (flag == "render_metrics_trace_trigger_enabled") {
+ return "true";
+ } else if (flag == "render_metrics_trace_trigger_throttle_ms") {
+ return "50000";
+ } else if (flag == "render_metrics_trace_minimum_freeze_duration_ms") {
+ return "1000";
+ } else if (flag == "render_metrics_trace_maximum_freeze_duration_ms") {
+ return "5000";
}
return "";
}
@@ -353,6 +398,11 @@
EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+
+ EXPECT_EQ(c.traceTriggerEnabled, true);
+ EXPECT_EQ(c.traceTriggerThrottleMs, 50000);
+ EXPECT_EQ(c.traceMinFreezeDurationMs, 1000);
+ EXPECT_EQ(c.traceMaxFreezeDurationMs, 5000);
}
TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
@@ -1060,4 +1110,75 @@
EXPECT_EQ(h.getMetrics().judderScore, 10 + 300 + 2000);
}
+// Verifies that trace triggers fired by qualifying freezes are rate-limited
+// by traceTriggerThrottleMs: triggers inside the throttle window are dropped,
+// and the first trigger after the window elapses fires again.
+TEST_F(VideoRenderQualityTrackerTest,
+       freezesForTraceDuration_withThrottle_throttlesTraceTrigger) {
+    Configuration c;
+    c.enabled = true;
+    c.traceTriggerEnabled = true; // The trigger is enabled, so traces should be triggered.
+    // The value of traceTriggerThrottleMs must be larger than traceMinFreezeDurationMs.
+    // Otherwise, the throttle does not work.
+    c.traceTriggerThrottleMs = 200;
+    c.traceMinFreezeDurationMs = 40;
+    int32_t freeze = c.traceMinFreezeDurationMs;
+
+    Helper h(20, c);
+    // Freeze triggers separated by 80ms which is less than the threshold.
+    h.render({
+            freeze, // Freeze duration does not check trace trigger.
+            20,     // Trace triggered.
+            20,     // Throttle time: 20/200ms
+            20,     // Throttle time: 40/200ms
+            freeze, // Throttle time: 80/200ms
+            20,     // Throttle time: 100/200ms (Trace not triggered)
+    });
+    EXPECT_EQ(h.getTraceTriggeredCount(), 1);
+    // Next freeze trigger is separated by 200ms which breaks the throttle threshold.
+    h.render({
+            20,     // Throttle time: 120/200ms
+            20,     // Throttle time: 140/200ms
+            20,     // Throttle time: 160/200ms
+            freeze, // Throttle time: 200/200ms
+            20,     // Trace triggered.
+    });
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+    // Next freeze trigger is separated by 80ms which is less than the threshold.
+    h.render({
+            20,     // Throttle time: 20/200ms
+            20,     // Throttle time: 40/200ms
+            freeze, // Throttle time: 80/200ms
+            20,     // Throttle time: 100/200ms (Trace not triggered)
+    });
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+}
+
+// A freeze in [traceMinFreezeDurationMs, traceMaxFreezeDurationMs) triggers a
+// trace; a gap at or beyond the max could be a pause and must not trigger.
+TEST_F(VideoRenderQualityTrackerTest, freezeForTraceDuration_triggersTrace) {
+    Configuration c;
+    c.enabled = true;
+    c.traceTriggerEnabled = true; // The trigger is enabled, so traces should be triggered.
+    c.traceTriggerThrottleMs = 0; // Disable throttle in the test case.
+    int32_t freeze1 = c.traceMinFreezeDurationMs;
+    int32_t freeze2 = c.traceMaxFreezeDurationMs - 1;
+    int32_t couldBeAPause = c.traceMaxFreezeDurationMs + 1;
+
+    Helper h(20, c);
+    h.render({freeze1, 20, freeze2, 20, couldBeAPause, 20});
+
+    // Only freeze1 and freeze2 qualify; the pause-length gap does not.
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+}
+
+// With traceTriggerEnabled == false, the same freeze pattern that triggers
+// two traces in the enabled case must trigger none.
+TEST_F(VideoRenderQualityTrackerTest,
+       freezeForTraceDuration_withTraceDisabled_doesNotTriggerTrace) {
+    Configuration c;
+    c.enabled = true;
+    c.traceTriggerEnabled = false; // The trigger is disabled, so no traces should be triggered.
+    c.traceTriggerThrottleMs = 0; // Disable throttle in the test case.
+    int32_t freeze1 = c.traceMinFreezeDurationMs;
+    int32_t freeze2 = c.traceMaxFreezeDurationMs - 1;
+    int32_t couldBeAPause = c.traceMaxFreezeDurationMs + 1;
+
+    Helper h(20, c);
+    h.render({freeze1, 20, freeze2, 20, couldBeAPause, 20});
+
+    EXPECT_EQ(h.getTraceTriggeredCount(), 0);
+}
} // android
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 540d75d..8072002 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -30,6 +30,7 @@
srcs: [
"AC4Parser.cpp",
+ "HeifCleanAperture.cpp",
"ItemTable.cpp",
"MPEG4Extractor.cpp",
"SampleIterator.cpp",
diff --git a/media/module/extractors/mp4/HeifCleanAperture.cpp b/media/module/extractors/mp4/HeifCleanAperture.cpp
new file mode 100644
index 0000000..f0a0867
--- /dev/null
+++ b/media/module/extractors/mp4/HeifCleanAperture.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <HeifCleanAperture.h>
+
+namespace android {
+namespace heif {
+namespace {
+
+// |a| and |b| hold int32_t values. The int64_t type is used so that we can negate INT32_MIN without
+// overflowing int32_t.
+// |a| and |b| hold int32_t values; the int64_t type lets us negate INT32_MIN
+// without overflow. Returns gcd(|a|, |b|), with gcd(x, 0) == x.
+int64_t calculateGreatestCommonDivisor(int64_t a, int64_t b) {
+    a = (a < 0) ? -a : a;
+    b = (b < 0) ? -b : b;
+    // Euclid's algorithm on the magnitudes.
+    while (b != 0) {
+        const int64_t remainder = a % b;
+        a = b;
+        b = remainder;
+    }
+    return a;
+}
+
+// True iff x is outside int32_t's representable range.
+bool overflowsInt32(int64_t x) {
+    return (x < INT32_MIN) || (x > INT32_MAX);
+}
+
+// Returns value/2 as a simplified fraction (an image-center coordinate).
+Fraction calculateCenter(int32_t value) {
+    Fraction f(value, 2);
+    f.simplify();
+    return f;
+}
+
+} // namespace
+
+// Constructs the fraction n/d. Uses a member-initializer list instead of
+// assignment in the constructor body; no normalization happens here — callers
+// invoke simplify() when they need a reduced form.
+Fraction::Fraction(int32_t n, int32_t d) : n(n), d(d) {}
+
+// Reduces the fraction by its GCD. A gcd of 1 (or 0, when n == d == 0) leaves
+// the fraction untouched.
+void Fraction::simplify() {
+    const int64_t gcd = calculateGreatestCommonDivisor(n, d);
+    if (gcd <= 1) {
+        return;
+    }
+    n = static_cast<int32_t>(n / gcd);
+    d = static_cast<int32_t>(d / gcd);
+}
+
+// Rewrites *this and *f over a common denominator (d * f->d after both are
+// simplified). Returns false — leaving both operands simplified but otherwise
+// unchanged — if any cross-multiplied term would overflow int32.
+bool Fraction::commonDenominator(Fraction* f) {
+    simplify();
+    f->simplify();
+    if (d == f->d) return true;
+    // Cross-multiply in 64-bit so overflow is detectable before narrowing.
+    const int64_t this_d = d;
+    const int64_t fd = f->d;
+    const int64_t thisnNew = n * fd;
+    const int64_t thisdNew = d * fd;
+    const int64_t fnNew = f->n * this_d;
+    const int64_t fdNew = f->d * this_d;
+    if (overflowsInt32(thisnNew) || overflowsInt32(thisdNew) || overflowsInt32(fnNew) ||
+        overflowsInt32(fdNew)) {
+        return false;
+    }
+    n = static_cast<int32_t>(thisnNew);
+    d = static_cast<int32_t>(thisdNew);
+    f->n = static_cast<int32_t>(fnNew);
+    f->d = static_cast<int32_t>(fdNew);
+    return true;
+}
+
+// this += f, with the result simplified. Returns false on int32 overflow
+// (in which case *this may have been rewritten over a common denominator).
+bool Fraction::add(Fraction f) {
+    if (!commonDenominator(&f)) {
+        return false;
+    }
+
+    // Numerators share a denominator now; add them in 64-bit and re-check.
+    const int64_t result = static_cast<int64_t>(n) + f.n;
+    if (overflowsInt32(result)) {
+        return false;
+    }
+    n = static_cast<int32_t>(result);
+    simplify();
+    return true;
+}
+
+// this -= f, with the result simplified. Returns false on int32 overflow
+// (in which case *this may have been rewritten over a common denominator).
+bool Fraction::subtract(Fraction f) {
+    if (!commonDenominator(&f)) {
+        return false;
+    }
+
+    // Numerators share a denominator now; subtract in 64-bit and re-check.
+    const int64_t result = static_cast<int64_t>(n) - f.n;
+    if (overflowsInt32(result)) {
+        return false;
+    }
+    n = static_cast<int32_t>(result);
+    simplify();
+    return true;
+}
+
+// Converts a CleanApertureBox into an integer crop rect within an
+// imageW x imageH image. Returns false (without guaranteeing any out-param
+// values) when the box is invalid or the rect falls outside the image.
+// Fix: the original computed *right = *left + clapW in int32 before its bounds
+// check, which can overflow (signed UB), and compared signed values against
+// UINT32_MAX/uint32 widths. The rect is now computed and validated in int64
+// before narrowing.
+bool convertCleanApertureToRect(uint32_t imageW, uint32_t imageH, const CleanAperture& clap,
+                                int32_t* left, int32_t* top, int32_t* right, int32_t* bottom) {
+    // ISO/IEC 14496-12:2020, Section 12.1.4.1:
+    // For horizOff and vertOff, D shall be strictly positive and N may be
+    // positive or negative. For cleanApertureWidth and cleanApertureHeight,
+    // N shall be positive and D shall be strictly positive.
+    if (clap.width.d <= 0 || clap.height.d <= 0 || clap.horizOff.d <= 0 || clap.vertOff.d <= 0 ||
+        clap.width.n < 0 || clap.height.n < 0 || !clap.width.isInteger() ||
+        !clap.height.isInteger() || imageW > INT32_MAX || imageH > INT32_MAX) {
+        return false;
+    }
+
+    const int32_t clapW = clap.width.getInt32();
+    const int32_t clapH = clap.height.getInt32();
+    if (clapW == 0 || clapH == 0) {
+        return false;
+    }
+
+    // left/top = imageCenter + offset - clapSize/2, rejected on overflow or
+    // if the result is fractional or negative.
+    Fraction centerX = calculateCenter(imageW);
+    Fraction centerY = calculateCenter(imageH);
+    Fraction halfW(clapW, 2);
+    Fraction halfH(clapH, 2);
+
+    if (!centerX.add(clap.horizOff) || !centerX.subtract(halfW) || !centerX.isInteger() ||
+        centerX.n < 0 || !centerY.add(clap.vertOff) || !centerY.subtract(halfH) ||
+        !centerY.isInteger() || centerY.n < 0) {
+        return false;
+    }
+
+    // Build the rect in 64-bit, verify it lies inside the image, then narrow.
+    const int64_t cropLeft = centerX.getInt32();
+    const int64_t cropTop = centerY.getInt32();
+    const int64_t cropRight = cropLeft + clapW;
+    const int64_t cropBottom = cropTop + clapH;
+    if (cropRight > static_cast<int64_t>(imageW) || cropBottom > static_cast<int64_t>(imageH)) {
+        return false;
+    }
+    *left = static_cast<int32_t>(cropLeft);
+    *top = static_cast<int32_t>(cropTop);
+    *right = static_cast<int32_t>(cropRight);
+    *bottom = static_cast<int32_t>(cropBottom);
+    return true;
+}
+
+} // namespace heif
+} // namespace android
diff --git a/media/module/extractors/mp4/ItemTable.cpp b/media/module/extractors/mp4/ItemTable.cpp
index 7fe5ba7..cf3df62 100644
--- a/media/module/extractors/mp4/ItemTable.cpp
+++ b/media/module/extractors/mp4/ItemTable.cpp
@@ -19,6 +19,7 @@
#include <unordered_set>
+#include <HeifCleanAperture.h>
#include <ItemTable.h>
#include <media/MediaExtractorPluginApi.h>
#include <media/MediaExtractorPluginHelper.h>
@@ -47,7 +48,7 @@
ImageItem(uint32_t _type, uint32_t _id, bool _hidden) :
type(_type), itemId(_id), hidden(_hidden),
rows(0), columns(0), width(0), height(0), rotation(0),
- offset(0), size(0), nextTileIndex(0) {}
+ offset(0), size(0), seenClap(false), nextTileIndex(0) {}
bool isGrid() const {
return type == FOURCC("grid");
@@ -77,6 +78,8 @@
sp<ABuffer> hvcc;
sp<ABuffer> icc;
sp<ABuffer> av1c;
+ bool seenClap;
+ CleanAperture clap;
Vector<uint32_t> thumbnails;
Vector<uint32_t> dimgRefs;
@@ -833,6 +836,47 @@
return OK;
}
+// Item property for the ISOBMFF 'clap' (CleanApertureBox): parse() reads the
+// eight fraction fields; attachTo() copies them onto the image item only when
+// a complete box was seen.
+struct ClapBox : public Box, public ItemProperty {
+    ClapBox(DataSourceHelper *source) :
+        Box(source, FOURCC("clap")), mSeen(false) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.seenClap = mSeen;
+        if (!mSeen) return;
+        image.clap = mClap;
+    }
+
+private:
+    bool mSeen;  // parse() read a complete box
+    CleanAperture mClap;
+};
+
+// Reads the eight consecutive uint32 fields of the CleanApertureBox:
+// width N/D, height N/D, horizOff N/D, vertOff N/D (ISO/IEC 14496-12 12.1.4).
+status_t ClapBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    if (size < 32) {  // 8 fields x 4 bytes each
+        return ERROR_MALFORMED;
+    }
+    mSeen = true;
+    uint32_t values[8];
+    for (int i = 0; i < 8; ++i, offset += 4) {
+        if (!source()->getUInt32(offset, &values[i])) {
+            return ERROR_IO;
+        }
+    }
+    // The unsigned->signed assignments are deliberate: horizOff/vertOff N may
+    // be negative per the spec; range/sign validity is enforced later by
+    // convertCleanApertureToRect().
+    mClap.width.n = values[0];
+    mClap.width.d = values[1];
+    mClap.height.n = values[2];
+    mClap.height.d = values[3];
+    mClap.horizOff.n = values[4];
+    mClap.horizOff.d = values[5];
+    mClap.vertOff.n = values[6];
+    mClap.vertOff.d = values[7];
+    return OK;
+}
+
struct ColrBox : public Box, public ItemProperty {
ColrBox(DataSourceHelper *source) :
Box(source, FOURCC("colr")) {}
@@ -992,6 +1036,11 @@
itemProperty = new IrotBox(source());
break;
}
+ case FOURCC("clap"):
+ {
+ itemProperty = new ClapBox(source());
+ break;
+ }
case FOURCC("colr"):
{
itemProperty = new ColrBox(source());
@@ -1599,6 +1648,12 @@
AMediaFormat_setInt32(meta,
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, image->width * image->height * 3 / 2);
+ int32_t left, top, right, bottom;
+ if (image->seenClap && convertCleanApertureToRect(image->width, image->height, image->clap,
+ &left, &top, &right, &bottom)) {
+ AMediaFormat_setRect(meta, AMEDIAFORMAT_KEY_DISPLAY_CROP, left, top, right - 1, bottom - 1);
+ }
+
if (!image->thumbnails.empty()) {
ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
if (thumbItemIndex >= 0) {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index a9ca078..559927a 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -2009,7 +2009,7 @@
uint8_t mhac_header[mhac_header_size];
off64_t data_offset = *offset;
- if (chunk_size < sizeof(mhac_header)) {
+ if (mLastTrack == NULL || chunk_size < sizeof(mhac_header)) {
return ERROR_MALFORMED;
}
diff --git a/media/module/extractors/mp4/include/HeifCleanAperture.h b/media/module/extractors/mp4/include/HeifCleanAperture.h
new file mode 100644
index 0000000..930197d
--- /dev/null
+++ b/media/module/extractors/mp4/include/HeifCleanAperture.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HEIF_CLEAN_APERTURE_H_
+#define HEIF_CLEAN_APERTURE_H_
+
+#include <stdint.h>
+
+namespace android {
+namespace heif {
+
+struct Fraction {
+ Fraction() = default;
+ Fraction(int32_t n, int32_t d);
+
+ void simplify();
+ bool commonDenominator(Fraction* f);
+ bool add(Fraction f);
+ bool subtract(Fraction f);
+ bool isInteger() const { return n % d == 0; }
+ int32_t getInt32() const { return n / d; }
+ int32_t n;
+ int32_t d;
+};
+
+struct CleanAperture {
+ Fraction width;
+ Fraction height;
+ Fraction horizOff;
+ Fraction vertOff;
+};
+
+// Converts the CleanAperture value into a rectangle with bounds left, top, right and bottom.
+// Returns true on success, false otherwise.
+bool convertCleanApertureToRect(uint32_t imageW, uint32_t imageH, const CleanAperture& image,
+ int32_t* left, int32_t* top, int32_t* right, int32_t* bottom);
+
+} // namespace heif
+} // namespace android
+
+#endif // HEIF_CLEAN_APERTURE_H_
diff --git a/media/module/extractors/mp4/tests/Android.bp b/media/module/extractors/mp4/tests/Android.bp
new file mode 100644
index 0000000..252cec2
--- /dev/null
+++ b/media/module/extractors/mp4/tests/Android.bp
@@ -0,0 +1,24 @@
+package {
+ default_applicable_licenses: ["frameworks_av_media_extractors_mp4_license"],
+}
+
+cc_test_host {
+ name: "HeifCleanApertureUnitTest",
+ gtest: true,
+
+ srcs: ["HeifCleanApertureUnitTest.cpp"],
+
+ header_libs: [
+ "libmp4extractor_headers",
+ ],
+
+ static_libs: [
+ "libmp4extractor",
+ ],
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
diff --git a/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp b/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp
new file mode 100644
index 0000000..6a84ae3
--- /dev/null
+++ b/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <HeifCleanAperture.h>
+#include <gtest/gtest.h>
+
+namespace {
+
+using android::heif::CleanAperture;
+using android::heif::convertCleanApertureToRect;
+using android::heif::Fraction;
+
+struct InvalidClapPropertyParam {
+ uint32_t width;
+ uint32_t height;
+ CleanAperture clap;
+};
+
+const InvalidClapPropertyParam kInvalidClapPropertyTestParams[] = {
+ // Zero or negative denominators.
+ {120, 160, {Fraction(96, 0), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, -1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 0), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, -1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 0), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, -1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 0)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, -1)}},
+ // Zero or negative clean aperture width or height.
+ {120, 160, {Fraction(-96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(0, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(-132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(0, 1), Fraction(0, 1), Fraction(0, 1)}},
+ // Clean aperture width or height is not an integer.
+ {120, 160, {Fraction(96, 5), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+ {120, 160, {Fraction(96, 1), Fraction(132, 5), Fraction(0, 1), Fraction(0, 1)}},
+ {722, 1024, {Fraction(385, 1), Fraction(330, 1), Fraction(103, 1), Fraction(-308, 1)}},
+ {1024, 722, {Fraction(330, 1), Fraction(385, 1), Fraction(-308, 1), Fraction(103, 1)}},
+};
+
+using InvalidClapPropertyTest = ::testing::TestWithParam<InvalidClapPropertyParam>;
+
+INSTANTIATE_TEST_SUITE_P(Parameterized, InvalidClapPropertyTest,
+ ::testing::ValuesIn(kInvalidClapPropertyTestParams));
+
+// Negative tests for the convertCleanApertureToRect() function.
+TEST_P(InvalidClapPropertyTest, ValidateClapProperty) {
+ const InvalidClapPropertyParam& param = GetParam();
+ int32_t left, top, right, bottom;
+ EXPECT_FALSE(convertCleanApertureToRect(param.width, param.height, param.clap, &left, &top,
+ &right, &bottom));
+}
+
+struct ValidClapPropertyParam {
+ uint32_t width;
+ uint32_t height;
+ CleanAperture clap;
+
+ int32_t left;
+ int32_t top;
+ int32_t right;
+ int32_t bottom;
+};
+
+const ValidClapPropertyParam kValidClapPropertyTestParams[] = {
+ {120,
+ 160,
+ {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)},
+ 12,
+ 14,
+ 108,
+ 146},
+ {120,
+ 160,
+ {Fraction(60, 1), Fraction(80, 1), Fraction(-30, 1), Fraction(-40, 1)},
+ 0,
+ 0,
+ 60,
+ 80},
+};
+
+using ValidClapPropertyTest = ::testing::TestWithParam<ValidClapPropertyParam>;
+
+INSTANTIATE_TEST_SUITE_P(Parameterized, ValidClapPropertyTest,
+ ::testing::ValuesIn(kValidClapPropertyTestParams));
+
+// Positive tests for the convertCleanApertureToRect() function.
+TEST_P(ValidClapPropertyTest, ValidateClapProperty) {
+ const ValidClapPropertyParam& param = GetParam();
+ int32_t left, top, right, bottom;
+ EXPECT_TRUE(convertCleanApertureToRect(param.width, param.height, param.clap, &left, &top,
+ &right, &bottom));
+ EXPECT_EQ(left, param.left);
+ EXPECT_EQ(top, param.top);
+ EXPECT_EQ(right, param.right);
+ EXPECT_EQ(bottom, param.bottom);
+}
+
+} // namespace
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index 5dbcd08..42e9078 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -73,14 +73,14 @@
}
bool MtpDataPacket::getUInt8(uint8_t& value) {
- if (mPacketSize - mOffset < sizeof(value))
+ if ((mPacketSize - mOffset < sizeof(value)) || (mOffset >= mBufferSize))
return false;
value = mBuffer[mOffset++];
return true;
}
bool MtpDataPacket::getUInt16(uint16_t& value) {
- if (mPacketSize - mOffset < sizeof(value))
+ if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+1) >= mBufferSize))
return false;
int offset = mOffset;
value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
@@ -89,7 +89,7 @@
}
bool MtpDataPacket::getUInt32(uint32_t& value) {
- if (mPacketSize - mOffset < sizeof(value))
+ if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+3) >= mBufferSize))
return false;
int offset = mOffset;
value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
@@ -99,7 +99,7 @@
}
bool MtpDataPacket::getUInt64(uint64_t& value) {
- if (mPacketSize - mOffset < sizeof(value))
+ if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+7) >= mBufferSize))
return false;
int offset = mOffset;
value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
@@ -420,8 +420,10 @@
}
void MtpDataPacket::putString(const char* s) {
- MtpStringBuffer string(s);
- string.writeToPacket(this);
+ if (s != NULL) {
+ MtpStringBuffer string(s);
+ string.writeToPacket(this);
+ }
}
void MtpDataPacket::putString(const uint16_t* string) {
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index ef8c9aa..5d68890 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -591,6 +591,9 @@
// Send the header data
mtp_data_header *header = reinterpret_cast<mtp_data_header*>(mIobuf[0].bufs.data());
+ if (header == NULL) {
+ return -1;
+ }
header->length = htole32(given_length);
header->type = htole16(2); // data packet
header->command = htole16(mfr.command);
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index 5faaac2..f196d87 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -97,7 +97,7 @@
}
else {
ALOGE("offset for buffer read is greater than buffer size!");
- abort();
+ return 0;
}
}
@@ -108,7 +108,7 @@
}
else {
ALOGE("offset for buffer read is greater than buffer size!");
- abort();
+ return 0;
}
}
@@ -175,6 +175,9 @@
#ifdef MTP_HOST
int MtpPacket::transfer(struct usb_request* request) {
+ if (request->dev == NULL) {
+ return -1;
+ }
int result = usb_device_bulk_transfer(request->dev,
request->endpoint,
request->buffer,
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
index a3d6a96..ba92b19 100644
--- a/media/ndk/fuzzer/Android.bp
+++ b/media/ndk/fuzzer/Android.bp
@@ -56,6 +56,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+        description: "The fuzzer targets the APIs of the libmediandk library",
+ vector: "local_no_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -63,6 +71,11 @@
name: "ndk_crypto_fuzzer",
srcs: ["ndk_crypto_fuzzer.cpp"],
defaults: ["libmediandk_fuzzer_defaults"],
+ fuzz_config: {
+ libfuzzer_options: [
+ "max_len=10000",
+ ],
+ },
}
cc_fuzz {
@@ -116,3 +129,16 @@
header_libs: ["libnativewindow_headers",],
defaults: ["libmediandk_fuzzer_defaults",],
}
+
+cc_fuzz {
+ name: "ndk_async_codec_fuzzer",
+ srcs: [
+ "ndk_async_codec_fuzzer.cpp",
+ "NdkMediaCodecFuzzerBase.cpp",
+ ],
+ header_libs: [
+ "libnativewindow_headers",
+ "libutils_headers",
+ ],
+ defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
index 0fd08b0..7f6bdd7 100644
--- a/media/ndk/fuzzer/README.md
+++ b/media/ndk/fuzzer/README.md
@@ -8,6 +8,7 @@
+ [ndk_drm_fuzzer](#NdkDrm)
+ [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
+ [ndk_sync_codec_fuzzer](#NdkSyncCodec)
++ [ndk_async_codec_fuzzer](#NdkAsyncCodec)
# <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
@@ -156,3 +157,16 @@
$ adb sync data
$ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
```
+
+# <a name="NdkAsyncCodec"></a>Fuzzer for NdkAsyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) ndk_async_codec_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_async_codec_fuzzer/ndk_async_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
new file mode 100644
index 0000000..5c860a8
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
@@ -0,0 +1,442 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <functional>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+using namespace android;
+using namespace std;
+
+constexpr int32_t kMaxCryptoInfoAPIs = 3;
+constexpr int32_t kMaxNdkCodecAPIs = 5;
+
+template <typename T>
+class CallBackQueue {
+ public:
+ void push(T elem) {
+ bool needsNotify = false;
+ {
+ unique_lock<mutex> lock(mMutex);
+ needsNotify = mQueue.empty();
+ mQueue.push(std::move(elem));
+ }
+ if (needsNotify) {
+ mQueueNotEmptyCondition.notify_one();
+ }
+ }
+
+ T pop() {
+ unique_lock<mutex> lock(mMutex);
+ if (mQueue.empty()) {
+ mQueueNotEmptyCondition.wait(lock, [this]() { return !mQueue.empty(); });
+ }
+ auto result = mQueue.front();
+ mQueue.pop();
+ return result;
+ }
+
+ private:
+ mutex mMutex;
+ std::queue<T> mQueue;
+ std::condition_variable mQueueNotEmptyCondition;
+};
+
+class CallBackHandle {
+ public:
+ CallBackHandle() : mSawError(false), mIsDone(false) {}
+
+ virtual ~CallBackHandle() {}
+
+ void ioThread();
+
+ // Implementation in child class (Decoder/Encoder)
+ virtual void invokeInputBufferAPI(AMediaCodec* codec, int32_t index) {
+ (void)codec;
+ (void)index;
+ }
+ virtual void onFormatChanged(AMediaCodec* codec, AMediaFormat* format) {
+ (void)codec;
+ (void)format;
+ }
+ virtual void receiveError(void) {}
+ virtual void invokeOutputBufferAPI(AMediaCodec* codec, int32_t index,
+ AMediaCodecBufferInfo* bufferInfo) {
+ (void)codec;
+ (void)index;
+ (void)bufferInfo;
+ }
+
+ // Keep a queue of all function callbacks.
+ typedef function<void()> IOTask;
+ CallBackQueue<IOTask> mIOQueue;
+ bool mSawError;
+ bool mIsDone;
+};
+
+void CallBackHandle::ioThread() {
+ while (!mIsDone && !mSawError) {
+ auto task = mIOQueue.pop();
+ task();
+ }
+}
+
+static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+ CallBackHandle* self = (CallBackHandle*)userdata;
+ self->mIOQueue.push([self, codec, index]() { self->invokeInputBufferAPI(codec, index); });
+}
+
+static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+ AMediaCodecBufferInfo* bufferInfo) {
+ CallBackHandle* self = (CallBackHandle*)userdata;
+ AMediaCodecBufferInfo bufferInfoCopy = *bufferInfo;
+ self->mIOQueue.push([self, codec, index, bufferInfoCopy]() {
+ AMediaCodecBufferInfo bc = bufferInfoCopy;
+ self->invokeOutputBufferAPI(codec, index, &bc);
+ });
+}
+
+static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+ (void)codec;
+ (void)userdata;
+ (void)format;
+};
+
+static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t err, int32_t actionCode,
+ const char* detail) {
+ CallBackHandle* self = (CallBackHandle*)userdata;
+ self->mSawError = true;
+ self->receiveError();
+ (void)codec;
+ (void)err;
+ (void)actionCode;
+ (void)detail;
+};
+
+class NdkAsyncCodecFuzzer : public NdkMediaCodecFuzzerBase, public CallBackHandle {
+ public:
+ NdkAsyncCodecFuzzer(const uint8_t* data, size_t size)
+ : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+ setFdp(&mFdp);
+ mStopCodec = false;
+ mSawInputEOS = false;
+ mSignalledError = false;
+ mIsEncoder = false;
+ mNumOfFrames = 0;
+ mNumInputFrames = 0;
+ };
+ ~NdkAsyncCodecFuzzer() {
+ mIOThreadPool->stop();
+ delete (mIOThreadPool);
+ };
+
+ void process();
+
+ static void codecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+ int64_t systemNano) {
+ (void)codec;
+ (void)userdata;
+ (void)mediaTimeUs;
+ (void)systemNano;
+ };
+ class ThreadPool {
+ public:
+ void start();
+ void queueJob(const std::function<void()>& job);
+ void stop();
+
+ private:
+ void ThreadLoop();
+ bool mShouldTerminate = false;
+ std::vector<std::thread> mThreads;
+ std::mutex mQueueMutex;
+ std::condition_variable mQueueMutexCondition;
+ std::queue<std::function<void()>> mJobs;
+ };
+
+ private:
+ FuzzedDataProvider mFdp;
+ AMediaCodec* mCodec = nullptr;
+ void invokeCodecCryptoInfoAPI();
+ void invokekAsyncCodecAPIs(bool isEncoder);
+ void invokeAsyncCodeConfigAPI();
+ void invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex);
+ void invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+ AMediaCodecBufferInfo* bufferInfo);
+ void invokeFormatAPI(AMediaCodec* codec);
+ void receiveError();
+ bool mStopCodec;
+ bool mSawInputEOS;
+ bool mSignalledError;
+ int32_t mNumOfFrames;
+ int32_t mNumInputFrames;
+ mutable Mutex mMutex;
+ bool mIsEncoder;
+ ThreadPool* mIOThreadPool = new ThreadPool();
+};
+
+void NdkAsyncCodecFuzzer::ThreadPool::start() {
+ const uint32_t numThreads = std::thread::hardware_concurrency();
+ mThreads.resize(numThreads);
+ for (uint32_t i = 0; i < numThreads; ++i) {
+ mThreads.at(i) = std::thread(&ThreadPool::ThreadLoop, this);
+ }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::ThreadLoop() {
+ while (true) {
+ std::function<void()> job;
+ {
+ std::unique_lock<std::mutex> lock(mQueueMutex);
+ mQueueMutexCondition.wait(lock, [this] { return !mJobs.empty() || mShouldTerminate; });
+ if (mShouldTerminate) {
+ return;
+ }
+ job = mJobs.front();
+ mJobs.pop();
+ }
+ job();
+ }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::queueJob(const std::function<void()>& job) {
+ {
+ std::unique_lock<std::mutex> lock(mQueueMutex);
+ mJobs.push(job);
+ }
+ mQueueMutexCondition.notify_one();
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::stop() {
+ {
+ std::unique_lock<std::mutex> lock(mQueueMutex);
+ mShouldTerminate = true;
+ }
+ mQueueMutexCondition.notify_all();
+ for (std::thread& active_thread : mThreads) {
+ active_thread.join();
+ }
+ mThreads.clear();
+}
+
+void NdkAsyncCodecFuzzer::receiveError(void) {
+ mSignalledError = true;
+}
+
+void NdkAsyncCodecFuzzer::invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex) {
+ size_t bufferSize = 0;
+ Mutex::Autolock autoLock(mMutex);
+ if (mSignalledError) {
+ CallBackHandle::mSawError = true;
+ return;
+ }
+ if (mStopCodec || bufferIndex < 0 || mSawInputEOS) {
+ return;
+ }
+
+ uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+ if (buffer) {
+ std::vector<uint8_t> bytesRead = mFdp.ConsumeBytes<uint8_t>(
+ std::min(mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+ memcpy(buffer, bytesRead.data(), bytesRead.size());
+ bufferSize = bytesRead.size();
+ } else {
+ mSignalledError = true;
+ return;
+ }
+
+ uint32_t flag = 0;
+ if (!bufferSize || mNumInputFrames == mNumOfFrames) {
+ flag |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+ flag);
+ mNumInputFrames++;
+}
+
+void NdkAsyncCodecFuzzer::invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+ AMediaCodecBufferInfo* bufferInfo) {
+ size_t bufferSize = 0;
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mSignalledError) {
+ CallBackHandle::mSawError = true;
+ return;
+ }
+
+ if (mStopCodec || bufferIndex < 0 || mIsDone) {
+ return;
+ }
+
+ if (!mIsEncoder) {
+ (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+ }
+ AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp.ConsumeBool());
+ mIsDone = (0 != (bufferInfo->flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM));
+}
+
+void NdkAsyncCodecFuzzer::invokeFormatAPI(AMediaCodec* codec) {
+ AMediaFormat* codecFormat = nullptr;
+ if (mFdp.ConsumeBool()) {
+ codecFormat = AMediaCodec_getInputFormat(codec);
+ } else {
+ codecFormat = AMediaCodec_getOutputFormat(codec);
+ }
+ if (codecFormat) {
+ AMediaFormat_delete(codecFormat);
+ }
+}
+
+void NdkAsyncCodecFuzzer::invokekAsyncCodecAPIs(bool isEncoder) {
+ ANativeWindow* nativeWindow = nullptr;
+
+ if (mFdp.ConsumeBool()) {
+ AMediaCodec_createInputSurface(mCodec, &nativeWindow);
+ }
+
+ if (AMEDIA_OK == AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow,
+ nullptr /* crypto */,
+ (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0))) {
+ mNumOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+ // Configure codecs to run in async mode.
+ AMediaCodecOnAsyncNotifyCallback callBack = {onAsyncInputAvailable, onAsyncOutputAvailable,
+ onAsyncFormatChanged, onAsyncError};
+ AMediaCodec_setAsyncNotifyCallback(mCodec, callBack, this);
+ mIOThreadPool->queueJob([this] { CallBackHandle::ioThread(); });
+
+ AMediaCodec_start(mCodec);
+ sleep(5);
+ int32_t count = 0;
+ while (++count <= mNumOfFrames) {
+ int32_t ndkcodecAPI =
+ mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+ switch (ndkcodecAPI) {
+ case 0: { // get input and output Format
+ invokeFormatAPI(mCodec);
+ break;
+ }
+ case 1: {
+ AMediaCodec_signalEndOfInputStream(mCodec);
+ mSawInputEOS = true;
+ break;
+ }
+ case 2: { // set parameters
+ // Create a new parameter and set
+ AMediaFormat* params = AMediaFormat_new();
+ AMediaFormat_setInt32(
+ params, "video-bitrate",
+ mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+ AMediaCodec_setParameters(mCodec, params);
+ AMediaFormat_delete(params);
+ break;
+ }
+ case 3: { // flush codec
+ AMediaCodec_flush(mCodec);
+ if (mFdp.ConsumeBool()) {
+ AMediaCodec_start(mCodec);
+ }
+ break;
+ }
+ case 4: {
+ char* name = nullptr;
+ AMediaCodec_getName(mCodec, &name);
+ AMediaCodec_releaseName(mCodec, name);
+ break;
+ }
+ case 5:
+ default: {
+ std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+ mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+ AMediaCodecOnFrameRendered callback = codecOnFrameRendered;
+ AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+ break;
+ }
+ }
+ }
+ {
+ Mutex::Autolock autoLock(mMutex);
+ mStopCodec = 1;
+ AMediaCodec_stop(mCodec);
+ }
+ }
+
+ if (nativeWindow) {
+ ANativeWindow_release(nativeWindow);
+ }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodeConfigAPI() {
+ mIOThreadPool->start();
+
+ while (mFdp.remaining_bytes() > 0) {
+ mIsEncoder = mFdp.ConsumeBool();
+ mCodec = createCodec(mIsEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+ if (mCodec) {
+ invokekAsyncCodecAPIs(mIsEncoder);
+ AMediaCodec_delete(mCodec);
+ }
+ }
+ mIOThreadPool->stop();
+}
+
+void NdkAsyncCodecFuzzer::invokeCodecCryptoInfoAPI() {
+ while (mFdp.remaining_bytes() > 0) {
+ AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+ int32_t ndkCryptoInfoAPI =
+ mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCryptoInfoAPIs);
+ switch (ndkCryptoInfoAPI) {
+ case 0: {
+ size_t sizes[kMaxCryptoKey];
+ AMediaCodecCryptoInfo_getEncryptedBytes(cryptoInfo, sizes);
+ break;
+ }
+ case 1: {
+ size_t sizes[kMaxCryptoKey];
+ AMediaCodecCryptoInfo_getClearBytes(cryptoInfo, sizes);
+ break;
+ }
+ case 2: {
+ uint8_t bytes[kMaxCryptoKey];
+ AMediaCodecCryptoInfo_getIV(cryptoInfo, bytes);
+ break;
+ }
+ case 3:
+ default: {
+ uint8_t bytes[kMaxCryptoKey];
+ AMediaCodecCryptoInfo_getKey(cryptoInfo, bytes);
+ break;
+ }
+ }
+ AMediaCodecCryptoInfo_delete(cryptoInfo);
+ }
+}
+
+void NdkAsyncCodecFuzzer::process() {
+ if (mFdp.ConsumeBool()) {
+ invokeCodecCryptoInfoAPI();
+ } else {
+ invokeAsyncCodeConfigAPI();
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ NdkAsyncCodecFuzzer ndkAsyncCodecFuzzer(data, size);
+ ndkAsyncCodecFuzzer.process();
+ return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 2b22f0f..304879d 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -16,14 +16,17 @@
#include <fuzzer/FuzzedDataProvider.h>
#include <media/NdkMediaCrypto.h>
+#include <functional>
constexpr size_t kMaxString = 256;
constexpr size_t kMinBytes = 0;
constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMaxRuns = 100;
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
AMediaUUID uuid = {};
+ size_t apiCount = 0;
int32_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, (size_t)sizeof(AMediaUUID));
for (size_t idx = 0; idx < maxLen; ++idx) {
uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
@@ -31,7 +34,14 @@
std::vector<uint8_t> initData =
fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
- while (fdp.remaining_bytes()) {
+ /*
+ * The AMediaCrypto_isCryptoSchemeSupported API doesn't consume any input bytes,
+ * so when PickValueInArray() selects it repeatedly, only one byte is consumed by 'fdp'.
+ * As a result, on larger inputs, AMediaCrypto_isCryptoSchemeSupported can run a large
+ * number of times, potentially causing a timeout crash.
+ * Therefore, to prevent this issue, while loop is limited to kMaxRuns.
+ */
+ while (fdp.remaining_bytes() && ++apiCount <= kMaxRuns) {
auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
[&]() {
AMediaCrypto_requiresSecureDecoderComponent(
diff --git a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
index 6c11798..6450742 100644
--- a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
@@ -18,6 +18,7 @@
#include <fuzzer/FuzzedDataProvider.h>
#include <gui/BufferQueue.h>
#include <media/NdkImageReader.h>
+#include <functional>
constexpr int32_t kMaxSize = INT_MAX;
constexpr int32_t kMinSize = 1;
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
index c19ea13..23e2eaf 100644
--- a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -18,6 +18,7 @@
#include <fcntl.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <media/NdkMediaFormat.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <sys/mman.h>
#include <unistd.h>
#include <utils/Log.h>
@@ -176,11 +177,13 @@
constexpr size_t kMaxBytes = 1000;
constexpr size_t kMinChoice = 0;
constexpr size_t kMaxChoice = 9;
+const size_t kMaxIteration = android::AMessage::maxAllowedEntries();
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
AMediaFormat* mediaFormat = AMediaFormat_new();
- while (fdp.remaining_bytes()) {
+ std::vector<std::string> nameCollection;
+ while (fdp.remaining_bytes() && nameCollection.size() < kMaxIteration) {
const char* name = nullptr;
std::string nameString;
if (fdp.ConsumeBool()) {
@@ -190,6 +193,11 @@
: fdp.ConsumeRandomLengthString(
fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
name = nameString.c_str();
+ std::vector<std::string>::iterator it =
+ find(nameCollection.begin(), nameCollection.end(), name);
+ if (it == nameCollection.end()) {
+ nameCollection.push_back(name);
+ }
}
switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
case 0: {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 9e0d5e4..e947ef6 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -43,6 +43,7 @@
private boolean mRender = false;
private ArrayList<BufferInfo> mInputBufferInfo;
private Stats mStats;
+ private String mMime;
private boolean mSawInputEOS;
private boolean mSawOutputEOS;
@@ -107,14 +108,14 @@
}
private MediaCodec createCodec(String codecName, MediaFormat format) throws IOException {
- String mime = format.getString(MediaFormat.KEY_MIME);
+ mMime = format.getString(MediaFormat.KEY_MIME);
try {
MediaCodec codec;
if (codecName.isEmpty()) {
- Log.i(TAG, "File mime type: " + mime);
- if (mime != null) {
- codec = MediaCodec.createDecoderByType(mime);
- Log.i(TAG, "Decoder created for mime type " + mime);
+ Log.i(TAG, "File mime type: " + mMime);
+ if (mMime != null) {
+ codec = MediaCodec.createDecoderByType(mMime);
+ Log.i(TAG, "Decoder created for mime type " + mMime);
return codec;
} else {
Log.e(TAG, "Mime type is null, please specify a mime type to create decoder");
@@ -122,12 +123,12 @@
}
} else {
codec = MediaCodec.createByCodecName(codecName);
- Log.i(TAG, "Decoder created with codec name: " + codecName + " mime: " + mime);
+ Log.i(TAG, "Decoder created with codec name: " + codecName + " mime: " + mMime);
return codec;
}
} catch (IllegalArgumentException ex) {
ex.printStackTrace();
- Log.e(TAG, "Failed to create decoder for " + codecName + " mime:" + mime);
+ Log.e(TAG, "Failed to create decoder for " + codecName + " mime:" + mMime);
return null;
}
}
@@ -167,6 +168,7 @@
}
if (mFrameReleaseQueue != null) {
mFrameReleaseQueue.setMediaCodec(mCodec);
+ mFrameReleaseQueue.setMime(mMime);
}
if (asyncMode) {
mCodec.setCallback(new MediaCodec.Callback() {
@@ -322,7 +324,7 @@
ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
BufferInfo bufInfo;
if (mNumInFramesProvided >= mNumInFramesRequired) {
- Log.i(TAG, "Input frame limit reached");
+ Log.i(TAG, "Input frame limit reached provided: " + mNumInFramesProvided);
mIndex = mInputBufferInfo.size() - 1;
bufInfo = mInputBufferInfo.get(mIndex);
if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 84554d3..90731ed 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -21,20 +21,32 @@
import androidx.annotation.NonNull;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
public class FrameReleaseQueue {
private static final String TAG = "FrameReleaseQueue";
+ private final String MIME_AV1 = "video/av01";
+ private final int AV1_SUPERFRAME_DELAY = 6;
+ private final int THRESHOLD_TIME = 5;
private MediaCodec mCodec;
private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
private ReleaseThread mReleaseThread;
- private boolean doFrameRelease = false;
+ private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
+ private boolean mReleaseJobStarted = false;
private boolean mRender = false;
private int mWaitTime = 40; // milliseconds per frame
private int mWaitTimeCorrection = 0;
private int mCorrectionLoopCount;
private int firstReleaseTime = -1;
- private int THRESHOLD_TIME = 5;
+ private int mAllowedDelayTime = THRESHOLD_TIME;
+ private int mFrameDelay = 0;
+ private final ScheduledExecutorService mScheduler = Executors.newScheduledThreadPool(1);
+
private static class FrameInfo {
private int number;
@@ -48,60 +60,69 @@
}
private class ReleaseThread extends Thread {
+ private int mLoopCount = 0;
+ private int mNextReleaseTime = 0;
+
+ @SuppressWarnings("FutureReturnValueIgnored")
public void run() {
- int nextReleaseTime = 0;
- int loopCount = 0;
- while (doFrameRelease || mFrameInfoQueue.size() > 0) {
+ /* Check if the release thread wakes up too late */
+ if (mLoopCount != 0) {
+ int delta = getCurSysTime() - mNextReleaseTime;
+ if (delta >= THRESHOLD_TIME) {
+ Log.d(TAG, "Release thread wake up late by " + delta);
+ /* For accidental late wake up, we should relax the timestamp
+ check for display time */
+ mAllowedDelayTime = 1 + delta;
+ } else {
+ mAllowedDelayTime = THRESHOLD_TIME;
+ }
+ }
+ if (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
FrameInfo curFrameInfo = mFrameInfoQueue.peek();
if (curFrameInfo == null) {
- nextReleaseTime += mWaitTime;
+ mNextReleaseTime += mWaitTime;
} else {
- if (curFrameInfo.displayTime == 0) {
+ if (firstReleaseTime == -1 || curFrameInfo.displayTime <= 0) {
// first frame of loop
firstReleaseTime = getCurSysTime();
- nextReleaseTime = firstReleaseTime + mWaitTime;
- popAndRelease(curFrameInfo, true);
- } else if (!doFrameRelease && mFrameInfoQueue.size() == 1) {
+ mNextReleaseTime = firstReleaseTime + mWaitTime;
+ popAndRelease(true);
+ } else if (!doFrameRelease.get() && mFrameInfoQueue.size() == 1) {
// EOS
Log.i(TAG, "EOS");
- popAndRelease(curFrameInfo, false);
+ popAndRelease(false);
} else {
- nextReleaseTime += mWaitTime;
+ mNextReleaseTime += mWaitTime;
int curSysTime = getCurSysTime();
int curMediaTime = curSysTime - firstReleaseTime;
while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
curFrameInfo.displayTime <= curMediaTime) {
- if (!((curMediaTime - curFrameInfo.displayTime) < THRESHOLD_TIME)) {
+ if (!((curMediaTime - curFrameInfo.displayTime) <= mAllowedDelayTime)) {
Log.d(TAG, "Dropping expired frame " + curFrameInfo.number +
" display time " + curFrameInfo.displayTime +
" current time " + curMediaTime);
- popAndRelease(curFrameInfo, false);
+ popAndRelease(false);
} else {
- popAndRelease(curFrameInfo, true);
+ popAndRelease(true);
}
curFrameInfo = mFrameInfoQueue.peek();
}
if (curFrameInfo != null && curFrameInfo.displayTime > curMediaTime) {
if ((curFrameInfo.displayTime - curMediaTime) < THRESHOLD_TIME) {
- popAndRelease(curFrameInfo, true);
+ // release the frame now as we are already there
+ popAndRelease(true);
}
}
}
}
- int sleepTime = nextReleaseTime - getCurSysTime();
- if (sleepTime > 0) {
- try {
- mReleaseThread.sleep(sleepTime);
- } catch (InterruptedException e) {
- Log.e(TAG, "Threw InterruptedException on sleep");
- }
- } else {
- Log.d(TAG, "Thread sleep time less than 1");
+
+ long sleepTime = (long)(mNextReleaseTime - getCurSysTime());
+ mScheduler.schedule(mReleaseThread, sleepTime, TimeUnit.MILLISECONDS);
+
+ if (mLoopCount % mCorrectionLoopCount == 0) {
+ mNextReleaseTime += mWaitTimeCorrection;
}
- if (loopCount % mCorrectionLoopCount == 0) {
- nextReleaseTime += mWaitTimeCorrection;
- }
- loopCount += 1;
+ mLoopCount += 1;
}
}
}
@@ -109,7 +130,7 @@
public FrameReleaseQueue(boolean render, int frameRate) {
this.mFrameInfoQueue = new LinkedBlockingQueue();
this.mReleaseThread = new ReleaseThread();
- this.doFrameRelease = true;
+ this.doFrameRelease.set(true);
this.mRender = render;
this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
int waitTimeRemainder = 1000 % frameRate;
@@ -127,6 +148,12 @@
this.mCodec = mediaCodec;
}
+ public void setMime(String mime) {
+ if (mime.equals(MIME_AV1)) {
+ mFrameDelay = AV1_SUPERFRAME_DELAY;
+ }
+ }
+
public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
@@ -135,8 +162,10 @@
Log.e(TAG, "Failed to push frame with buffer id " + curFrameInfo.bufferId);
return false;
}
- if (!mReleaseThread.isAlive()) {
- mReleaseThread.start();
+
+ if (!mReleaseJobStarted && frameNumber >= mFrameDelay) {
+ mScheduler.execute(mReleaseThread);
+ mReleaseJobStarted = true;
Log.i(TAG, "Started frame release thread");
}
return true;
@@ -146,29 +175,33 @@
return (int)(System.nanoTime()/1000000);
}
- private void popAndRelease(FrameInfo curFrameInfo, boolean renderThisFrame) {
+ @SuppressWarnings("FutureReturnValueIgnored")
+ private void popAndRelease(boolean renderThisFrame) {
+ final boolean actualRender = (renderThisFrame && mRender);
try {
- curFrameInfo = mFrameInfoQueue.take();
+ final FrameInfo curFrameInfo = mFrameInfoQueue.take();
+
+ CompletableFuture.runAsync(() -> {
+ try {
+ mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
+ } catch (IllegalStateException e) {
+ e.printStackTrace();
+ }
+ });
+
} catch (InterruptedException e) {
Log.e(TAG, "Threw InterruptedException on take");
}
- boolean actualRender = (renderThisFrame && mRender);
- try {
- mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
- } catch (IllegalStateException e) {
- Log.e(TAG,
- "Threw IllegalStateException on releaseOutputBuffer for frame "
- + curFrameInfo.number);
- }
}
public void stopFrameRelease() {
- doFrameRelease = false;
- try {
- mReleaseThread.join();
- Log.i(TAG, "Joined frame release thread");
- } catch (InterruptedException e) {
- Log.e(TAG, "Threw InterruptedException on thread join");
+ doFrameRelease.set(false);
+ while (mFrameInfoQueue.size() > 0) {
+ try {
+ TimeUnit.SECONDS.sleep(1);
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Threw InterruptedException on sleep");
+ }
}
}
}
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 7abb0b6..5b7319a 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -84,6 +84,7 @@
"libprocessinfoservice_aidl",
],
shared_libs: [
+ "com.android.media.audio-aconfig-cc",
"libaudioclient_aidl_conversion",
"libaudioutils", // for clock.h, Statistics.h
"libbase",
@@ -179,3 +180,8 @@
local_include_dirs: ["include"],
export_include_dirs: ["include"],
}
+
+cc_library_headers {
+ name: "mediautils_headers",
+ export_include_dirs: ["include", "."],
+}
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index eef460f..2946398 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -27,6 +27,7 @@
#include <media/AidlConversionUtil.h>
#include <android/content/AttributionSourceState.h>
+#include <com_android_media_audio.h>
#include <iterator>
#include <algorithm>
#include <pwd.h>
@@ -46,6 +47,7 @@
static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
static const String16 sCallAudioInterception("android.permission.CALL_AUDIO_INTERCEPTION");
+static const String16 sAndroidPermissionBluetoothConnect("android.permission.BLUETOOTH_CONNECT");
static String16 resolveCallingPackage(PermissionController& permissionController,
const std::optional<String16> opPackageName, uid_t uid) {
@@ -374,6 +376,52 @@
return NO_ERROR;
}
+/**
+ * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
+ * a native audio service (audio flinger, audio policy) must be anonymized.
+ * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
+ * are not anonymized.
+ *
+ * @param attributionSource The attribution source of the calling app.
+ * @param caller string identifying the caller for logging.
+ * @return true if the MAC addresses must be anonymized, false otherwise.
+ */
+bool mustAnonymizeBluetoothAddress(
+ const AttributionSourceState& attributionSource, const String16& caller) {
+ if (!com::android::media::audio::bluetooth_mac_address_anonymization()) {
+ return false;
+ }
+
+ uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+ if (isAudioServerOrSystemServerUid(uid)) {
+ return false;
+ }
+ const std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource);
+ if (!resolvedAttributionSource.has_value()) {
+ return true;
+ }
+ permission::PermissionChecker permissionChecker;
+ return permissionChecker.checkPermissionForPreflightFromDatasource(
+ sAndroidPermissionBluetoothConnect, resolvedAttributionSource.value(), caller,
+ AppOpsManager::OP_BLUETOOTH_CONNECT)
+ != permission::PermissionChecker::PERMISSION_GRANTED;
+}
+
+/**
+ * Modifies the passed MAC address string in place for consumption by unprivileged clients.
+ * The string is assumed to have a valid MAC address format.
+ * The anonymization must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java.
+ *
+ * @param address input/output the char string containing the MAC address to anonymize.
+ */
+void anonymizeBluetoothAddress(char *address) {
+ if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
+ return;
+ }
+ memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
+}
+
sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
const sp<IServiceManager> sm = defaultServiceManager();
if (sm == nullptr) {
diff --git a/media/utils/include/mediautils/ExtendedAccumulator.h b/media/utils/include/mediautils/ExtendedAccumulator.h
index 7e3e170..30045f3 100644
--- a/media/utils/include/mediautils/ExtendedAccumulator.h
+++ b/media/utils/include/mediautils/ExtendedAccumulator.h
@@ -48,9 +48,9 @@
public:
enum class Wrap {
- NORMAL = 0,
- UNDERFLOW = 1,
- OVERFLOW = 2,
+ Normal = 0,
+ Underflow = 1,
+ Overflow = 2,
};
using UnsignedInt = Integral;
@@ -63,11 +63,11 @@
std::pair<SignedInt, Wrap> poll(UnsignedInt value) {
auto acc = mAccumulated.load(std::memory_order_relaxed);
const auto bottom_bits = static_cast<UnsignedInt>(acc);
- std::pair<SignedInt, Wrap> res = {0, Wrap::NORMAL};
+ std::pair<SignedInt, Wrap> res = {0, Wrap::Normal};
const bool overflow = __builtin_sub_overflow(value, bottom_bits, &res.first);
if (overflow) {
- res.second = (res.first > 0) ? Wrap::OVERFLOW : Wrap::UNDERFLOW;
+ res.second = (res.first > 0) ? Wrap::Overflow : Wrap::Underflow;
}
const bool acc_overflow = __builtin_add_overflow(acc, res.first, &acc);
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 3d7981a..0b3a3f9 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -108,6 +108,10 @@
bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
void purgePermissionCache();
+bool mustAnonymizeBluetoothAddress(
+ const AttributionSourceState& attributionSource, const String16& caller);
+void anonymizeBluetoothAddress(char *address);
+
int32_t getOpForSource(audio_source_t source);
AttributionSourceState getCallingAttributionSource();
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 79621e2..4243b9c 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -138,7 +138,7 @@
template <typename Allocator>
class ScopedAllocator {
public:
- static constexpr size_t alignment() { return Allocator::alignment(); }
+ static size_t alignment() { return Allocator::alignment(); }
explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
@@ -218,7 +218,7 @@
template <typename Allocator, typename Policy>
class PolicyAllocator {
public:
- static constexpr size_t alignment() { return Allocator::alignment(); }
+ static size_t alignment() { return Allocator::alignment(); }
PolicyAllocator(Allocator allocator, Policy policy)
: mAllocator(allocator), mPolicy(std::move(policy)) {}
@@ -277,7 +277,7 @@
std::string name;
size_t allocation_number;
};
- static constexpr size_t alignment() { return Allocator::alignment(); }
+ static size_t alignment() { return Allocator::alignment(); }
SnoopingAllocator(Allocator allocator, std::string_view name)
: mName(name), mAllocator(std::move(allocator)) {}
@@ -362,16 +362,19 @@
template <class PrimaryAllocator, class SecondaryAllocator>
class FallbackAllocator {
public:
- static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
- static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+ static size_t alignment() { return PrimaryAllocator::alignment(); }
FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
- : mPrimary(primary), mSecondary(secondary) {}
+ : mPrimary(primary), mSecondary(secondary) {
+ verify_alignment();
+ }
// Default construct primary and secondary allocator
- FallbackAllocator() = default;
+ FallbackAllocator() {
+ verify_alignment();
+ }
template <typename T>
AllocationType allocate(T&& request) {
@@ -414,6 +417,10 @@
}
private:
+ void verify_alignment() {
+ LOG_ALWAYS_FATAL_IF(PrimaryAllocator::alignment() != SecondaryAllocator::alignment(),
+ "PrimaryAllocator::alignment() != SecondaryAllocator::alignment()");
+ }
[[no_unique_address]] PrimaryAllocator mPrimary;
[[no_unique_address]] SecondaryAllocator mSecondary;
};
@@ -423,7 +430,7 @@
template <typename Allocator>
class IndirectAllocator {
public:
- static constexpr size_t alignment() { return Allocator::alignment(); }
+ static size_t alignment() { return Allocator::alignment(); }
explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
: mAllocator(allocator) {}
@@ -449,7 +456,7 @@
// a shared memory mapped anonymous file) as allocations.
class MemoryHeapBaseAllocator {
public:
- static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+ static size_t alignment() { return kPageSize; }
static constexpr unsigned FLAGS = 0; // default flags
template <typename T>
@@ -475,5 +482,7 @@
// allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
static_cast<MemoryHeapBase&>(*heap).dispose();
}
+ private:
+ static inline const size_t kPageSize = getpagesize();
};
} // namespace android::mediautils
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 0689083..3fdc6eb 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -200,7 +200,10 @@
name: "timerthread_tests",
defaults: ["libmediautils_tests_defaults"],
-
+ // TODO(b/270180838)
+ test_options: {
+ unit_test: false,
+ },
srcs: [
"TimerThread-test.cpp",
],
diff --git a/media/utils/tests/extended_accumulator_tests.cpp b/media/utils/tests/extended_accumulator_tests.cpp
index e243e7e..2591df0 100644
--- a/media/utils/tests/extended_accumulator_tests.cpp
+++ b/media/utils/tests/extended_accumulator_tests.cpp
@@ -68,10 +68,10 @@
EXPECT_EQ(result, delta);
// Test overflow/underflow event reporting.
- if (next < base) EXPECT_EQ(TestDetect::Wrap::UNDERFLOW, status);
+ if (next < base) EXPECT_EQ(TestDetect::Wrap::Underflow, status);
else if (next > base + std::numeric_limits<TestUInt>::max())
- EXPECT_EQ(TestDetect::Wrap::OVERFLOW, status);
- else EXPECT_EQ(TestDetect::Wrap::NORMAL, status);
+ EXPECT_EQ(TestDetect::Wrap::Overflow, status);
+ else EXPECT_EQ(TestDetect::Wrap::Normal, status);
}
// Test this utility on every combination of prior and update value for the
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
index 11bc72a..c164cbd 100644
--- a/media/utils/tests/shared_memory_allocator_tests.cpp
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -19,21 +19,25 @@
#include <gtest/gtest.h>
#include <mediautils/SharedMemoryAllocator.h>
#include <sys/stat.h>
+#include <unistd.h>
#include <utils/Log.h>
using namespace android;
using namespace android::mediautils;
namespace {
+const size_t kPageSize = getpagesize();
+constexpr size_t kMaxPageSize = 65536; // arm64 supports 4k, 16k and 64k pages
+
void validate_block(const AllocationType& block) {
ASSERT_TRUE(block != nullptr);
- memset(block->unsecurePointer(), 10, 4096);
+ memset(block->unsecurePointer(), 10, kPageSize);
EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
}
template <size_t N = 0, bool FatalOwn = true>
struct ValidateForwarding {
- static constexpr size_t alignment() { return 1337; }
+ static size_t alignment() { return 1337; }
bool owns(const AllocationType& allocation) const {
if (allocation == owned) return true;
@@ -48,9 +52,9 @@
static inline size_t deallocate_all_count = 0;
static inline const AllocationType owned =
- MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+ MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
static inline const AllocationType not_owned =
- MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+ MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
};
@@ -64,17 +68,14 @@
shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
true);
static_assert(
- shared_allocator_impl::has_owns<
- PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
- true);
+ shared_allocator_impl::has_owns<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+ SizePolicy<kMaxPageSize>>> == true);
static_assert(
- shared_allocator_impl::has_dump<
- PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
- true);
-static_assert(
- shared_allocator_impl::has_deallocate_all<
- PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
- true);
+ shared_allocator_impl::has_dump<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+ SizePolicy<kMaxPageSize>>> == true);
+static_assert(shared_allocator_impl::has_deallocate_all<PolicyAllocator<
+ SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<kMaxPageSize>>> ==
+ true);
static_assert(shared_allocator_impl::has_owns<
FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
@@ -93,7 +94,7 @@
const auto memory = allocator.allocate(BasicAllocRequest{500});
ASSERT_TRUE(memory != nullptr);
const auto fd = dup(memory->getMemory()->getHeapID());
- EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+ EXPECT_EQ(memory->size(), static_cast<unsigned>(kPageSize));
EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
validate_block(memory);
allocator.deallocate(memory);
@@ -108,7 +109,7 @@
}
TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
- static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+ ASSERT_EQ(MemoryHeapBaseAllocator::alignment(), kPageSize);
MemoryHeapBaseAllocator allocator;
const auto first_memory = allocator.allocate(BasicAllocRequest{500});
const auto second_memory = allocator.allocate(BasicAllocRequest{500});
@@ -120,8 +121,8 @@
}
TEST(shared_memory_allocator_tests, snooping_allocator) {
- static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
- ValidateForwarding<0>::alignment());
+ ASSERT_EQ(SnoopingAllocator<ValidateForwarding<0>>::alignment(),
+ ValidateForwarding<0>::alignment());
SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
@@ -165,29 +166,29 @@
// TODO generic policy test
TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
PolicyAllocator allocator{MemoryHeapBaseAllocator{},
- SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+ SizePolicy<kMaxPageSize * 7, kMaxPageSize * 2, kMaxPageSize * 4>{}};
// Violate max size
- EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 5}) == nullptr);
// Violate min alloc size
- EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
- const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize}) == nullptr);
+ const auto first_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 4});
validate_block(first_memory);
// Violate pool size
- EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
- const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+ EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 4}) == nullptr);
+ const auto second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 3});
validate_block(second_memory);
allocator.deallocate(second_memory);
// Check pool size update after deallocation
- const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+ const auto new_second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 2});
validate_block(new_second_memory);
}
TEST(shared_memory_allocator_tests, indirect_allocator) {
- static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
- ValidateForwarding<0>::alignment());
+ ASSERT_EQ(IndirectAllocator<ValidateForwarding<0>>::alignment(),
+ ValidateForwarding<0>::alignment());
const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
IndirectAllocator allocator{allocator_handle};
- const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+ const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
EXPECT_TRUE(allocator_handle->owns(memory));
EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
allocator.deallocate(memory);
@@ -199,35 +200,37 @@
// Test appropriate forwarding of allocator, deallocate
const auto primary_allocator =
std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
- PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
- const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+ PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<kMaxPageSize>{}};
+ const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
EXPECT_TRUE(primary_allocator->owns(memory));
const auto& allocations = primary_allocator->getAllocations();
EXPECT_TRUE(allocations.size() == 1);
allocator.deallocate(memory);
EXPECT_TRUE(allocations.size() == 0);
- const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+ const auto memory2 = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation_2"});
EXPECT_TRUE(allocations.size() == 1);
EXPECT_TRUE(primary_allocator->owns(memory2));
allocator.deallocate(memory2);
EXPECT_FALSE(primary_allocator->owns(memory2));
EXPECT_TRUE(allocations.size() == 0);
// Test appropriate forwarding of own, dump, alignment, deallocate_all
- PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+ PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<kMaxPageSize>{}};
EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
- static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+ ASSERT_EQ(decltype(allocator2)::alignment(), ValidateForwarding<0>::alignment());
size_t prev = ValidateForwarding<0>::deallocate_all_count;
allocator2.deallocate_all();
EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
}
TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
- SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
- const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+ SnoopingAllocator allocator{
+ PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<kMaxPageSize * 2>{}}};
+ const auto memory = allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
validate_block(memory);
- ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+ ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{kMaxPageSize * 3}, "allocation_2"}) ==
+ nullptr);
const auto& allocations = allocator.getAllocations();
EXPECT_EQ(allocations.size(), 1ul);
for (const auto& [key, val] : allocations) {
@@ -240,23 +243,26 @@
TEST(shared_memory_allocator_tests, fallback_allocator) {
// Construct Fallback Allocator
const auto primary_allocator = std::make_shared<
- SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
- PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+ SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>>>(
+ PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>{},
+ "primary_allocator");
const auto secondary_allocator =
std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
- static_assert(decltype(fallback_allocator)::alignment() == 4096);
+ ASSERT_EQ(decltype(fallback_allocator)::alignment(), kPageSize);
// Basic Allocation Test
- const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+ const auto memory =
+ fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
validate_block(memory);
// Correct allocator selected
EXPECT_TRUE(fallback_allocator.owns(memory));
EXPECT_TRUE(primary_allocator->owns(memory));
EXPECT_FALSE(secondary_allocator->owns(memory));
// Test fallback allocation
- const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+ const auto memory2 =
+ fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_2"});
validate_block(memory2);
// Correct allocator selected
EXPECT_TRUE(fallback_allocator.owns(memory2));
@@ -276,7 +282,8 @@
EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
// Appropriate fallback after deallocation
- const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+ const auto memory3 =
+ fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_3"});
EXPECT_TRUE(fallback_allocator.owns(memory3));
EXPECT_TRUE(primary_allocator->owns(memory3));
EXPECT_FALSE(secondary_allocator->owns(memory3));
@@ -285,7 +292,8 @@
EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
fallback_allocator.deallocate(memory2);
EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
- const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+ const auto memory4 =
+ fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_4"});
EXPECT_TRUE(fallback_allocator.owns(memory4));
EXPECT_FALSE(primary_allocator->owns(memory4));
EXPECT_TRUE(secondary_allocator->owns(memory4));
@@ -311,7 +319,7 @@
EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
// Test alignment forwarding
- static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+ ASSERT_EQ(decltype(forward_test)::alignment(), Alloc1::alignment());
// Test deallocate_all forwarding
size_t prev1 = Alloc1::deallocate_all_count;
size_t prev2 = Alloc2::deallocate_all_count;
@@ -343,8 +351,8 @@
}
EXPECT_EQ(allocations.size(), 0ul);
// Test forwarding
- static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
- ValidateForwarding<0>::alignment());
+ ASSERT_EQ(ScopedAllocator<ValidateForwarding<0>>::alignment(),
+ ValidateForwarding<0>::alignment());
ScopedAllocator<ValidateForwarding<0>> forwarding{};
EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
}
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 0c878c9..afd28e5 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -138,7 +138,53 @@
],
}
-cc_library_shared {
+cc_defaults {
+ name: "libaudioflinger_dependencies",
+
+ shared_libs: [
+ "audioflinger-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "av-types-aidl-cpp",
+ "effect-aidl-cpp",
+ "libaudioclient_aidl_conversion",
+ "libactivitymanager_aidl",
+ "libaudioflinger_datapath",
+ "libaudioflinger_fastpath",
+ "libaudioflinger_timing",
+ "libaudioflinger_utils",
+ "libaudiofoundation",
+ "libaudiohal",
+ "libaudioprocessing",
+ "libaudioutils",
+ "libcutils",
+ "libutils",
+ "liblog",
+ "libbinder",
+ "libbinder_ndk",
+ "libaudioclient",
+ "libaudiomanager",
+ "libmediametrics",
+ "libmediautils",
+ "libnbaio",
+ "libnblog",
+ "libpermission",
+ "libpowermanager",
+ "libmemunreachable",
+ "libmedia_helper",
+ "libshmemcompat",
+ "libsounddose",
+ "libvibrator",
+ "packagemanager_aidl-cpp",
+ ],
+
+ static_libs: [
+ "libmedialogservice",
+ "libaudiospdif",
+ ],
+}
+
+
+cc_library {
name: "libaudioflinger",
defaults: [
@@ -146,6 +192,7 @@
"latest_android_hardware_audio_core_sounddose_ndk_export_shared_lib_header",
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_media_audio_common_types_cpp_shared",
+ "libaudioflinger_dependencies",
],
srcs: [
@@ -165,44 +212,6 @@
"frameworks/av/services/medialog",
],
- shared_libs: [
- "audioflinger-aidl-cpp",
- "audioclient-types-aidl-cpp",
- "av-types-aidl-cpp",
- "effect-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "libactivitymanager_aidl",
- "libaudioflinger_datapath",
- "libaudioflinger_fastpath",
- "libaudioflinger_timing",
- "libaudioflinger_utils",
- "libaudiofoundation",
- "libaudiohal",
- "libaudioprocessing",
- "libaudiospdif",
- "libaudioutils",
- "libcutils",
- "libutils",
- "liblog",
- "libbinder",
- "libbinder_ndk",
- "libaudioclient",
- "libaudiomanager",
- "libmedialogservice",
- "libmediametrics",
- "libmediautils",
- "libnbaio",
- "libnblog",
- "libpermission",
- "libpowermanager",
- "libmemunreachable",
- "libmedia_helper",
- "libshmemcompat",
- "libsounddose",
- "libvibrator",
- "packagemanager_aidl-cpp",
- ],
-
static_libs: [
"libcpustats",
"libpermission",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 1591400..d9162d8 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -879,6 +879,13 @@
write(fd, timeCheckStats.c_str(), timeCheckStats.size());
dprintf(fd, "\n");
}
+ // dump mutex stats
+ const auto mutexStats = audio_utils::mutex::all_stats_to_string();
+ write(fd, mutexStats.c_str(), mutexStats.size());
+
+ // dump held mutexes
+ const auto mutexThreadInfo = audio_utils::mutex::all_threads_to_string();
+ write(fd, mutexThreadInfo.c_str(), mutexThreadInfo.size());
}
return NO_ERROR;
}
@@ -1082,7 +1089,7 @@
input.sharedBuffer, sessionId, &output.flags,
callingPid, adjAttributionSource, input.clientInfo.clientTid,
&lStatus, portId, input.audioTrackCallback, isSpatialized,
- isBitPerfect);
+ isBitPerfect, &output.afTrackFlags);
LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
// we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
@@ -2084,6 +2091,9 @@
}
if (removed) {
removedEffects = purgeStaleEffects_l();
+ std::vector< sp<IAfEffectModule> > removedOrphanEffects = purgeOrphanEffectChains_l();
+ removedEffects.insert(removedEffects.end(), removedOrphanEffects.begin(),
+ removedOrphanEffects.end());
}
}
for (auto& effect : removedEffects) {
@@ -3543,15 +3553,51 @@
return removedEffects;
}
+std::vector< sp<IAfEffectModule> > AudioFlinger::purgeOrphanEffectChains_l()
+{
+ ALOGV("purging stale effects from orphan chains");
+ std::vector< sp<IAfEffectModule> > removedEffects;
+ for (size_t index = 0; index < mOrphanEffectChains.size(); index++) {
+ sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
+ audio_session_t session = mOrphanEffectChains.keyAt(index);
+ if (session == AUDIO_SESSION_OUTPUT_MIX || session == AUDIO_SESSION_DEVICE
+ || session == AUDIO_SESSION_OUTPUT_STAGE) {
+ continue;
+ }
+ size_t numSessionRefs = mAudioSessionRefs.size();
+ bool found = false;
+ for (size_t k = 0; k < numSessionRefs; k++) {
+ AudioSessionRef *ref = mAudioSessionRefs.itemAt(k);
+ if (ref->mSessionid == session) {
+ ALOGV(" session %d still exists for %d with %d refs", session, ref->mPid,
+ ref->mCnt);
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ for (size_t i = 0; i < chain->numberOfEffects(); i++) {
+ sp<IAfEffectModule> effect = chain->getEffectModule(i);
+ removedEffects.push_back(effect);
+ }
+ }
+ }
+ for (auto& effect : removedEffects) {
+ effect->unPin();
+ updateOrphanEffectChains_l(effect);
+ }
+ return removedEffects;
+}
+
// dumpToThreadLog_l() must be called with AudioFlinger::mutex() held
void AudioFlinger::dumpToThreadLog_l(const sp<IAfThreadBase> &thread)
{
constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
- audio_utils::FdToString fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
- const int fd = fdToString.fd();
+ audio_utils::FdToStringOldImpl fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
+ const int fd = fdToString.borrowFdUnsafe();
if (fd >= 0) {
thread->dump(fd, {} /* args */);
- mThreadLog.logs(-1 /* time */, fdToString.getStringAndClose());
+ mThreadLog.logs(-1 /* time */, fdToString.closeAndGetString());
}
}
@@ -4281,24 +4327,42 @@
return lStatus;
}
-status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
- audio_io_handle_t dstOutput)
+status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+ audio_io_handle_t dstIo)
+NO_THREAD_SAFETY_ANALYSIS
{
- ALOGV("%s() session %d, srcOutput %d, dstOutput %d",
- __func__, sessionId, srcOutput, dstOutput);
+ ALOGV("%s() session %d, srcIo %d, dstIo %d", __func__, sessionId, srcIo, dstIo);
audio_utils::lock_guard _l(mutex());
- if (srcOutput == dstOutput) {
- ALOGW("%s() same dst and src outputs %d", __func__, dstOutput);
+ if (srcIo == dstIo) {
+ ALOGW("%s() same dst and src outputs %d", __func__, dstIo);
return NO_ERROR;
}
- IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcOutput);
+ IAfRecordThread* const srcRecordThread = checkRecordThread_l(srcIo);
+ IAfRecordThread* const dstRecordThread = checkRecordThread_l(dstIo);
+ if (srcRecordThread != nullptr || dstRecordThread != nullptr) {
+ if (srcRecordThread != nullptr) {
+ srcRecordThread->mutex().lock();
+ }
+ if (dstRecordThread != nullptr) {
+ dstRecordThread->mutex().lock();
+ }
+ status_t ret = moveEffectChain_ll(sessionId, srcRecordThread, dstRecordThread);
+ if (srcRecordThread != nullptr) {
+ srcRecordThread->mutex().unlock();
+ }
+ if (dstRecordThread != nullptr) {
+ dstRecordThread->mutex().unlock();
+ }
+ return ret;
+ }
+ IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcIo);
if (srcThread == nullptr) {
- ALOGW("%s() bad srcOutput %d", __func__, srcOutput);
+ ALOGW("%s() bad srcIo %d", __func__, srcIo);
return BAD_VALUE;
}
- IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstOutput);
+ IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstIo);
if (dstThread == nullptr) {
- ALOGW("%s() bad dstOutput %d", __func__, dstOutput);
+ ALOGW("%s() bad dstIo %d", __func__, dstIo);
return BAD_VALUE;
}
@@ -4434,6 +4498,48 @@
return status;
}
+
+// moveEffectChain_ll must be called with both srcThread (if not null) and dstThread (if not null)
+// mutex()s held
+status_t AudioFlinger::moveEffectChain_ll(audio_session_t sessionId,
+ IAfRecordThread* srcThread, IAfRecordThread* dstThread)
+{
+ sp<IAfEffectChain> chain = nullptr;
+ if (srcThread != 0) {
+ const Vector<sp<IAfEffectChain>> effectChains = srcThread->getEffectChains_l();
+ for (size_t i = 0; i < effectChains.size(); i ++) {
+ if (effectChains[i]->sessionId() == sessionId) {
+ chain = effectChains[i];
+ break;
+ }
+ }
+ ALOGV_IF(effectChains.size() == 0, "%s: no effect chain on io=%d", __func__,
+ srcThread->id());
+ if (chain == nullptr) {
+ ALOGE("%s wrong session id %d", __func__, sessionId);
+ return BAD_VALUE;
+ }
+ ALOGV("%s: removing effect chain for session=%d io=%d", __func__, sessionId,
+ srcThread->id());
+ srcThread->removeEffectChain_l(chain);
+ } else {
+ chain = getOrphanEffectChain_l(sessionId);
+ if (chain == nullptr) {
+ ALOGE("%s: no orphan effect chain found for session=%d", __func__, sessionId);
+ return BAD_VALUE;
+ }
+ }
+ if (dstThread != 0) {
+ ALOGV("%s: adding effect chain for session=%d on io=%d", __func__, sessionId,
+ dstThread->id());
+ dstThread->addEffectChain_l(chain);
+ return NO_ERROR;
+ }
+ ALOGV("%s: parking to orphan effect chain for session=%d", __func__, sessionId);
+ putOrphanEffectChain_l(chain);
+ return NO_ERROR;
+}
+
status_t AudioFlinger::moveAuxEffectToIo(int EffectId,
const sp<IAfPlaybackThread>& dstThread, sp<IAfPlaybackThread>* srcThread)
{
@@ -4548,6 +4654,11 @@
bool AudioFlinger::updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
{
audio_utils::lock_guard _l(mutex());
+ return updateOrphanEffectChains_l(effect);
+}
+
+bool AudioFlinger::updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect)
+{
audio_session_t session = effect->sessionId();
ssize_t index = mOrphanEffectChains.indexOfKey(session);
ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index b1751da..0f75d6e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -427,7 +427,8 @@
// for as long as possible. The memory is only freed when it is needed for another log writer.
Vector< sp<NBLog::Writer> > mUnregisteredWriters;
audio_utils::mutex& unregisteredWritersMutex() const { return mUnregisteredWritersMutex; }
- mutable audio_utils::mutex mUnregisteredWritersMutex;
+ mutable audio_utils::mutex mUnregisteredWritersMutex{
+ audio_utils::MutexOrder::kAudioFlinger_UnregisteredWritersMutex};
AudioFlinger() ANDROID_API;
~AudioFlinger() override;
@@ -499,7 +500,7 @@
bool mPendingRequests;
// Mutex and condition variable around mPendingRequests' value
- audio_utils::mutex mMutex;
+ audio_utils::mutex mMutex{audio_utils::MutexOrder::kMediaLogNotifier_Mutex};
audio_utils::condition_variable mCondition;
// Duration of the sleep period after a processed request
@@ -553,6 +554,10 @@
// used by IAfDeviceEffectManagerCallback, IAfPatchPanelCallback, IAfThreadCallback
audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) final;
+ status_t moveEffectChain_ll(audio_session_t sessionId,
+ IAfRecordThread* srcThread, IAfRecordThread* dstThread)
+ REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
+
// return thread associated with primary hardware device, or NULL
DeviceTypeSet primaryOutputDevice_l() const REQUIRES(mutex());
@@ -585,6 +590,9 @@
std::vector< sp<IAfEffectModule> > purgeStaleEffects_l() REQUIRES(mutex());
+ std::vector< sp<IAfEffectModule> > purgeOrphanEffectChains_l() REQUIRES(mutex());
+ bool updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect) REQUIRES(mutex());
+
void broadcastParametersToRecordThreads_l(const String8& keyValuePairs) REQUIRES(mutex());
void forwardParametersToDownstreamPatches_l(
audio_io_handle_t upStream, const String8& keyValuePairs,
@@ -601,18 +609,19 @@
int mCnt;
};
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioFlinger_Mutex};
// protects mClients and mNotificationClients.
// must be locked after mutex() and ThreadBase::mutex() if both must be locked
// avoids acquiring AudioFlinger::mutex() from inside thread loop.
- mutable audio_utils::mutex mClientMutex;
+ mutable audio_utils::mutex mClientMutex{audio_utils::MutexOrder::kAudioFlinger_ClientMutex};
DefaultKeyedVector<pid_t, wp<Client>> mClients GUARDED_BY(clientMutex()); // see ~Client()
audio_utils::mutex& hardwareMutex() const { return mHardwareMutex; }
- mutable audio_utils::mutex mHardwareMutex;
+ mutable audio_utils::mutex mHardwareMutex{
+ audio_utils::MutexOrder::kAudioFlinger_HardwareMutex};
// NOTE: If both mMutex and mHardwareMutex mutexes must be held,
// always take mMutex before mHardwareMutex
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 0a7be75..201d147 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -60,18 +60,37 @@
__func__, handle, patch.mHalHandle,
patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
audio_utils::lock_guard _l(mutex());
- for (auto& effect : mDeviceEffects) {
- status_t status = effect.second->onCreatePatch(handle, patch);
- ALOGV("%s Effect onCreatePatch status %d", __func__, status);
- ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+ for (auto& effectProxies : mDeviceEffects) {
+ for (auto& effect : effectProxies.second) {
+ const status_t status = effect->onCreatePatch(handle, patch);
+ ALOGV("%s Effect onCreatePatch status %d", __func__, status);
+ ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+ }
}
}
void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
ALOGV("%s", __func__);
audio_utils::lock_guard _l(mutex());
- for (auto& effect : mDeviceEffects) {
- effect.second->onReleasePatch(handle);
+ for (auto& effectProxies : mDeviceEffects) {
+ for (auto& effect : effectProxies.second) {
+ effect->onReleasePatch(handle);
+ }
+ }
+}
+
+void DeviceEffectManager::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+ ALOGV("%s oldhandle %d newHandle %d mHalHandle %d device sink %08x",
+ __func__, oldHandle, newHandle, patch.mHalHandle,
+ patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+ audio_utils::lock_guard _l(mutex());
+ for (auto& effectProxies : mDeviceEffects) {
+ for (auto& effect : effectProxies.second) {
+ const status_t status = effect->onUpdatePatch(oldHandle, newHandle, patch);
+ ALOGV("%s Effect onUpdatePatch status %d", __func__, status);
+ ALOGW_IF(status != NO_ERROR, "%s onUpdatePatch error %d", __func__, status);
+ }
}
}
@@ -87,6 +106,7 @@
bool probe,
bool notifyFramesProcessed) {
sp<IAfDeviceEffectProxy> effect;
+ std::vector<sp<IAfDeviceEffectProxy>> effectsForDevice = {};
sp<IAfEffectHandle> handle;
status_t lStatus;
@@ -100,12 +120,21 @@
audio_utils::lock_guard _l(mutex());
auto iter = mDeviceEffects.find(device);
if (iter != mDeviceEffects.end()) {
- effect = iter->second;
- } else {
+ effectsForDevice = iter->second;
+ for (const auto& iterEffect : effectsForDevice) {
+ if (memcmp(&iterEffect->desc().uuid, &descriptor->uuid, sizeof(effect_uuid_t)) ==
+ 0) {
+ effect = iterEffect;
+ break;
+ }
+ }
+ }
+ if (effect == nullptr) {
effect = IAfDeviceEffectProxy::create(device, mMyCallback,
descriptor,
mAfDeviceEffectManagerCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT),
notifyFramesProcessed);
+ effectsForDevice.push_back(effect);
}
// create effect handle and connect it to effect module
handle = IAfEffectHandle::create(
@@ -119,7 +148,8 @@
lStatus = NO_ERROR;
}
if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
- mDeviceEffects.emplace(device, effect);
+ mDeviceEffects.erase(device);
+ mDeviceEffects.emplace(device, effectsForDevice);
}
}
}
@@ -187,8 +217,10 @@
String8 outStr;
outStr.appendFormat("%*sEffect for device %s address %s:\n", 2, "",
::android::toString(iter.first.mType).c_str(), iter.first.getAddress());
- write(fd, outStr.c_str(), outStr.size());
- iter.second->dump2(fd, 4);
+ for (const auto& effect : iter.second) {
+ write(fd, outStr.c_str(), outStr.size());
+ effect->dump2(fd, 4);
+ }
}
if (locked) {
@@ -199,7 +231,20 @@
size_t DeviceEffectManager::removeEffect(const sp<IAfDeviceEffectProxy>& effect)
{
audio_utils::lock_guard _l(mutex());
- mDeviceEffects.erase(effect->device());
+ const auto& iter = mDeviceEffects.find(effect->device());
+ if (iter != mDeviceEffects.end()) {
+ const auto& iterEffect = std::find_if(
+ iter->second.begin(), iter->second.end(), [&effect](const auto& effectProxy) {
+ return memcmp(&effectProxy->desc().uuid, &effect->desc().uuid,
+ sizeof(effect_uuid_t)) == 0;
+ });
+ if (iterEffect != iter->second.end()) {
+ iter->second.erase(iterEffect);
+ if (iter->second.empty()) {
+ mDeviceEffects.erase(effect->device());
+ }
+ }
+ }
return mDeviceEffects.size();
}
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 6a5c889..7045c8b 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -75,6 +75,9 @@
EXCLUDES_DeviceEffectManager_Mutex;
void onReleaseAudioPatch(audio_patch_handle_t handle) final
EXCLUDES_DeviceEffectManager_Mutex;
+ void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) final
+ EXCLUDES_DeviceEffectManager_Mutex;
private:
static status_t checkEffectCompatibility(const effect_descriptor_t *desc);
@@ -82,10 +85,11 @@
audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::DeviceEffectManager_Mutex) {
return mMutex;
}
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kDeviceEffectManager_Mutex};
const sp<IAfDeviceEffectManagerCallback> mAfDeviceEffectManagerCallback;
const sp<DeviceEffectManagerCallback> mMyCallback;
- std::map<AudioDeviceTypeAddr, sp<IAfDeviceEffectProxy>> mDeviceEffects GUARDED_BY(mutex());
+ std::map<AudioDeviceTypeAddr, std::vector<sp<IAfDeviceEffectProxy>>>
+ mDeviceEffects GUARDED_BY(mutex());
};
class DeviceEffectManagerCallback : public EffectCallbackInterface {
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 95fed5b..73a89e5 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -514,11 +514,12 @@
if (!locked) {
result.append("\t\tCould not lock Fx mutex:\n");
}
-
- result.append("\t\tSession State Registered Enabled Suspended:\n");
- result.appendFormat("\t\t%05d %03d %s %s %s\n",
- mSessionId, mState, mPolicyRegistered ? "y" : "n",
- mPolicyEnabled ? "y" : "n", mSuspended ? "y" : "n");
+ bool isInternal = isInternal_l();
+ result.append("\t\tSession State Registered Internal Enabled Suspended:\n");
+ result.appendFormat("\t\t%05d %03d %s %s %s %s\n",
+ mSessionId, mState, mPolicyRegistered ? "y" : "n", isInternal ? "y" : "n",
+ ((isInternal && isEnabled()) || (!isInternal && mPolicyEnabled)) ? "y" : "n",
+ mSuspended ? "y" : "n");
result.append("\t\tDescriptor:\n");
char uuidStr[64];
@@ -1004,8 +1005,9 @@
// mConfig.outputCfg.buffer.frameCount cannot be zero.
mMaxDisableWaitCnt = (uint32_t)std::max(
(uint64_t)1, // mMaxDisableWaitCnt must be greater than zero.
- (uint64_t)MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
- / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount));
+ (uint64_t)mConfig.outputCfg.buffer.frameCount == 0 ? 1
+ : (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
+ / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount)));
exit:
// TODO: consider clearing mConfig on error.
@@ -3323,6 +3325,23 @@
return status;
}
+status_t DeviceEffectProxy::onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+ audio_patch_handle_t newPatchHandle,
+ const IAfPatchPanel::Patch& /* patch */) {
+ status_t status = NAME_NOT_FOUND;
+ ALOGV("%s", __func__);
+ audio_utils::lock_guard _l(proxyMutex());
+ if (mEffectHandles.find(oldPatchHandle) != mEffectHandles.end()) {
+ ALOGV("%s replacing effect from handle %d to handle %d", __func__, oldPatchHandle,
+ newPatchHandle);
+ sp<IAfEffectHandle> effect = mEffectHandles.at(oldPatchHandle);
+ mEffectHandles.erase(oldPatchHandle);
+ mEffectHandles.emplace(newPatchHandle, effect);
+ status = NO_ERROR;
+ }
+ return status;
+}
+
status_t DeviceEffectProxy::onCreatePatch(
audio_patch_handle_t patchHandle, const IAfPatchPanel::Patch& patch) {
status_t status = NAME_NOT_FOUND;
@@ -3336,6 +3355,9 @@
}
if (status == NO_ERROR || status == ALREADY_EXISTS) {
audio_utils::lock_guard _l(proxyMutex());
+ size_t erasedHandle = mEffectHandles.erase(patchHandle);
+ ALOGV("%s %s effecthandle %p for patch %d",
+ __func__, (erasedHandle == 0 ? "adding" : "replacing"), handle.get(), patchHandle);
mEffectHandles.emplace(patchHandle, handle);
}
ALOGW_IF(status == BAD_VALUE,
@@ -3371,18 +3393,21 @@
if (mDescriptor.flags & EFFECT_FLAG_HW_ACC_TUNNEL) {
audio_utils::lock_guard _l(proxyMutex());
- mDevicePort = *port;
- mHalEffect = new EffectModule(mMyCallback,
+ if (mHalEffect != nullptr && mDevicePort.id == port->id) {
+ ALOGV("%s reusing HAL effect", __func__);
+ } else {
+ mDevicePort = *port;
+ mHalEffect = new EffectModule(mMyCallback,
const_cast<effect_descriptor_t *>(&mDescriptor),
mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
false /* pinned */, port->id);
- if (audio_is_input_device(mDevice.mType)) {
- mHalEffect->setInputDevice(mDevice);
- } else {
- mHalEffect->setDevices({mDevice});
+ if (audio_is_input_device(mDevice.mType)) {
+ mHalEffect->setInputDevice(mDevice);
+ } else {
+ mHalEffect->setDevices({mDevice});
+ }
+ mHalEffect->configure();
}
- mHalEffect->configure();
-
*handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
mNotifyFramesProcessed);
status = (*handle)->initCheck();
@@ -3470,6 +3495,23 @@
return mManagerCallback->removeEffectFromHal(&mDevicePort, effect);
}
+status_t DeviceEffectProxy::command(
+ int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+ std::vector<uint8_t>* reply) {
+ audio_utils::lock_guard _l(proxyMutex());
+ status_t status = EffectBase::command(cmdCode, cmdData, maxReplySize, reply);
+ if (status == NO_ERROR) {
+ for (auto& handle : mEffectHandles) {
+ sp<IAfEffectBase> effect = handle.second->effect().promote();
+ if (effect != nullptr) {
+ status = effect->command(cmdCode, cmdData, maxReplySize, reply);
+ }
+ }
+ }
+ ALOGV("%s status %d", __func__, status);
+ return status;
+}
+
bool DeviceEffectProxy::isOutput() const {
if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE) {
return mDevicePort.role == AUDIO_PORT_ROLE_SINK;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 8869b69..9208c88 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -135,7 +135,7 @@
DISALLOW_COPY_AND_ASSIGN(EffectBase);
// mutex for process, commands and handles list protection
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectBase_Mutex};
mediautils::atomic_sp<EffectCallbackInterface> mCallback; // parent effect chain
const int mId; // this instance unique ID
const audio_session_t mSessionId; // audio session ID
@@ -151,7 +151,7 @@
// Mutex protecting transactions with audio policy manager as mutex() cannot
// be held to avoid cross deadlocks with audio policy mutex
audio_utils::mutex& policyMutex() const { return mPolicyMutex; }
- mutable audio_utils::mutex mPolicyMutex;
+ mutable audio_utils::mutex mPolicyMutex{audio_utils::MutexOrder::kEffectBase_PolicyMutex};
// Effect is registered in APM or not
bool mPolicyRegistered = false;
// Effect enabled state communicated to APM. Enabled state corresponds to
@@ -367,7 +367,8 @@
DISALLOW_COPY_AND_ASSIGN(EffectHandle);
audio_utils::mutex& mutex() const { return mMutex; }
- mutable audio_utils::mutex mMutex; // protects IEffect method calls
+ // protects IEffect method calls
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
const wp<IAfEffectBase> mEffect; // pointer to controlled EffectModule
const sp<media::IEffectClient> mEffectClient; // callback interface for client notifications
/*const*/ sp<Client> mClient; // client for shared memory allocation, see
@@ -625,7 +626,8 @@
std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const;
- mutable audio_utils::mutex mMutex; // mutex protecting effect list
+ // mutex protecting effect list
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectChain_Mutex};
Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
audio_session_t mSessionId; // audio session ID
sp<EffectBufferHalInterface> mInBuffer; // chain input buffer
@@ -672,6 +674,9 @@
status_t onCreatePatch(audio_patch_handle_t patchHandle,
const IAfPatchPanel::Patch& patch) final;
+ status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
+ const IAfPatchPanel::Patch& patch) final;
+
void onReleasePatch(audio_patch_handle_t patchHandle) final;
size_t removeEffect(const sp<IAfEffectModule>& effect) final;
@@ -685,6 +690,11 @@
audio_channel_mask_t channelMask() const final;
uint32_t channelCount() const final;
+ status_t command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxReplySize,
+ std::vector<uint8_t>* reply) final;
+
void dump2(int fd, int spaces) const final;
private:
@@ -756,7 +766,8 @@
const sp<ProxyCallback> mMyCallback;
audio_utils::mutex& proxyMutex() const { return mProxyMutex; }
- mutable audio_utils::mutex mProxyMutex;
+ mutable audio_utils::mutex mProxyMutex{
+ audio_utils::MutexOrder::kDeviceEffectProxy_ProxyMutex};
std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyMutex
sp<IAfEffectModule> mHalEffect; // protected by mProxyMutex
struct audio_port_config mDevicePort = { .id = AUDIO_PORT_HANDLE_NONE };
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index ea0c6d9..8c5bc4b 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -125,6 +125,11 @@
virtual sp<IAfEffectModule> asEffectModule() = 0;
virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
+ virtual status_t command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxReplySize,
+ std::vector<uint8_t>* reply) = 0;
+
virtual void dump(int fd, const Vector<String16>& args) const = 0;
private:
@@ -133,11 +138,6 @@
virtual void setSuspended(bool suspended) = 0;
virtual bool suspended() const = 0;
- virtual status_t command(int32_t cmdCode,
- const std::vector<uint8_t>& cmdData,
- int32_t maxReplySize,
- std::vector<uint8_t>* reply) = 0;
-
virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
virtual IAfEffectHandle* controlHandle_l() = 0;
@@ -360,6 +360,9 @@
virtual status_t onCreatePatch(
audio_patch_handle_t patchHandle,
const IAfPatchPanel::Patch& patch) = 0;
+ virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+ audio_patch_handle_t newPatchHandle,
+ const IAfPatchPanel::Patch& patch) = 0;
virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 5a7429d..7084be9 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -455,7 +455,8 @@
audio_port_handle_t portId,
const sp<media::IAudioTrackCallback>& callback,
bool isSpatialized,
- bool isBitPerfect)
+ bool isBitPerfect,
+ audio_output_flags_t* afTrackFlags)
REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index ef932ec..41c5096 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -30,7 +30,7 @@
bool MelReporter::activateHalSoundDoseComputation(const std::string& module,
const sp<DeviceHalInterface>& device) {
- if (mSoundDoseManager->forceUseFrameworkMel()) {
+ if (mSoundDoseManager->isFrameworkMelForced()) {
ALOGD("%s: Forcing use of internal MEL computation.", __func__);
activateInternalSoundDoseComputation();
return false;
@@ -233,7 +233,17 @@
audio_utils::lock_guard _afl(mAfMelReporterCallback->mutex()); // AudioFlinger_Mutex
audio_utils::lock_guard _l(mutex());
- stopMelComputationForPatch_l(melPatch);
+ if (melPatch.csdActive) {
+ // only need to stop if patch was active
+ melPatch.csdActive = false;
+ stopMelComputationForPatch_l(melPatch);
+ }
+}
+
+void MelReporter::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+ onReleaseAudioPatch(oldHandle);
+ onCreateAudioPatch(newHandle, patch);
}
sp<media::ISoundDose> MelReporter::getSoundDoseInterface(
@@ -308,7 +318,7 @@
}
bool MelReporter::useHalSoundDoseInterface_l() {
- return !mSoundDoseManager->forceUseFrameworkMel() & mUseHalSoundDoseInterface;
+ return !mSoundDoseManager->isFrameworkMelForced() & mUseHalSoundDoseInterface;
}
std::string MelReporter::dump() {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index ce89b24..235dd11 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -89,6 +89,9 @@
const IAfPatchPanel::Patch& patch) final
EXCLUDES_AudioFlinger_Mutex;
void onReleaseAudioPatch(audio_patch_handle_t handle) final EXCLUDES_AudioFlinger_Mutex;
+ void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch) final EXCLUDES_AudioFlinger_Mutex;
/**
* The new metadata can determine whether we should compute MEL for the given thread.
@@ -135,7 +138,7 @@
* Lock for protecting the active mel patches. Do not mix with the AudioFlinger lock.
* Locking order AudioFlinger::mutex() -> PatchCommandThread::mutex() -> MelReporter::mutex().
*/
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kMelReporter_Mutex};
std::unordered_map<audio_patch_handle_t, ActiveMelPatch> mActiveMelPatches
GUARDED_BY(mutex());
std::unordered_map<audio_port_handle_t, int> mActiveDevices GUARDED_BY(mutex());
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
index f4c76d6..2cfefa0 100644
--- a/services/audioflinger/PatchCommandThread.cpp
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -58,6 +58,16 @@
releaseAudioPatchCommand(handle);
}
+void PatchCommandThread::updateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+ ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+ __func__, oldHandle, patch.mHalHandle,
+ patch.mAudioPatch.num_sinks,
+ patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+ updateAudioPatchCommand(oldHandle, newHandle, patch);
+}
+
bool PatchCommandThread::threadLoop()
{
audio_utils::unique_lock _l(mutex());
@@ -103,6 +113,21 @@
}
}
break;
+ case UPDATE_AUDIO_PATCH: {
+ const auto data = (UpdateAudioPatchData*) command->mData.get();
+ ALOGV("%s processing update audio patch old handle %d new handle %d",
+ __func__,
+ data->mOldHandle, data->mNewHandle);
+
+ for (const auto& listener : listenersCopy) {
+ auto spListener = listener.promote();
+ if (spListener) {
+ spListener->onUpdateAudioPatch(data->mOldHandle,
+ data->mNewHandle, data->mPatch);
+ }
+ }
+ }
+ break;
default:
ALOGW("%s unknown command %d", __func__, command->mCommand);
break;
@@ -144,6 +169,16 @@
sendCommand(command);
}
+void PatchCommandThread::updateAudioPatchCommand(
+ audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch) {
+ sp<Command> command = sp<Command>::make(UPDATE_AUDIO_PATCH,
+ new UpdateAudioPatchData(oldHandle, newHandle, patch));
+ ALOGV("%s adding update patch old handle %d new handle %d mHalHandle %d.",
+ __func__, oldHandle, newHandle, patch.mHalHandle);
+ sendCommand(command);
+}
+
void PatchCommandThread::exit() {
ALOGV("%s", __func__);
{
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
index 8ca96f1..c316d8a 100644
--- a/services/audioflinger/PatchCommandThread.h
+++ b/services/audioflinger/PatchCommandThread.h
@@ -38,6 +38,7 @@
enum {
CREATE_AUDIO_PATCH,
RELEASE_AUDIO_PATCH,
+ UPDATE_AUDIO_PATCH,
};
class PatchCommandListener : public virtual RefBase {
@@ -45,6 +46,9 @@
virtual void onCreateAudioPatch(audio_patch_handle_t handle,
const IAfPatchPanel::Patch& patch) = 0;
virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+ virtual void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch) = 0;
};
PatchCommandThread() : Thread(false /* canCallJava */) {}
@@ -56,6 +60,8 @@
void createAudioPatch(audio_patch_handle_t handle, const IAfPatchPanel::Patch& patch)
EXCLUDES_PatchCommandThread_Mutex;
void releaseAudioPatch(audio_patch_handle_t handle) EXCLUDES_PatchCommandThread_Mutex;
+ void updateAudioPatch(audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
// Thread virtuals
void onFirstRef() override;
@@ -66,6 +72,9 @@
void createAudioPatchCommand(audio_patch_handle_t handle,
const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
void releaseAudioPatchCommand(audio_patch_handle_t handle) EXCLUDES_PatchCommandThread_Mutex;
+ void updateAudioPatchCommand(audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
+
private:
class CommandData;
@@ -99,6 +108,18 @@
audio_patch_handle_t mHandle;
};
+ class UpdateAudioPatchData : public CommandData {
+ public:
+ UpdateAudioPatchData(audio_patch_handle_t oldHandle,
+ audio_patch_handle_t newHandle,
+ const IAfPatchPanel::Patch& patch)
+ : mOldHandle(oldHandle), mNewHandle(newHandle), mPatch(patch) {}
+
+ const audio_patch_handle_t mOldHandle;
+ const audio_patch_handle_t mNewHandle;
+ const IAfPatchPanel::Patch mPatch;
+ };
+
void sendCommand(const sp<Command>& command) EXCLUDES_PatchCommandThread_Mutex;
audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::PatchCommandThread_Mutex) {
@@ -109,11 +130,12 @@
return mListenerMutex;
}
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kPatchCommandThread_Mutex};
audio_utils::condition_variable mWaitWorkCV;
std::deque<sp<Command>> mCommands GUARDED_BY(mutex()); // list of pending commands
- mutable audio_utils::mutex mListenerMutex;
+ mutable audio_utils::mutex mListenerMutex{
+ audio_utils::MutexOrder::kPatchCommandThread_ListenerMutex};
std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(listenerMutex());
};
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 17591dd..4333cc8 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -134,7 +134,8 @@
if (patch->num_sources > 2) {
return INVALID_OPERATION;
}
-
+ bool reuseExistingHalPatch = false;
+ audio_patch_handle_t oldhandle = AUDIO_PATCH_HANDLE_NONE;
if (*handle != AUDIO_PATCH_HANDLE_NONE) {
auto iter = mPatches.find(*handle);
if (iter != mPatches.end()) {
@@ -152,6 +153,7 @@
if (removedPatch.mHalHandle != AUDIO_PATCH_HANDLE_NONE) {
audio_module_handle_t hwModule = AUDIO_MODULE_HANDLE_NONE;
const struct audio_patch &oldPatch = removedPatch.mAudioPatch;
+ oldhandle = *handle;
if (oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
(patch->sources[0].type != AUDIO_PORT_TYPE_DEVICE ||
oldPatch.sources[0].ext.device.hw_module !=
@@ -174,8 +176,12 @@
hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
}
halHandle = removedPatch.mHalHandle;
+ // Prevent removing/adding the device effect when the mix / device did not change
+ // and the HAL patch has not been released.
+ // Note that there is no patch leak at the HAL layer, as halHandle is reused.
+ reuseExistingHalPatch = (hwDevice == 0) && patchesHaveSameRoute(*patch, oldPatch);
}
- erasePatch(*handle);
+ erasePatch(*handle, reuseExistingHalPatch);
}
}
@@ -406,7 +412,23 @@
mAfPatchPanelCallback->updateOutDevicesForRecordThreads_l(devices);
}
+ // For endpoint patches, we do not need to re-evaluate the device effect state
+ // if the same HAL patch is reused (see calls to mAfPatchPanelCallback below)
+ if (endpointPatch) {
+ for (auto& p : mPatches) {
+ // end point patches are skipped so we do not compare against this patch
+ if (!p.second.mIsEndpointPatch && patchesHaveSameRoute(
+ newPatch.mAudioPatch, p.second.mAudioPatch)) {
+ ALOGV("%s() Sw Bridge endpoint reusing halHandle=%d", __func__,
+ p.second.mHalHandle);
+ halHandle = p.second.mHalHandle;
+ reuseExistingHalPatch = true;
+ break;
+ }
+ }
+ }
mAfPatchPanelCallback->mutex().unlock();
+
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
mAfPatchPanelCallback->mutex().lock();
if (status == NO_ERROR) {
@@ -436,7 +458,19 @@
*handle = static_cast<audio_patch_handle_t>(
mAfPatchPanelCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH));
newPatch.mHalHandle = halHandle;
- mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(*handle, newPatch);
+ // Skip device effect:
+ // -for sw bridge, as effects are likely held by endpoint patches
+ // -for endpoint reusing a HAL patch handle
+ if (!(newPatch.isSoftware()
+ || (endpointPatch && reuseExistingHalPatch))) {
+ if (reuseExistingHalPatch) {
+ mAfPatchPanelCallback->getPatchCommandThread()->updateAudioPatch(
+ oldhandle, *handle, newPatch);
+ } else {
+ mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(
+ *handle, newPatch);
+ }
+ }
if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
addSoftwarePatchToInsertedModules_l(insertedModule, *handle, &newPatch.mAudioPatch);
}
@@ -741,12 +775,14 @@
{
ALOGV("%s handle %d", __func__, handle);
status_t status = NO_ERROR;
+ bool doReleasePatch = true;
auto iter = mPatches.find(handle);
if (iter == mPatches.end()) {
return BAD_VALUE;
}
Patch &removedPatch = iter->second;
+ const bool isSwBridge = removedPatch.isSoftware();
const struct audio_patch &patch = removedPatch.mAudioPatch;
const struct audio_port_config &src = patch.sources[0];
@@ -798,22 +834,40 @@
break;
}
}
- mAfPatchPanelCallback->mutex().unlock();
- status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
- mAfPatchPanelCallback->mutex().lock();
+ // Check whether the removed patch's HAL handle is used in another non-endpoint patch.
+ // Since only non-endpoint patches are checked, the removed (endpoint) patch is not
+ // itself considered (it is removed from mPatches later).
+ if (removedPatch.mIsEndpointPatch) {
+ for (auto& p: mPatches) {
+ if (!p.second.mIsEndpointPatch
+ && p.second.mHalHandle == removedPatch.mHalHandle) {
+ ALOGV("%s() Sw Bridge endpoint used existing halHandle=%d, do not release",
+ __func__, p.second.mHalHandle);
+ doReleasePatch = false;
+ break;
+ }
+ }
+ }
+ if (doReleasePatch) {
+ mAfPatchPanelCallback->mutex().unlock();
+ status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+ mAfPatchPanelCallback->mutex().lock();
+ }
} break;
default:
status = BAD_VALUE;
}
- erasePatch(handle);
+ erasePatch(handle, /* reuseExistingHalPatch= */ !doReleasePatch || isSwBridge);
return status;
}
-void PatchPanel::erasePatch(audio_patch_handle_t handle) {
+void PatchPanel::erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch) {
mPatches.erase(handle);
removeSoftwarePatchFromInsertedModules(handle);
- mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+ if (!reuseExistingHalPatch) {
+ mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+ }
}
/* List connected audio ports and they attributes */
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index b107eb0..f84b40e 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -89,7 +89,38 @@
const struct audio_patch *patch)
REQUIRES(audio_utils::AudioFlinger_Mutex);
void removeSoftwarePatchFromInsertedModules(audio_patch_handle_t handle);
- void erasePatch(audio_patch_handle_t handle);
+ /**
+ * Erase the patch referred to by its handle.
+ * @param handle of the patch to be erased
+ * @param reuseExistingHalPatch if set, do not trigger the listeners' callback; listeners
+ * will instead receive an onUpdateAudioPatch when the patch is recreated.
+ * This prevents, for example, the DeviceEffectManager from spuriously removing / adding a
+ * device effect on an already opened input / output mix.
+ */
+ void erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch = false);
+
+ /**
+ * Returns true if the old and new patches passed as arguments describe the same
+ * connections between the first sink and the first source
+ * @param oldPatch previous patch
+ * @param newPatch new patch
+ * @return true if the route is unchanged between the old and new patch, false otherwise
+ */
+ inline bool patchesHaveSameRoute(
+ const struct audio_patch &newPatch, const struct audio_patch &oldPatch) const {
+ return (newPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+ oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+ newPatch.sources[0].id == oldPatch.sources[0].id &&
+ newPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+ oldPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+ newPatch.sinks[0].ext.mix.handle == oldPatch.sinks[0].ext.mix.handle) ||
+ (newPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+ oldPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+ newPatch.sinks[0].id == oldPatch.sinks[0].id &&
+ newPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+ oldPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+ newPatch.sources[0].ext.mix.handle == oldPatch.sources[0].ext.mix.handle);
+ }
const sp<IAfPatchPanelCallback> mAfPatchPanelCallback;
std::map<audio_patch_handle_t, Patch> mPatches;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index ae60ed0..4a1948c 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -63,8 +63,8 @@
wp<IAfThreadBase> mThread;
std::atomic_bool mHasOpPlayAudio;
- const AttributionSourceState mAttributionSource;
- const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
+ const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as
+ // int32_t
const int mId; // for logging purposes only
const uid_t mUid;
const String16 mPackageName;
@@ -470,7 +470,8 @@
SourceMetadatas mTrackMetadatas;
/** Protects mTrackMetadatas against concurrent access. */
audio_utils::mutex& trackMetadataMutex() const { return mTrackMetadataMutex; }
- mutable audio_utils::mutex mTrackMetadataMutex;
+ mutable audio_utils::mutex mTrackMetadataMutex{
+ audio_utils::MutexOrder::kOutputTrack_TrackMetadataMutex};
}; // end of OutputTrack
// playback track, used by PatchPanel
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 8d3de38..3de9968 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -221,7 +221,8 @@
std::unique_ptr<void, decltype(free)*> mSinkBuffer; // frame size aligned continuous buffer
std::unique_ptr<void, decltype(free)*> mStubBuffer; // buffer used for AudioBufferProvider
size_t mUnconsumedFrames = 0;
- mutable audio_utils::mutex mReadMutex;
+ mutable audio_utils::mutex mReadMutex{
+ audio_utils::MutexOrder::kPassthruPatchRecord_ReadMutex};
audio_utils::condition_variable mReadCV;
size_t mReadBytes = 0; // GUARDED_BY(readMutex())
status_t mReadError = NO_ERROR; // GUARDED_BY(readMutex())
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index f75790e..c4b41fd 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -85,6 +85,7 @@
#include <utils/Trace.h>
#include <fcntl.h>
+#include <future>
#include <linux/futex.h>
#include <math.h>
#include <memory>
@@ -2378,7 +2379,8 @@
audio_port_handle_t portId,
const sp<media::IAudioTrackCallback>& callback,
bool isSpatialized,
- bool isBitPerfect)
+ bool isBitPerfect,
+ audio_output_flags_t *afTrackFlags)
{
size_t frameCount = *pFrameCount;
size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2697,6 +2699,7 @@
if (mType == DIRECT) {
trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
}
+ *afTrackFlags = trackFlags;
track = IAfTrack::create(this, client, streamType, attr, sampleRate, format,
channelMask, frameCount,
@@ -3897,15 +3900,25 @@
{
aflog::setThreadWriter(mNBLogWriter.get());
+ std::future<void> priorityBoostFuture; // joined on dtor; this is a one-shot boost.
if (mType == SPATIALIZER) {
const pid_t tid = getTid();
if (tid == -1) { // odd: we are here, we must be a running thread.
ALOGW("%s: Cannot update Spatializer mixer thread priority, no tid", __func__);
} else {
- const int priorityBoost = requestSpatializerPriority(getpid(), tid);
- if (priorityBoost > 0) {
- stream()->setHalThreadPriority(priorityBoost);
- }
+ // We launch the priority boost request in a separate thread because
+ // the SchedulingPolicyService may not be available during early
+ // boot time, with a wait causing boot delay.
+ // There is also a PrioConfigEvent that does this, but it will also
+ // block other config events. This command should be able
+ // to run concurrent with other stream commands.
+ priorityBoostFuture = std::async(std::launch::async,
+ [tid, output_sp = stream()]() {
+ const int priorityBoost = requestSpatializerPriority(getpid(), tid);
+ if (priorityBoost > 0) {
+ output_sp->setHalThreadPriority(priorityBoost);
+ }
+ });
}
}
@@ -8179,6 +8192,12 @@
case IAfTrackBase::PAUSING:
mActiveTracks.remove(activeTrack);
activeTrack->setState(IAfTrackBase::PAUSED);
+ if (activeTrack->isFastTrack()) {
+ ALOGV("%s fast track is paused, thus removed from active list", __func__);
+ // Keep a ref on fast track to wait for FastCapture thread to get updated
+ // state before potential track removal
+ fastTrackToRemove = activeTrack;
+ }
doBroadcast = true;
size--;
continue;
@@ -10289,7 +10308,7 @@
NO_THREAD_SAFETY_ANALYSIS // clang bug
{
ALOGV("%s", __FUNCTION__);
- audio_utils::lock_guard(mutex());
+ audio_utils::lock_guard l_{mutex()};
if (mHalStream == 0) {
return NO_INIT;
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index b84079a..a5afdd8 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -112,7 +112,8 @@
return mMutex;
}
const int mType; // event type e.g. CFG_EVENT_IO
- mutable audio_utils::mutex mMutex; // mutex associated with mCondition
+ // mutex associated with mCondition
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kConfigEvent_Mutex};
audio_utils::condition_variable mCondition; // condition for status return
// NO_THREAD_SAFETY_ANALYSIS Can we add GUARDED_BY?
@@ -537,7 +538,7 @@
audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::ThreadBase_Mutex) {
return mMutex;
}
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kThreadBase_Mutex};
void onEffectEnable(const sp<IAfEffectModule>& effect) final EXCLUDES_ThreadBase_Mutex;
void onEffectDisable() final EXCLUDES_ThreadBase_Mutex;
@@ -983,7 +984,8 @@
audio_port_handle_t portId,
const sp<media::IAudioTrackCallback>& callback,
bool isSpatialized,
- bool isBitPerfect) final
+ bool isBitPerfect,
+ audio_output_flags_t* afTrackFlags) final
REQUIRES(audio_utils::AudioFlinger_Mutex);
bool isTrackActive(const sp<IAfTrack>& track) const final {
@@ -1453,7 +1455,8 @@
sp<AsyncCallbackThread> mCallbackThread;
audio_utils::mutex& audioTrackCbMutex() const { return mAudioTrackCbMutex; }
- mutable audio_utils::mutex mAudioTrackCbMutex;
+ mutable audio_utils::mutex mAudioTrackCbMutex{
+ audio_utils::MutexOrder::kPlaybackThread_AudioTrackCbMutex};
// Record of IAudioTrackCallback
std::map<sp<IAfTrack>, sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
@@ -1808,7 +1811,7 @@
// to indicate that the callback has been received via resetDraining()
uint32_t mDrainSequence;
audio_utils::condition_variable mWaitWorkCV;
- mutable audio_utils::mutex mMutex;
+ mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
bool mAsyncError;
audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 224c65b..fe582eb 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -571,9 +571,7 @@
getPackagesForUid(uid, packages);
if (isServiceUid(uid)) {
if (packages.isEmpty()) {
- ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
- id,
- attr.usage,
+ ALOGW("OpPlayAudio: not muting track:%d usage:%d for service UID %d", id, attr.usage,
uid);
return nullptr;
}
@@ -597,7 +595,6 @@
audio_usage_t usage, int id, uid_t uid)
: mThread(wp<IAfThreadBase>::fromExisting(thread)),
mHasOpPlayAudio(true),
- mAttributionSource(attributionSource),
mUsage((int32_t)usage),
mId(id),
mUid(uid),
@@ -617,10 +614,11 @@
// make sure not to broadcast the initial state since it is not needed and could
// cause a deadlock since this method can be called with the mThread->mLock held
checkPlayAudioForUsage(/*doBroadcast=*/false);
- if (mAttributionSource.packageName.has_value()) {
+ if (mPackageName.size()) {
mOpCallback = new PlayAudioOpCallback(this);
- mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO,
- mPackageName, mOpCallback);
+ mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mPackageName, mOpCallback);
+ } else {
+ ALOGW("Skipping OpPlayAudioMonitor due to null package name");
}
}
@@ -631,16 +629,16 @@
// Note this method is never called (and never to be) for audio server / patch record track
// - not called from constructor due to check on UID,
// - not called from PlayAudioOpCallback because the callback is not installed in this case
-void OpPlayAudioMonitor::checkPlayAudioForUsage(bool doBroadcast)
-{
- const bool hasAppOps = mAttributionSource.packageName.has_value()
- && mAppOpsManager.checkAudioOpNoThrow(
- AppOpsManager::OP_PLAY_AUDIO, mUsage, mUid, mPackageName) ==
- AppOpsManager::MODE_ALLOWED;
+void OpPlayAudioMonitor::checkPlayAudioForUsage(bool doBroadcast) {
+ const bool hasAppOps =
+ mPackageName.size() &&
+ mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO, mUsage, mUid,
+ mPackageName) == AppOpsManager::MODE_ALLOWED;
bool shouldChange = !hasAppOps; // check if we need to update.
if (mHasOpPlayAudio.compare_exchange_strong(shouldChange, hasAppOps)) {
- ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasAppOps ? "not " : "");
+ ALOGI("OpPlayAudio: track:%d package:%s usage:%d %smuted", mId,
+ String8(mPackageName).c_str(), mUsage, hasAppOps ? "not " : "");
if (doBroadcast) {
auto thread = mThread.promote();
if (thread != nullptr && thread->type() == IAfThreadBase::OFFLOAD) {
@@ -658,11 +656,11 @@
void OpPlayAudioMonitor::PlayAudioOpCallback::opChanged(int32_t op,
const String16& packageName) {
- // we only have uid, so we need to check all package names anyway
- UNUSED(packageName);
if (op != AppOpsManager::OP_PLAY_AUDIO) {
return;
}
+
+ ALOGI("%s OP_PLAY_AUDIO callback received for %s", __func__, String8(packageName).c_str());
sp<OpPlayAudioMonitor> monitor = mMonitor.promote();
if (monitor != NULL) {
monitor->checkPlayAudioForUsage(/*doBroadcast=*/true);
@@ -1316,7 +1314,9 @@
if (!playbackThread->isTrackActive(this)) {
reset();
mState = STOPPED;
- } else if (!isFastTrack() && !isOffloaded() && !isDirect()) {
+ } else if (isPatchTrack() || (!isFastTrack() && !isOffloaded() && !isDirect())) {
+ // for a PatchTrack (whether fast or not), do not drain but move directly
+ // to STOPPED to avoid closing while active.
mState = STOPPED;
} else {
// For fast tracks prepareTracks_l() will set state to STOPPING_2
@@ -1645,22 +1645,18 @@
if (mMuteEventExtras == nullptr) {
mMuteEventExtras = std::make_unique<os::PersistableBundle>();
}
- mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
- static_cast<int>(muteState));
+ mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey), static_cast<int>(muteState));
- result = audioManager->portEvent(mPortId,
- PLAYER_UPDATE_MUTED,
- mMuteEventExtras);
+ result = audioManager->portEvent(mPortId, PLAYER_UPDATE_MUTED, mMuteEventExtras);
}
if (result == OK) {
+ ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+ int(muteState), int(mMuteState));
mMuteState = muteState;
} else {
- ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
- __func__,
- id(),
- mPortId,
- result);
+ ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
+ mPortId, result);
}
}
diff --git a/services/audioflinger/afutils/DumpTryLock.h b/services/audioflinger/afutils/DumpTryLock.h
index e4ad112..05e050e 100644
--- a/services/audioflinger/afutils/DumpTryLock.h
+++ b/services/audioflinger/afutils/DumpTryLock.h
@@ -19,7 +19,6 @@
#include <audio_utils/mutex.h>
#include <utils/Mutex.h>
-#include <utils/Timers.h>
namespace android::afutils {
@@ -36,13 +35,7 @@
inline bool dumpTryLock(audio_utils::mutex& mutex) TRY_ACQUIRE(true, mutex)
{
static constexpr int64_t kDumpLockTimeoutNs = 1'000'000'000;
-
- const int64_t timeoutNs = kDumpLockTimeoutNs + systemTime(SYSTEM_TIME_REALTIME);
- const struct timespec ts = {
- .tv_sec = static_cast<time_t>(timeoutNs / 1000000000),
- .tv_nsec = static_cast<long>(timeoutNs % 1000000000),
- };
- return pthread_mutex_timedlock(mutex.native_handle(), &ts) == 0;
+ return mutex.try_lock(kDumpLockTimeoutNs);
}
} // android::afutils
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 39c80d8..92bf3b3 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -33,6 +33,10 @@
namespace {
+// Port handle used when CSD is computed on all devices. Should be a different value than
+// AUDIO_PORT_HANDLE_NONE which is associated with a sound dose callback failure
+constexpr audio_port_handle_t CSD_ON_ALL_DEVICES_PORT_HANDLE = -1;
+
int64_t getMonotonicSecond() {
struct timespec now_ts;
if (clock_gettime(CLOCK_MONOTONIC, &now_ts) != 0) {
@@ -175,6 +179,13 @@
}
audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
+ if (isComputeCsdForcedOnAllDevices()) {
+ // If CSD is forced on all devices, return a dedicated port id. Used only in testing.
+ // This is necessary since the patches that are registered before
+ // setComputeCsdOnAllDevices will not be contributing to mActiveDevices
+ return CSD_ON_ALL_DEVICES_PORT_HANDLE;
+ }
+
const std::lock_guard _l(mLock);
audio_devices_t type;
@@ -491,7 +502,7 @@
ALOGV("%s csd is disabled", __func__);
return false;
}
- if (forceComputeCsdOnAllDevices()) {
+ if (isComputeCsdForcedOnAllDevices()) {
return true;
}
@@ -515,7 +526,7 @@
ALOGV("%s csd is disabled", __func__);
return false;
}
- if (forceComputeCsdOnAllDevices()) {
+ if (isComputeCsdForcedOnAllDevices()) {
return true;
}
@@ -536,7 +547,7 @@
mUseFrameworkMel = useFrameworkMel;
}
-bool SoundDoseManager::forceUseFrameworkMel() const {
+bool SoundDoseManager::isFrameworkMelForced() const {
const std::lock_guard _l(mLock);
return mUseFrameworkMel;
}
@@ -546,7 +557,7 @@
mComputeCsdOnAllDevices = computeCsdOnAllDevices;
}
-bool SoundDoseManager::forceComputeCsdOnAllDevices() const {
+bool SoundDoseManager::isComputeCsdForcedOnAllDevices() const {
const std::lock_guard _l(mLock);
return mComputeCsdOnAllDevices;
}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 6e0bc34..347eabe 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -136,8 +136,8 @@
// used for testing only
size_t getCachedMelRecordsSize() const;
- bool forceUseFrameworkMel() const;
- bool forceComputeCsdOnAllDevices() const;
+ bool isFrameworkMelForced() const;
+ bool isComputeCsdForcedOnAllDevices() const;
/** Method for converting from audio_utils::CsdRecord to media::SoundDoseRecord. */
static media::SoundDoseRecord csdRecordToSoundDoseRecord(const audio_utils::CsdRecord& legacy);
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 5f6dcb9..294080b 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -262,11 +262,11 @@
}
TEST_F(SoundDoseManagerTest, GetDefaultForceComputeCsdOnAllDevices) {
- EXPECT_FALSE(mSoundDoseManager->forceComputeCsdOnAllDevices());
+ EXPECT_FALSE(mSoundDoseManager->isComputeCsdForcedOnAllDevices());
}
TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
- EXPECT_FALSE(mSoundDoseManager->forceUseFrameworkMel());
+ EXPECT_FALSE(mSoundDoseManager->isFrameworkMelForced());
}
TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStopsNonHeadphone) {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index d49a002..b164159 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -270,6 +270,10 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+ virtual status_t updatePolicyMix(
+ const AudioMix& mix,
+ const std::vector<AudioMixMatchCriterion>& updatedCriteria) = 0;
+
virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
= 0;
virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index fa3a5d3..a2ebb8d 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -34,5 +34,18 @@
{
"name": "audiopolicy_tests"
}
+ ],
+ "postsubmit": [
+ {
+ "name": "GtsGmscoreHostTestCases",
+ "options" : [
+ {
+ "include-filter": "com.google.android.gts.audio.AudioHostTest"
+ },
+ {
+ "include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
+ }
+ ]
+ }
]
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 1e57edd..13b70e5 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -328,6 +328,13 @@
wp<AudioPolicyMix> mPolicyMix; // non NULL when used by a dynamic policy
virtual uint32_t getRecommendedMuteDurationMs() const { return 0; }
+ virtual std::string info() const {
+ std::string result;
+ result.append("[portId:" );
+ result.append(android::internal::ToString(getId()));
+ result.append("]");
+ return result;
+ }
protected:
const sp<PolicyAudioPort> mPolicyAudioPort;
@@ -471,6 +478,8 @@
PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+ virtual std::string info() const override;
+
const sp<IOProfile> mProfile; // I/O profile this output derives from
audio_io_handle_t mIoHandle; // output handle
uint32_t mLatency; //
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 7e29e10..b560bc4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -59,6 +59,8 @@
status_t unregisterMix(const AudioMix& mix);
+ status_t updateMix(const AudioMix& mix, const std::vector<AudioMixMatchCriterion>& newCriteria);
+
void closeOutput(sp<SwAudioOutputDescriptor> &desc);
/**
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index 59eee52..c2e4b11 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -25,6 +25,10 @@
namespace android {
+class AudioInputCollection;
+class AudioInputDescriptor;
+class AudioPolicyClientInterface;
+
class EffectDescriptor : public RefBase
{
public:
@@ -40,6 +44,8 @@
int mId; // effect unique ID
audio_io_handle_t mIo; // io the effect is attached to
+ bool mIsOrphan = false; // on creation, effect is not yet attached but not yet considered an orphan
+ bool mEnabledWhenMoved = false; // Backup enabled state before being moved
audio_session_t mSession; // audio session the effect is on
effect_descriptor_t mDesc; // effect descriptor
bool mEnabled; // enabled state: CPU load being used or not
@@ -69,12 +75,29 @@
void moveEffects(audio_session_t session,
audio_io_handle_t srcOutput,
- audio_io_handle_t dstOutput);
+ audio_io_handle_t dstOutput,
+ AudioPolicyClientInterface *clientInterface);
void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
+ void moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo, audio_io_handle_t dstIo,
+ const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+ void moveEffectsForIo(audio_session_t sessionId, audio_io_handle_t dstIo,
+ const AudioInputCollection *inputs, AudioPolicyClientInterface *mClientInterface);
+ void putOrphanEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+ const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+ void putOrphanEffectsForIo(audio_io_handle_t srcIo);
+ /**
+ * @brief Checks if an effect session was already attached to an io handle and returns it if
+ * found. Checks only for the given effect type if effectType is not null, or for any effect
+ * otherwise.
+ * @param sessionId to consider.
+ * @param effectType to consider.
+ * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
+ */
audio_io_handle_t getIoForSession(audio_session_t sessionId,
const effect_uuid_t *effectType = nullptr);
-
+ bool hasOrphansForSession(audio_session_t sessionId);
+ EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
private:
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 37cbbc4..d027564 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -1003,6 +1003,18 @@
return clientsForStream;
}
+std::string SwAudioOutputDescriptor::info() const {
+ std::string result;
+ result.append("[" );
+ result.append(AudioOutputDescriptor::info());
+ result.append("[io:" );
+ result.append(android::internal::ToString(mIoHandle));
+ result.append(", " );
+ result.append(isDuplicated() ? "duplicating" : mProfile->getTagName());
+ result.append("]]");
+ return result;
+}
+
void SwAudioOutputCollection::dump(String8 *dst) const
{
dst->appendFormat("\n Outputs (%zu):\n", size());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index f870b4f..7ee75c7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -21,6 +21,7 @@
#include <iterator>
#include <optional>
#include <regex>
+#include <vector>
#include "AudioPolicyMix.h"
#include "TypeConverter.h"
#include "HwModule.h"
@@ -225,6 +226,31 @@
return BAD_VALUE;
}
+status_t AudioPolicyMixCollection::updateMix(
+ const AudioMix& mix, const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
+ if (!areMixCriteriaConsistent(mix.mCriteria)) {
+ ALOGE("updateMix(): updated criteria are not consistent "
+ "(MATCH & EXCLUDE criteria of the same type)");
+ return BAD_VALUE;
+ }
+
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioPolicyMix>& registeredMix = itemAt(i);
+ if (mix.mDeviceType == registeredMix->mDeviceType &&
+ mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0 &&
+ mix.mRouteFlags == registeredMix->mRouteFlags) {
+ registeredMix->mCriteria = updatedCriteria;
+ ALOGV("updateMix(): updated mix for dev=0x%x addr=%s", mix.mDeviceType,
+ mix.mDeviceAddress.c_str());
+ return NO_ERROR;
+ }
+ }
+
+ ALOGE("updateMix(): mix not registered for dev=0x%x addr=%s", mix.mDeviceType,
+ mix.mDeviceAddress.c_str());
+ return BAD_VALUE;
+}
+
status_t AudioPolicyMixCollection::getAudioPolicyMix(audio_devices_t deviceType,
const String8& address, sp<AudioPolicyMix> &policyMix) const
{
@@ -269,6 +295,7 @@
ALOGV("getOutputForAttr() querying %zu mixes:", size());
primaryMix.clear();
bool mixesDisallowsRequestedDevice = false;
+ const bool isMmapRequested = (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
for (size_t i = 0; i < size(); i++) {
sp<AudioPolicyMix> policyMix = itemAt(i);
const bool primaryOutputMix = !is_mix_loopback_render(policyMix->mRouteFlags);
@@ -279,6 +306,17 @@
mixesDisallowsRequestedDevice = true;
}
+ if (!primaryOutputMix && isMmapRequested) {
+ // AAudio does not support MMAP_NOIRQ loopback render, and there is no way with
+ // the current MmapStreamInterface::start to reject a specific client added to a shared
+ // mmap stream.
+ // As a result all MMAP_NOIRQ requests have to be rejected when a loopback render
+ // policy is present. That ensures no shared mmap stream is used when a loopback
+ // render policy is registered.
+ ALOGD("%s: Rejecting MMAP_NOIRQ request due to LOOPBACK|RENDER mix present.", __func__);
+ return INVALID_OPERATION;
+ }
+
if (primaryOutputMix && primaryMix != nullptr) {
ALOGV("%s: Skiping %zu: Primary output already found", __func__, i);
continue; // Primary output already found
@@ -289,12 +327,21 @@
continue; // skip the mix
}
- if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) && is_mix_loopback(policyMix->mRouteFlags)) {
- // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
- // using dynamic audio policy.
- ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic audio policy mix.",
- __func__);
- return INVALID_OPERATION;
+ if (isMmapRequested) {
+ if (is_mix_loopback(policyMix->mRouteFlags)) {
+ // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
+ // using dynamic audio policy.
+ ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic "
+ "audio policy mix.", __func__);
+ return INVALID_OPERATION;
+ }
+ if (mixDevice != nullptr) {
+ // TODO(b/301619865): Only disallow the device that doesn't support MMAP.
+ ALOGD("%s: Rejecting MMAP_NOIRQ request matched to dynamic audio policy "
+ "mix pointing to device %s which the mmap support is unknown at this moment",
+ __func__, mixDevice->toString(false).c_str());
+ return INVALID_OPERATION;
+ }
}
if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 3f9c8b0..c85df0f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -18,9 +18,15 @@
//#define LOG_NDEBUG 0
#include <android-base/stringprintf.h>
+
+#include "AudioInputDescriptor.h"
#include "EffectDescriptor.h"
#include <utils/String8.h>
+#include <AudioPolicyInterface.h>
+#include "AudioPolicyMix.h"
+#include "HwModule.h"
+
namespace android {
void EffectDescriptor::dump(String8 *dst, int spaces) const
@@ -175,30 +181,57 @@
return MAX_EFFECTS_MEMORY;
}
-void EffectDescriptorCollection::moveEffects(audio_session_t session,
- audio_io_handle_t srcOutput,
- audio_io_handle_t dstOutput)
+void EffectDescriptorCollection::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+ audio_io_handle_t dstIo,
+ AudioPolicyClientInterface *clientInterface)
{
- ALOGV("%s session %d srcOutput %d dstOutput %d", __func__, session, srcOutput, dstOutput);
+ ALOGV("%s session %d srcIo %d dstIo %d", __func__, sessionId, srcIo, dstIo);
for (size_t i = 0; i < size(); i++) {
sp<EffectDescriptor> effect = valueAt(i);
- if (effect->mSession == session && effect->mIo == srcOutput) {
- effect->mIo = dstOutput;
+ if (effect->mSession == sessionId && effect->mIo == srcIo) {
+ effect->mIo = dstIo;
+ // Back up enable state before any updatePolicyState call
+ effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
+ }
+ }
+ clientInterface->moveEffects(sessionId, srcIo, dstIo);
+}
+
+void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids, audio_io_handle_t dstIo)
+{
+ ALOGV("%s num effects %zu, first ID %d, dstIo %d",
+ __func__, ids.size(), ids.size() ? ids[0] : 0, dstIo);
+ for (size_t i = 0; i < size(); i++) {
+ sp<EffectDescriptor> effect = valueAt(i);
+ if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
+ effect->mIo = dstIo;
+ effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
}
}
}
-void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids,
- audio_io_handle_t dstOutput)
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId)
{
- ALOGV("%s num effects %zu, first ID %d, dstOutput %d",
- __func__, ids.size(), ids.size() ? ids[0] : 0, dstOutput);
- for (size_t i = 0; i < size(); i++) {
+ for (size_t i = 0; i < size(); ++i) {
sp<EffectDescriptor> effect = valueAt(i);
- if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
- effect->mIo = dstOutput;
+ if (effect->mSession == sessionId && effect->mIsOrphan) {
+ return true;
}
}
+ return false;
+}
+
+EffectDescriptorCollection EffectDescriptorCollection::getOrphanEffectsForSession(
+ audio_session_t sessionId) const
+{
+ EffectDescriptorCollection effects;
+ for (size_t i = 0; i < size(); i++) {
+ sp<EffectDescriptor> effect = valueAt(i);
+ if (effect->mSession == sessionId && effect->mIsOrphan) {
+ effects.add(keyAt(i), effect);
+ }
+ }
+ return effects;
}
audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
@@ -214,6 +247,84 @@
return AUDIO_IO_HANDLE_NONE;
}
+void EffectDescriptorCollection::moveEffectsForIo(audio_session_t session,
+ audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+ AudioPolicyClientInterface *clientInterface)
+{
+ // No src io: try to find from effect session the src Io to move from
+ audio_io_handle_t srcIo = getIoForSession(session);
+ if (hasOrphansForSession(session) || (srcIo != AUDIO_IO_HANDLE_NONE && srcIo != dstIo)) {
+ moveEffects(session, srcIo, dstIo, inputs, clientInterface);
+ }
+}
+
+void EffectDescriptorCollection::moveEffects(audio_session_t session,
+ audio_io_handle_t srcIo, audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+ AudioPolicyClientInterface *clientInterface)
+{
+ if ((srcIo != AUDIO_IO_HANDLE_NONE && srcIo == dstIo)
+ || (srcIo == AUDIO_IO_HANDLE_NONE && !hasOrphansForSession(session))) {
+ return;
+ }
+ // Either we may find orphan effects for the given session, or effects for this
+ // session might have been assigned first to another input (this may happen when an
+ // input is released or recreated after the client sets its preferred device)
+ EffectDescriptorCollection effectsToMove;
+ if (srcIo == AUDIO_IO_HANDLE_NONE) {
+ ALOGV("%s: restoring effects for session %d from orphan park to io=%d", __func__,
+ session, dstIo);
+ effectsToMove = getOrphanEffectsForSession(session);
+ } else {
+ ALOGV("%s: moving effects for session %d from io=%d to io=%d", __func__, session, srcIo,
+ dstIo);
+ if (const sp<AudioInputDescriptor>& previousInputDesc = inputs->valueFor(srcIo)) {
+ effectsToMove = getEffectsForIo(srcIo);
+ for (size_t i = 0; i < effectsToMove.size(); ++i) {
+ const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+ effect->mEnabledWhenMoved = effect->mEnabled;
+ previousInputDesc->trackEffectEnabled(effect, false);
+ }
+ } else {
+ ALOGW("%s: no effect descriptor for srcIo %d", __func__, srcIo);
+ }
+ }
+ moveEffects(session, srcIo, dstIo, clientInterface);
+
+ if (dstIo != AUDIO_IO_HANDLE_NONE) {
+ if (const sp<AudioInputDescriptor>& inputDesc = inputs->valueFor(dstIo)) {
+ for (size_t i = 0; i < effectsToMove.size(); ++i) {
+ const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+ inputDesc->trackEffectEnabled(effect, effect->mEnabledWhenMoved);
+ }
+ } else {
+ ALOGW("%s: no effect descriptor for dstIo %d", __func__, dstIo);
+ }
+ }
+}
+
+void EffectDescriptorCollection::putOrphanEffectsForIo(audio_io_handle_t srcIo)
+{
+ for (size_t i = 0; i < size(); i++) {
+ sp<EffectDescriptor> effect = valueAt(i);
+ if (effect->mIo == srcIo) {
+ effect->mIo = AUDIO_IO_HANDLE_NONE;
+ effect->mIsOrphan = true;
+ }
+ }
+}
+
+void EffectDescriptorCollection::putOrphanEffects(audio_session_t session,
+ audio_io_handle_t srcIo, const AudioInputCollection *inputs,
+ AudioPolicyClientInterface *clientInterface)
+{
+ if (getIoForSession(session) != srcIo) {
+ // Effect session not held by this client io handle
+ return;
+ }
+ ALOGV("%s: park effects for session %d and io=%d to orphans", __func__, session, srcIo);
+ moveEffects(session, srcIo, AUDIO_IO_HANDLE_NONE, inputs, clientInterface);
+}
+
EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
{
EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 671b30a..86600f4 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -112,3 +112,15 @@
name: "r_submix_audio_policy_configuration",
srcs: ["r_submix_audio_policy_configuration.xml"],
}
+filegroup {
+ name: "bluetooth_audio_policy_configuration_7_0",
+ srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
+}
+filegroup {
+ name: "bluetooth_with_le_audio_policy_configuration_7_0",
+ srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
+}
+filegroup {
+ name: "hearing_aid_audio_policy_configuration_7_0",
+ srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
+}
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index b6089b7..3aec064 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -99,6 +99,7 @@
":audio_policy_engine_criteria",
// ":audio_policy_engine_criterion_types",
// ":edd_files",
+ ":parameter_frameworks_configuration_schemas",
],
out: ["PolicyConfigurableDomains.xml"],
}
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index e06bbb3..13cc165 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -175,34 +175,37 @@
// - cannot route from voice call RX OR
// - audio HAL version is < 3.0 and TX device is on the primary HW module
if (getPhoneState() == AUDIO_MODE_IN_CALL) {
- audio_devices_t txDevice = AUDIO_DEVICE_NONE;
- sp<DeviceDescriptor> txDeviceDesc =
- getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
- if (txDeviceDesc != nullptr) {
- txDevice = txDeviceDesc->type();
- }
sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
- LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
- DeviceVector availPrimaryInputDevices =
- availableInputDevices.getDevicesFromHwModule(primaryOutput->getModuleHandle());
+ if (primaryOutput != nullptr) {
+ audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+ sp<DeviceDescriptor> txDeviceDesc =
+ getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ if (txDeviceDesc != nullptr) {
+ txDevice = txDeviceDesc->type();
+ }
+ DeviceVector availPrimaryInputDevices =
+ availableInputDevices.getDevicesFromHwModule(
+ primaryOutput->getModuleHandle());
- // TODO: getPrimaryOutput return only devices from first module in
- // audio_policy_configuration.xml, hearing aid is not there, but it's
- // a primary device
- // FIXME: this is not the right way of solving this problem
- DeviceVector availPrimaryOutputDevices = availableOutputDevices.getDevicesFromTypes(
- primaryOutput->supportedDevices().types());
- availPrimaryOutputDevices.add(
- availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
+ // TODO: getPrimaryOutput return only devices from first module in
+ // audio_policy_configuration.xml, hearing aid is not there, but it's
+ // a primary device
+ // FIXME: this is not the right way of solving this problem
+ DeviceVector availPrimaryOutputDevices = availableOutputDevices.getDevicesFromTypes(
+ primaryOutput->supportedDevices().types());
+ availPrimaryOutputDevices.add(
+ availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
- if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
- String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
- ((availPrimaryInputDevices.getDevice(
- txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
- (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
- availableOutputDevices = availPrimaryOutputDevices;
+ if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
+ String8(""), AUDIO_FORMAT_DEFAULT) == nullptr)
+ || ((availPrimaryInputDevices.getDevice(
+ txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
+ (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
+ availableOutputDevices = availPrimaryOutputDevices;
+ }
+ } else {
+ ALOGE("%s, STRATEGY_PHONE: Primary output not found", __func__);
}
-
}
// Do not use A2DP devices when in call but use them when not in call
// (e.g for voice mail playback)
@@ -597,8 +600,11 @@
if ((getPhoneState() == AUDIO_MODE_IN_CALL) &&
(availableOutputDevices.getDevice(AUDIO_DEVICE_OUT_TELEPHONY_TX,
String8(""), AUDIO_FORMAT_DEFAULT)) == nullptr) {
- LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
- availableDevices = availablePrimaryDevices;
+ if (!availablePrimaryDevices.isEmpty()) {
+ availableDevices = availablePrimaryDevices;
+ } else {
+ ALOGE("%s, AUDIO_SOURCE_VOICE_COMMUNICATION: Primary devices not found", __func__);
+ }
}
if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
@@ -652,8 +658,11 @@
case AUDIO_SOURCE_HOTWORD:
// We should not use primary output criteria for Hotword but rather limit
// to devices attached to the same HW module as the build in mic
- LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
- availableDevices = availablePrimaryDevices;
+ if (!availablePrimaryDevices.isEmpty()) {
+ availableDevices = availablePrimaryDevices;
+ } else {
+ ALOGE("%s, AUDIO_SOURCE_HOTWORD: Primary devices not found", __func__);
+ }
if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
device = availableDevices.getDevice(
AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index d4176c1..e8066fb 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "utils/Errors.h"
#define LOG_TAG "APM_AudioPolicyManager"
// Need to keep the log statements even in production builds
@@ -337,7 +338,7 @@
outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
continue;
}
- setOutputDevices(desc, newDevices, force, 0);
+ setOutputDevices(__func__, desc, newDevices, force, 0);
}
if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
!activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
@@ -657,7 +658,7 @@
status_t AudioPolicyManager::updateCallRouting(bool fromCache, uint32_t delayMs, uint32_t *waitMs)
{
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
+ if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) {
DeviceVector rxDevices = selectBestRxSinkDevicesForCall(fromCache);
return updateCallRoutingInternal(rxDevices, delayMs, waitMs);
}
@@ -670,14 +671,21 @@
bool createTxPatch = false;
bool createRxPatch = false;
uint32_t muteWaitMs = 0;
- if(!hasPrimaryOutput() ||
+ if (hasPrimaryOutput() &&
mPrimaryOutput->devices().onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_STUB)) {
return INVALID_OPERATION;
}
- ALOG_ASSERT(!rxDevices.isEmpty(), "%s() no selected output device", __func__);
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
+
+ disconnectTelephonyAudioSource(mCallRxSourceClient);
+ disconnectTelephonyAudioSource(mCallTxSourceClient);
+
+ if (rxDevices.isEmpty()) {
+ ALOGW("%s() no selected output device", __func__);
+ return INVALID_OPERATION;
+ }
if (txSourceDevice == nullptr) {
ALOGE("%s() selected input device not available", __func__);
return INVALID_OPERATION;
@@ -686,9 +694,6 @@
ALOGV("%s device rxDevice %s txDevice %s", __func__,
rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
- disconnectTelephonyAudioSource(mCallRxSourceClient);
- disconnectTelephonyAudioSource(mCallTxSourceClient);
-
auto telephonyRxModule =
mHwModules.getModuleForDeviceType(AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
auto telephonyTxModule =
@@ -728,7 +733,11 @@
// Use legacy routing method for voice calls via setOutputDevice() on primary output.
// Otherwise, create two audio patches for TX and RX path.
if (!createRxPatch) {
- muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
+ if (!hasPrimaryOutput()) {
+ ALOGW("%s() no primary output available", __func__);
+ return INVALID_OPERATION;
+ }
+ muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
} else { // create RX path audio patch
connectTelephonyRxAudioSource();
// If the TX device is on the primary HW module but RX device is
@@ -874,21 +883,21 @@
}
}
- if (hasPrimaryOutput()) {
- if (state == AUDIO_MODE_IN_CALL) {
- (void)updateCallRouting(false /*fromCache*/, delayMs);
- } else {
+ if (state == AUDIO_MODE_IN_CALL) {
+ (void)updateCallRouting(false /*fromCache*/, delayMs);
+ } else {
+ if (oldState == AUDIO_MODE_IN_CALL) {
+ disconnectTelephonyAudioSource(mCallRxSourceClient);
+ disconnectTelephonyAudioSource(mCallTxSourceClient);
+ }
+ if (hasPrimaryOutput()) {
DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
// force routing command to audio hardware when ending call
// even if no device change is needed
if (isStateInCall(oldState) && rxDevices.isEmpty()) {
rxDevices = mPrimaryOutput->devices();
}
- if (oldState == AUDIO_MODE_IN_CALL) {
- disconnectTelephonyAudioSource(mCallRxSourceClient);
- disconnectTelephonyAudioSource(mCallTxSourceClient);
- }
- setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+ setOutputDevices(__func__, mPrimaryOutput, rxDevices, force, 0);
}
}
@@ -906,7 +915,7 @@
outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
continue;
}
- setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
+ setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
}
}
@@ -1325,10 +1334,15 @@
AudioProfileVector profiles;
status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
if (ret == NO_ERROR && !profiles.empty()) {
- config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
- : *profiles[0]->getChannels().begin();
- config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
- : *profiles[0]->getSampleRates().begin();
+ const auto channels = profiles[0]->getChannels();
+ if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+ config->channel_mask = *channels.begin();
+ }
+ const auto sampleRates = profiles[0]->getSampleRates();
+ if (!sampleRates.empty() &&
+ (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+ config->sample_rate = *sampleRates.begin();
+ }
config->format = profiles[0]->getFormat();
}
return INVALID_OPERATION;
@@ -2335,7 +2349,8 @@
return DEAD_OBJECT;
}
const uint32_t muteWaitMs =
- setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
+ setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
+ requiresMuteCheck);
// apply volume rules for current stream and device if necessary
auto &curves = getVolumeCurves(client->attributes());
@@ -2418,7 +2433,7 @@
outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
continue;
}
- setOutputDevices(desc, newDevices, force, delayMs);
+ setOutputDevices(__func__, desc, newDevices, force, delayMs);
// re-apply device specific volume if not done by setOutputDevice()
if (!force) {
applyStreamVolumes(desc, newDevices.types(), delayMs);
@@ -2520,7 +2535,7 @@
// still contain data that needs to be drained. The latency only covers the audio HAL
// and kernel buffers. Also the latency does not always include additional delay in the
// audio path (audio DSP, CODEC ...)
- setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2,
+ setOutputDevices(__func__, outputDesc, newDevices, false, outputDesc->latency()*2,
nullptr, true /*requiresMuteCheck*/, requiresVolumeCheck);
// force restoring the device selection on other active outputs if it differs from the
@@ -2543,7 +2558,7 @@
outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
continue;
}
- setOutputDevices(desc, newDevices2, force, delayMs);
+ setOutputDevices(__func__, desc, newDevices2, force, delayMs);
// re-apply device specific volume if not done by setOutputDevice()
if (!force) {
@@ -2646,6 +2661,7 @@
sp<AudioPolicyMix> policyMix;
sp<DeviceDescriptor> device;
sp<AudioInputDescriptor> inputDesc;
+ sp<AudioInputDescriptor> previousInputDesc;
sp<RecordClientDescriptor> clientDesc;
audio_port_handle_t requestedDeviceId = *selectedDeviceId;
uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -2775,10 +2791,15 @@
status_t ret = getProfilesForDevices(
DeviceVector(device), profiles, flags, true /*isInput*/);
if (ret == NO_ERROR && !profiles.empty()) {
- config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
- : *profiles[0]->getChannels().begin();
- config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
- : *profiles[0]->getSampleRates().begin();
+ const auto channels = profiles[0]->getChannels();
+ if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+ config->channel_mask = *channels.begin();
+ }
+ const auto sampleRates = profiles[0]->getSampleRates();
+ if (!sampleRates.empty() &&
+ (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+ config->sample_rate = *sampleRates.begin();
+ }
config->format = profiles[0]->getFormat();
}
goto error;
@@ -2797,6 +2818,8 @@
requestedDeviceId, attributes.source, flags,
isSoundTrigger);
inputDesc = mInputs.valueFor(*input);
+ // Move effects (if any) for the client session to its input
+ mEffects.moveEffectsForIo(session, *input, &mInputs, mpClientInterface);
inputDesc->addClient(clientDesc);
ALOGV("getInputForAttr() returns input %d type %d selectedDeviceId %d for port ID %d",
@@ -3106,7 +3129,7 @@
ALOGV("%s %d", __FUNCTION__, input);
inputDesc->removeClient(portId);
-
+ mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
if (inputDesc->getClientCount() > 0) {
ALOGV("%s(%d) %zu clients remaining", __func__, portId, inputDesc->getClientCount());
return;
@@ -3468,8 +3491,8 @@
}
if (output != mMusicEffectOutput) {
- mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
- mpClientInterface->moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
+ mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output,
+ mpClientInterface);
mMusicEffectOutput = output;
}
@@ -3777,6 +3800,17 @@
return res;
}
+status_t AudioPolicyManager::updatePolicyMix(
+ const AudioMix& mix,
+ const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
+ status_t res = mPolicyMixes.updateMix(mix, updatedCriteria);
+ if (res == NO_ERROR) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+ }
+ return res;
+}
+
void AudioPolicyManager::dumpManualSurroundFormats(String8 *dst) const
{
size_t i = 0;
@@ -3939,8 +3973,9 @@
outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
continue;
}
- waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
- !skipDelays /*requiresMuteCheck*/,
+
+ waitMs = setOutputDevices(__func__, outputDesc, newDevices, forceRouting, delayMs,
+ nullptr, !skipDelays /*requiresMuteCheck*/,
!forceRouting /*requiresVolumeCheck*/, skipDelays);
// Only apply special touch sound delay once
delayMs = 0;
@@ -4927,7 +4962,7 @@
// TODO: reconfigure output format and channels here
ALOGV("%s setting device %s on output %d",
__func__, dumpDeviceTypes(devices.types()).c_str(), outputDesc->mIoHandle);
- setOutputDevices(outputDesc, devices, true, 0, handle);
+ setOutputDevices(__func__, outputDesc, devices, true, 0, handle);
index = mAudioPatches.indexOfKey(*handle);
if (index >= 0) {
if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) {
@@ -5186,7 +5221,7 @@
return BAD_VALUE;
}
- setOutputDevices(outputDesc,
+ setOutputDevices(__func__, outputDesc,
getNewOutputDevices(outputDesc, true /*fromCache*/),
true,
0,
@@ -5224,14 +5259,9 @@
return NO_ERROR;
}
patchHandle = outputDesc->getPatchHandle();
- // When a Sw bridge is released, the mixer used by this bridge will release its
- // patch at AudioFlinger side. Hence, the mixer audio patch must be recreated
- // Reuse patch handle to force audio flinger removing initial mixer patch removal
- // updating hal patch handle (prevent leaks).
// While using a HwBridge, force reconsidering device only if not reusing an existing
// output and no more activity on output (will force to close).
- bool force = sourceDesc->useSwBridge() ||
- (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+ const bool force = sourceDesc->canCloseOutput() && !outputDesc->isActive();
// APM pattern is to have always outputs opened / patch realized for reachable devices.
// Update device may result to NONE (empty), coupled with force, it releases the patch.
// Reconsider device only for cases:
@@ -5240,7 +5270,7 @@
// 3 / Inactive Output previously hosting SwBridge that can be closed.
bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
sourceDesc->canCloseOutput();
- setOutputDevices(outputDesc,
+ setOutputDevices(__func__, outputDesc,
updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
outputDesc->devices(),
force,
@@ -5377,7 +5407,7 @@
outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
continue;
}
- setOutputDevices(outputDesc, newDevices, false);
+ setOutputDevices(__func__, outputDesc, newDevices, false);
}
}
reopenOutputsWithDevices(outputsToReopen);
@@ -6254,12 +6284,13 @@
if (mPrimaryOutput == nullptr &&
outProfile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
mPrimaryOutput = outputDesc;
+ mPrimaryModuleHandle = mPrimaryOutput->getModuleHandle();
}
if ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
outputDesc->close();
} else {
addOutput(output, outputDesc);
- setOutputDevices(outputDesc,
+ setOutputDevices(__func__, outputDesc,
DeviceVector(supportedDevice),
true,
0,
@@ -6459,8 +6490,8 @@
if (device_distinguishes_on_address(deviceType)) {
ALOGV("checkOutputsForDevice(): setOutputDevices %s",
device->toString().c_str());
- setOutputDevices(desc, DeviceVector(device), true/*force*/, 0/*delay*/,
- NULL/*patch handle*/);
+ setOutputDevices(__func__, desc, DeviceVector(device), true/*force*/,
+ 0/*delay*/, NULL/*patch handle*/);
}
ALOGV("checkOutputsForDevice(): adding output %d", output);
}
@@ -6755,6 +6786,7 @@
mpClientInterface->onAudioPatchListUpdate();
}
+ mEffects.putOrphanEffectsForIo(input);
inputDesc->close();
mInputs.removeItem(input);
@@ -7087,7 +7119,9 @@
DeviceVector AudioPolicyManager::getNewOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
bool fromCache)
{
- DeviceVector devices;
+ if (outputDesc == nullptr) {
+ return DeviceVector{};
+ }
ssize_t index = mAudioPatches.indexOfKey(outputDesc->getPatchHandle());
if (index >= 0) {
@@ -7121,6 +7155,7 @@
return DeviceVector(device);
}
+ DeviceVector devices;
for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
@@ -7343,7 +7378,7 @@
if (!desc->supportedDevices().containsAtLeastOne(outputDesc->supportedDevices())) {
continue;
}
- ALOGVV("%s() %s (curDevice %s)", __func__,
+ ALOGVV("%s() output %s %s (curDevice %s)", __func__, desc->info().c_str(),
mute ? "muting" : "unmuting", curDevices.toString().c_str());
setStrategyMute(productStrategy, mute, desc, mute ? 0 : delayMs);
if (desc->isStrategyActive(productStrategy)) {
@@ -7396,7 +7431,8 @@
return 0;
}
-uint32_t AudioPolicyManager::setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+uint32_t AudioPolicyManager::setOutputDevices(const char *caller,
+ const sp<SwAudioOutputDescriptor>& outputDesc,
const DeviceVector &devices,
bool force,
int delayMs,
@@ -7405,13 +7441,15 @@
bool skipMuteDelay)
{
// TODO(b/262404095): Consider if the output need to be reopened.
- ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
+ std::string logPrefix = std::string("caller ") + caller + outputDesc->info();
+ ALOGV("%s %s device %s delayMs %d", __func__, logPrefix.c_str(),
+ devices.toString().c_str(), delayMs);
uint32_t muteWaitMs;
if (outputDesc->isDuplicated()) {
- muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
+ muteWaitMs = setOutputDevices(__func__, outputDesc->subOutput1(), devices, force, delayMs,
nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
- muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
+ muteWaitMs += setOutputDevices(__func__, outputDesc->subOutput2(), devices, force, delayMs,
nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
return muteWaitMs;
}
@@ -7421,7 +7459,8 @@
DeviceVector prevDevices = outputDesc->devices();
DeviceVector availPrevDevices = mAvailableOutputDevices.filter(prevDevices);
- ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
+ ALOGV("%s %s prevDevice %s", __func__, logPrefix.c_str(),
+ prevDevices.toString().c_str());
if (!filteredDevices.isEmpty()) {
outputDesc->setDevices(filteredDevices);
@@ -7431,7 +7470,8 @@
if (requiresMuteCheck) {
muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevices, delayMs);
} else {
- ALOGV("%s: suppressing checkDeviceMuteStrategies", __func__);
+ ALOGV("%s: %s suppressing checkDeviceMuteStrategies", __func__,
+ logPrefix.c_str());
muteWaitMs = 0;
}
@@ -7441,7 +7481,8 @@
// output profile or if new device is not supported AND previous device(s) is(are) still
// available (otherwise reset device must be done on the output)
if (!devices.isEmpty() && filteredDevices.isEmpty() && !availPrevDevices.empty()) {
- ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
+ ALOGV("%s: %s unsupported device %s for output", __func__, logPrefix.c_str(),
+ devices.toString().c_str());
// restore previous device after evaluating strategy mute state
outputDesc->setDevices(prevDevices);
return muteWaitMs;
@@ -7454,16 +7495,19 @@
// AND the output is connected by a valid audio patch.
// Doing this check here allows the caller to call setOutputDevices() without conditions
if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) && !force && outputRouted) {
- ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
- filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
+ ALOGV("%s %s setting same device %s or null device, force=%d, patch handle=%d",
+ __func__, logPrefix.c_str(), filteredDevices.toString().c_str(), force,
+ outputDesc->getPatchHandle());
if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
- ALOGV("%s setting same device on routed output, force apply volumes", __func__);
+ ALOGV("%s %s setting same device on routed output, force apply volumes",
+ __func__, logPrefix.c_str());
applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs, true /*force*/);
}
return muteWaitMs;
}
- ALOGV("%s changing device to %s", __func__, filteredDevices.toString().c_str());
+ ALOGV("%s %s changing device to %s", __func__, logPrefix.c_str(),
+ filteredDevices.toString().c_str());
// do the routing
if (filteredDevices.isEmpty() || mAvailableOutputDevices.filter(filteredDevices).empty()) {
@@ -8383,6 +8427,7 @@
if (mPrimaryOutput == nullptr && profile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
ALOGV("%s(): re-assigning mPrimaryOutput", __func__);
mPrimaryOutput = desc;
+ mPrimaryModuleHandle = mPrimaryOutput->getModuleHandle();
}
return desc;
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 5b9f6ad..a1c8f62 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -292,6 +292,9 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+ virtual status_t updatePolicyMix(
+ const AudioMix& mix,
+ const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
virtual status_t setUidDeviceAffinities(uid_t uid,
const AudioDeviceTypeAddrVector& devices);
virtual status_t removeUidDeviceAffinities(uid_t uid);
@@ -526,6 +529,7 @@
/**
* @brief setOutputDevices change the route of the specified output.
+ * @param caller of the method
* @param outputDesc to be considered
* @param device to be considered to route the output
* @param force if true, force the routing even if no change.
@@ -539,7 +543,8 @@
* @return the number of ms we have slept to allow new routing to take effect in certain
* cases.
*/
- uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+ uint32_t setOutputDevices(const char *caller,
+ const sp<SwAudioOutputDescriptor>& outputDesc,
const DeviceVector &device,
bool force = false,
int delayMs = 0,
@@ -815,10 +820,10 @@
bool isPrimaryModule(const sp<HwModule> &module) const
{
- if (module == 0 || !hasPrimaryOutput()) {
+ if (module == nullptr || mPrimaryModuleHandle == AUDIO_MODULE_HANDLE_NONE) {
return false;
}
- return module->getHandle() == mPrimaryOutput->getModuleHandle();
+ return module->getHandle() == mPrimaryModuleHandle;
}
DeviceVector availablePrimaryOutputDevices() const
{
@@ -930,6 +935,8 @@
EngineInstance mEngine; // Audio Policy Engine instance
AudioPolicyClientInterface *mpClientInterface; // audio policy client interface
sp<SwAudioOutputDescriptor> mPrimaryOutput; // primary output descriptor
+ // mPrimaryModuleHandle caches the value of mPrimaryOutput->getModuleHandle()
+ audio_module_handle_t mPrimaryModuleHandle = AUDIO_MODULE_HANDLE_NONE;
// list of descriptors for outputs currently opened
sp<SwAudioOutputDescriptor> mSpatializerOutput;
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index f4fc8f1..c674909 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -7,26 +7,8 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_library_shared {
- name: "libaudiopolicyservice",
-
- defaults: [
- "latest_android_media_audio_common_types_cpp_shared",
- ],
-
- srcs: [
- "AudioPolicyClientImpl.cpp",
- "AudioPolicyEffects.cpp",
- "AudioPolicyInterfaceImpl.cpp",
- "AudioPolicyService.cpp",
- "CaptureStateNotifier.cpp",
- "Spatializer.cpp",
- "SpatializerPoseController.cpp",
- ],
-
- include_dirs: [
- "frameworks/av/services/audioflinger"
- ],
+cc_defaults {
+ name: "libaudiopolicyservice_dependencies",
shared_libs: [
"libactivitymanager_aidl",
@@ -41,7 +23,6 @@
"libaudioutils",
"libbinder",
"libcutils",
- "libeffectsconfig",
"libhardware_legacy",
"libheadtracking",
"libheadtracking-binding",
@@ -67,6 +48,36 @@
],
static_libs: [
+ "libeffectsconfig",
+ "libaudiopolicycomponents",
+ ]
+}
+
+cc_library {
+ name: "libaudiopolicyservice",
+
+ defaults: [
+ "libaudiopolicyservice_dependencies",
+ "latest_android_media_audio_common_types_cpp_shared",
+ ],
+
+ srcs: [
+ "AudioRecordClient.cpp",
+ "AudioPolicyClientImpl.cpp",
+ "AudioPolicyEffects.cpp",
+ "AudioPolicyInterfaceImpl.cpp",
+ "AudioPolicyService.cpp",
+ "CaptureStateNotifier.cpp",
+ "Spatializer.cpp",
+ "SpatializerPoseController.cpp",
+ ],
+
+ include_dirs: [
+ "frameworks/av/services/audioflinger"
+ ],
+
+
+ static_libs: [
"framework-permission-aidl-cpp",
],
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 70a1785..85b7ad9 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -44,10 +44,7 @@
AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
// load xml config with effectsFactoryHal
status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
- if (loadResult == NO_ERROR) {
- mDefaultDeviceEffectFuture =
- std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
- } else if (loadResult < 0) {
+ if (loadResult < 0) {
ALOGW("Failed to query effect configuration, fallback to load .conf");
// load automatic audio effect modules
if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
@@ -60,6 +57,11 @@
}
}
+void AudioPolicyEffects::setDefaultDeviceEffects() {
+ mDefaultDeviceEffectFuture = std::async(
+ std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+}
+
AudioPolicyEffects::~AudioPolicyEffects()
{
size_t i = 0;
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 9f65a96..e17df48 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -117,6 +117,8 @@
// Remove the default stream effect from wherever it's attached.
status_t removeStreamDefaultEffect(audio_unique_id_t id);
+ void setDefaultDeviceEffects();
+
private:
void initDefaultDeviceEffects();
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 5d86e7c..509b673 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include "AudioPolicyService.h"
+#include "AudioRecordClient.h"
#include "TypeConverter.h"
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
@@ -1529,6 +1530,19 @@
return Status::ok();
}
+template <typename Port>
+void anonymizePortBluetoothAddress(Port *port) {
+ if (port->type != AUDIO_PORT_TYPE_DEVICE) {
+ return;
+ }
+ if (!(audio_is_a2dp_device(port->ext.device.type)
+ || audio_is_ble_device(port->ext.device.type)
+ || audio_is_bluetooth_sco_device(port->ext.device.type)
+ || audio_is_hearing_aid_out_device(port->ext.device.type))) {
+ return;
+ }
+ anonymizeBluetoothAddress(port->ext.device.address);
+}
Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
media::AudioPortType typeAidl, Int* count,
@@ -1551,10 +1565,20 @@
if (mAudioPolicyManager == NULL) {
return binderStatusFromStatusT(NO_INIT);
}
+
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
+
AutoCallerClear acc;
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
numPortsReq = std::min(numPortsReq, num_ports);
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ for (size_t i = 0; i < numPortsReq; ++i) {
+ anonymizePortBluetoothAddress(&ports[i]);
+ }
+ }
+
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1581,8 +1605,16 @@
if (mAudioPolicyManager == NULL) {
return binderStatusFromStatusT(NO_INIT);
}
+
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
+
AutoCallerClear acc;
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ anonymizePortBluetoothAddress(&port);
+ }
+
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
return Status::ok();
}
@@ -1644,10 +1676,25 @@
if (mAudioPolicyManager == NULL) {
return binderStatusFromStatusT(NO_INIT);
}
+
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
+
AutoCallerClear acc;
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
numPatchesReq = std::min(numPatchesReq, num_patches);
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ for (size_t i = 0; i < numPatchesReq; ++i) {
+ for (size_t j = 0; j < patches[i].num_sources; ++j) {
+ anonymizePortBluetoothAddress(&patches[i].sources[j]);
+ }
+ for (size_t j = 0; j < patches[i].num_sinks; ++j) {
+ anonymizePortBluetoothAddress(&patches[i].sinks[j]);
+ }
+ }
+ }
+
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(patches.get(), patches.get() + numPatchesReq,
std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
@@ -1763,6 +1810,22 @@
}
}
+Status AudioPolicyService::updatePolicyMixes(
+ const ::std::vector<::android::media::AudioMixUpdate>& updates) {
+ Mutex::Autolock _l(mLock);
+ for (const auto& update : updates) {
+ AudioMix mix = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_AudioMix(update.audioMix));
+ std::vector<AudioMixMatchCriterion> newCriteria =
+ VALUE_OR_RETURN_BINDER_STATUS(convertContainer<std::vector<AudioMixMatchCriterion>>(
+ update.newCriteria, aidl2legacy_AudioMixMatchCriterion));
+ int status;
+ if((status = mAudioPolicyManager->updatePolicyMix(mix, newCriteria)) != NO_ERROR) {
+ return binderStatusFromStatusT(status);
+ }
+ }
+ return binderStatusFromStatusT(NO_ERROR);
+}
+
Status AudioPolicyService::setUidDeviceAffinities(
int32_t uidAidl,
const std::vector<AudioDevice>& devicesAidl) {
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 5fbbc30..7241597 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,7 +25,6 @@
#include <sys/time.h>
#include <dlfcn.h>
-#include <android/content/pm/IPackageManagerNative.h>
#include <audio_utils/clock.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
@@ -35,6 +34,7 @@
#include <binder/IResultReceiver.h>
#include <utils/String16.h>
#include <utils/threads.h>
+#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
#include <hardware_legacy/power.h>
#include <media/AidlConversion.h>
@@ -121,6 +121,7 @@
BINDER_METHOD_ENTRY(releaseSoundTriggerSession) \
BINDER_METHOD_ENTRY(getPhoneState) \
BINDER_METHOD_ENTRY(registerPolicyMixes) \
+BINDER_METHOD_ENTRY(updatePolicyMixes) \
BINDER_METHOD_ENTRY(setUidDeviceAffinities) \
BINDER_METHOD_ENTRY(removeUidDeviceAffinities) \
BINDER_METHOD_ENTRY(setUserIdDeviceAffinities) \
@@ -217,27 +218,6 @@
{
delete interface;
}
-
-namespace {
-int getTargetSdkForPackageName(std::string_view packageName) {
- const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
- int targetSdk = -1;
- if (binder != nullptr) {
- const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
- if (pm != nullptr) {
- const auto status = pm->getTargetSdkVersionForPackage(
- String16{packageName.data(), packageName.size()}, &targetSdk);
- ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
- return status.isOk() ? targetSdk : -1;
- }
- }
- return targetSdk;
-}
-
-bool doesPackageTargetAtLeastU(std::string_view packageName) {
- return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
-}
-} // anonymous
// ----------------------------------------------------------------------------
AudioPolicyService::AudioPolicyService()
@@ -328,6 +308,9 @@
}
}
AudioSystem::audioPolicyReady();
+ // AudioFlinger will handle effect creation and register these effects on audio_policy
+ // service. Hence, audio_policy service must be ready.
+ audioPolicyEffects->setDefaultDeviceEffects();
}
void AudioPolicyService::unloadAudioPolicyManager()
@@ -985,7 +968,8 @@
}
}
}
- if (current->attributes.source != AUDIO_SOURCE_HOTWORD) {
+ if (current->attributes.source != AUDIO_SOURCE_HOTWORD &&
+ !isVirtualSource(current->attributes.source)) {
onlyHotwordActive = false;
}
if (currentUid == mPhoneStateOwnerUid &&
@@ -1187,20 +1171,6 @@
return false;
}
-/* static */
-bool AudioPolicyService::isAppOpSource(audio_source_t source)
-{
- switch (source) {
- case AUDIO_SOURCE_FM_TUNER:
- case AUDIO_SOURCE_ECHO_REFERENCE:
- case AUDIO_SOURCE_REMOTE_SUBMIX:
- return false;
- default:
- break;
- }
- return true;
-}
-
void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
{
AutoCallerClear acc;
@@ -1340,6 +1310,7 @@
case TRANSACTION_isStreamActiveRemotely:
case TRANSACTION_isSourceActive:
case TRANSACTION_registerPolicyMixes:
+ case TRANSACTION_updatePolicyMixes:
case TRANSACTION_setMasterMono:
case TRANSACTION_getSurroundFormats:
case TRANSACTION_getReportedSurroundFormats:
@@ -1901,113 +1872,6 @@
return binder::Status::ok();
}
-// ----------- AudioPolicyService::OpRecordAudioMonitor implementation ----------
-
-// static
-sp<AudioPolicyService::OpRecordAudioMonitor>
-AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
- wp<AudioCommandThread> commandThread)
-{
- if (isAudioServerOrRootUid(attributionSource.uid)) {
- ALOGV("not silencing record for audio or root source %s",
- attributionSource.toString().c_str());
- return nullptr;
- }
-
- if (!AudioPolicyService::isAppOpSource(attr.source)) {
- ALOGD("not monitoring app op for uid %d and source %d",
- attributionSource.uid, attr.source);
- return nullptr;
- }
-
- if (!attributionSource.packageName.has_value()
- || attributionSource.packageName.value().size() == 0) {
- return nullptr;
- }
- return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
-}
-
-AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp,
- wp<AudioCommandThread> commandThread) :
- mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
- mCommandThread(commandThread)
-{
-}
-
-AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
- if (mOpCallback != 0) {
- mAppOpsManager.stopWatchingMode(mOpCallback);
- }
- mOpCallback.clear();
-}
-
-void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
-{
- checkOp();
- mOpCallback = new RecordAudioOpCallback(this);
- ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
- int flags = doesPackageTargetAtLeastU(
- mAttributionSource.packageName.value_or("")) ?
- AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
- // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))),
- flags,
- mOpCallback);
-}
-
-bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
- return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
-{
- // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- const int32_t mode = mAppOpsManager.checkOp(mAppOp,
- mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
- // verbose logging only log when appOp changed
- ALOGI_IF(hasIt != mHasOp.load(),
- "App op %d missing, %ssilencing record %s",
- mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
- mHasOp.store(hasIt);
-
- if (updateUidStates) {
- sp<AudioCommandThread> commandThread = mCommandThread.promote();
- if (commandThread != nullptr) {
- commandThread->updateUidStatesCommand();
- }
- }
-}
-
-AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
- const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
- const String16& packageName __unused) {
- sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
- if (monitor != NULL) {
- if (op != monitor->getOp()) {
- return;
- }
- monitor->checkOp(true);
- }
-}
-
-
// ----------- AudioPolicyService::AudioCommandThread implementation ----------
AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 8d5628f..aaf0b1b 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -1,4 +1,3 @@
-
/*
* Copyright (C) 2009 The Android Open Source Project
*
@@ -28,7 +27,6 @@
#include <utils/Vector.h>
#include <utils/SortedVector.h>
#include <binder/ActivityManager.h>
-#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
#include <binder/IUidObserver.h>
#include <system/audio.h>
@@ -64,6 +62,12 @@
// ----------------------------------------------------------------------------
+namespace media::audiopolicy {
+ class AudioRecordClient;
+}
+
+using ::android::media::audiopolicy::AudioRecordClient;
+
class AudioPolicyService :
public BinderService<AudioPolicyService>,
public media::BnAudioPolicyService,
@@ -194,6 +198,8 @@
binder::Status getPhoneState(AudioMode* _aidl_return) override;
binder::Status registerPolicyMixes(const std::vector<media::AudioMix>& mixes,
bool registration) override;
+ binder::Status updatePolicyMixes(
+ const ::std::vector<::android::media::AudioMixUpdate>& updates) override;
binder::Status setUidDeviceAffinities(int32_t uid,
const std::vector<AudioDevice>& devices) override;
binder::Status removeUidDeviceAffinities(int32_t uid) override;
@@ -401,7 +407,6 @@
// Handles binder shell commands
virtual status_t shellCommand(int in, int out, int err, Vector<String16>& args);
- class AudioRecordClient;
// Sets whether the given UID records only silence
virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mLock);
@@ -542,6 +547,7 @@
// Thread used to send audio config commands to audio flinger
// For audio config commands, it is necessary because audio flinger requires that the calling
// process (user) has permission to modify audio settings.
+ public:
class AudioCommandThread : public Thread {
class AudioCommand;
public:
@@ -732,6 +738,7 @@
wp<AudioPolicyService> mService;
};
+ private:
class AudioPolicyClient : public AudioPolicyClientInterface
{
public:
@@ -909,6 +916,7 @@
bool mAudioVolumeGroupCallbacksEnabled;
};
+ public:
class AudioClient : public virtual RefBase {
public:
AudioClient(const audio_attributes_t attributes,
@@ -930,82 +938,8 @@
const audio_port_handle_t deviceId; // selected input device port ID
bool active; // Playback/Capture is active or inactive
};
-
- // Checks and monitors app ops for AudioRecordClient
- class OpRecordAudioMonitor : public RefBase {
- public:
- ~OpRecordAudioMonitor() override;
- bool hasOp() const;
- int32_t getOp() const { return mAppOp; }
-
- static sp<OpRecordAudioMonitor> createIfNeeded(
- const AttributionSourceState& attributionSource,
- const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
-
private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
- wp<AudioCommandThread> commandThread);
- void onFirstRef() override;
-
- AppOpsManager mAppOpsManager;
-
- class RecordAudioOpCallback : public BnAppOpsCallback {
- public:
- explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
- void opChanged(int32_t op, const String16& packageName) override;
-
- private:
- const wp<OpRecordAudioMonitor> mMonitor;
- };
-
- sp<RecordAudioOpCallback> mOpCallback;
- // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
- // in AppOp callback and in onFirstRef()
- // updateUidStates is true when the silenced state of active AudioRecordClients must be
- // re-evaluated
- void checkOp(bool updateUidStates = false);
-
- std::atomic_bool mHasOp;
- const AttributionSourceState mAttributionSource;
- const int32_t mAppOp;
- wp<AudioCommandThread> mCommandThread;
- };
-
- // --- AudioRecordClient ---
- // Information about each registered AudioRecord client
- // (between calls to getInputForAttr() and releaseInput())
- class AudioRecordClient : public AudioClient {
- public:
- AudioRecordClient(const audio_attributes_t attributes,
- const audio_io_handle_t io,
- const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId,
- const AttributionSourceState& attributionSource,
- bool canCaptureOutput, bool canCaptureHotword,
- wp<AudioCommandThread> commandThread) :
- AudioClient(attributes, io, attributionSource,
- session, portId, deviceId), attributionSource(attributionSource),
- startTimeNs(0), canCaptureOutput(canCaptureOutput),
- canCaptureHotword(canCaptureHotword), silenced(false),
- mOpRecordAudioMonitor(
- OpRecordAudioMonitor::createIfNeeded(attributionSource,
- attributes, commandThread)) {}
- ~AudioRecordClient() override = default;
-
- bool hasOp() const {
- return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
- }
-
- const AttributionSourceState attributionSource; // attribution source of client
- nsecs_t startTimeNs;
- const bool canCaptureOutput;
- const bool canCaptureHotword;
- bool silenced;
-
- private:
- sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
- };
// --- AudioPlaybackClient ---
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
new file mode 100644
index 0000000..a89a84d
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/content/pm/IPackageManagerNative.h>
+
+#include "AudioRecordClient.h"
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using android::AudioPolicyService;
+
+namespace {
+bool isAppOpSource(audio_source_t source)
+{
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE:
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ return false;
+ default:
+ break;
+ }
+ return true;
+}
+
+int getTargetSdkForPackageName(std::string_view packageName) {
+ const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+ int targetSdk = -1;
+ if (binder != nullptr) {
+ const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+ if (pm != nullptr) {
+ const auto status = pm->getTargetSdkVersionForPackage(
+ String16{packageName.data(), packageName.size()}, &targetSdk);
+ return status.isOk() ? targetSdk : -1;
+ }
+ }
+ return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+ return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+}
+
+// static
+sp<OpRecordAudioMonitor>
+OpRecordAudioMonitor::createIfNeeded(
+ const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+ wp<AudioPolicyService::AudioCommandThread> commandThread)
+{
+ if (isAudioServerOrRootUid(attributionSource.uid)) {
+ ALOGV("not silencing record for audio or root source %s",
+ attributionSource.toString().c_str());
+ return nullptr;
+ }
+
+ if (!isAppOpSource(attr.source)) {
+ ALOGD("not monitoring app op for uid %d and source %d",
+ attributionSource.uid, attr.source);
+ return nullptr;
+ }
+
+ if (!attributionSource.packageName.has_value()
+ || attributionSource.packageName.value().size() == 0) {
+ return nullptr;
+ }
+ return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+OpRecordAudioMonitor::OpRecordAudioMonitor(
+ const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioPolicyService::AudioCommandThread> commandThread) :
+ mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+ mCommandThread(commandThread)
+{
+}
+
+OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+ if (mOpCallback != 0) {
+ mAppOpsManager.stopWatchingMode(mOpCallback);
+ }
+ mOpCallback.clear();
+}
+
+void OpRecordAudioMonitor::onFirstRef()
+{
+ checkOp();
+ mOpCallback = new RecordAudioOpCallback(this);
+ ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+
+ int flags = doesPackageTargetAtLeastU(
+ mAttributionSource.packageName.value_or("")) ?
+ AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
+ // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))),
+ flags,
+ mOpCallback);
+}
+
+bool OpRecordAudioMonitor::hasOp() const {
+ return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated in AppOp callback and in onFirstRef()
+// Note this method is never called (and never to be) for audio server / root track
+// due to the UID in createIfNeeded(). As a result for those record track, it's:
+// - not called from constructor,
+// - not called from RecordAudioOpCallback because the callback is not installed in this case
+void OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+ // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+ mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))));
+ const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ // verbose logging only log when appOp changed
+ ALOGI_IF(hasIt != mHasOp.load(),
+ "App op %d missing, %ssilencing record %s",
+ mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+ mHasOp.store(hasIt);
+
+ if (updateUidStates) {
+ sp<AudioPolicyService::AudioCommandThread> commandThread = mCommandThread.promote();
+ if (commandThread != nullptr) {
+ commandThread->updateUidStatesCommand();
+ }
+ }
+}
+
+OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+ const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+ const String16& packageName __unused) {
+ sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+ if (monitor != NULL) {
+ if (op != monitor->getOp()) {
+ return;
+ }
+ monitor->checkOp(true);
+ }
+}
+
+} // namespace android::media::audiopolicy
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
new file mode 100644
index 0000000..d3be316
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/content/AttributionSourceState.h>
+#include <binder/AppOpsManager.h>
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
+#include <cstdint>
+
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using ::android::content::AttributionSourceState;
+
+// Checks and monitors app ops for AudioRecordClient
+class OpRecordAudioMonitor : public RefBase {
+public:
+ ~OpRecordAudioMonitor() override;
+ bool hasOp() const;
+ int32_t getOp() const { return mAppOp; }
+
+ static sp<OpRecordAudioMonitor> createIfNeeded(
+ const AttributionSourceState& attributionSource,
+ const audio_attributes_t& attr,
+ wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+private:
+ OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+ void onFirstRef() override;
+
+ AppOpsManager mAppOpsManager;
+
+ class RecordAudioOpCallback : public BnAppOpsCallback {
+ public:
+ explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+ void opChanged(int32_t op, const String16& packageName) override;
+
+ private:
+ const wp<OpRecordAudioMonitor> mMonitor;
+ };
+
+ sp<RecordAudioOpCallback> mOpCallback;
+ // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
+ // in AppOp callback and in onFirstRef()
+ // updateUidStates is true when the silenced state of active AudioRecordClients must be
+ // re-evaluated
+ void checkOp(bool updateUidStates = false);
+
+ std::atomic_bool mHasOp;
+ const AttributionSourceState mAttributionSource;
+ const int32_t mAppOp;
+ wp<AudioPolicyService::AudioCommandThread> mCommandThread;
+};
+
+// --- AudioRecordClient ---
+// Information about each registered AudioRecord client
+// (between calls to getInputForAttr() and releaseInput())
+class AudioRecordClient : public AudioPolicyService::AudioClient {
+public:
+ AudioRecordClient(const audio_attributes_t attributes,
+ const audio_io_handle_t io,
+ const audio_session_t session, audio_port_handle_t portId,
+ const audio_port_handle_t deviceId,
+ const AttributionSourceState& attributionSource,
+ bool canCaptureOutput, bool canCaptureHotword,
+ wp<AudioPolicyService::AudioCommandThread> commandThread) :
+ AudioClient(attributes, io, attributionSource,
+ session, portId, deviceId), attributionSource(attributionSource),
+ startTimeNs(0), canCaptureOutput(canCaptureOutput),
+ canCaptureHotword(canCaptureHotword), silenced(false),
+ mOpRecordAudioMonitor(
+ OpRecordAudioMonitor::createIfNeeded(attributionSource,
+ attributes, commandThread)) {}
+ ~AudioRecordClient() override = default;
+
+ bool hasOp() const {
+ return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+ }
+
+ const AttributionSourceState attributionSource; // attribution source of client
+ nsecs_t startTimeNs;
+ const bool canCaptureOutput;
+ const bool canCaptureHotword;
+ bool silenced;
+
+private:
+ sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
+};
+
+}; // namespace android::media::audiopolicy
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 7c5ab08..8dbf471 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -26,7 +26,9 @@
#define LOG_TAG "APM_Test"
#include <Serializer.h>
#include <android-base/file.h>
+#include <android-base/properties.h>
#include <android/content/AttributionSourceState.h>
+#include <hardware/audio_effect.h>
#include <media/AudioPolicy.h>
#include <media/PatchBuilder.h>
#include <media/RecordingActivityTracker.h>
@@ -185,6 +187,7 @@
bool* isBitPerfect = nullptr);
void getInputForAttr(
const audio_attributes_t &attr,
+ audio_io_handle_t *input,
audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
@@ -296,6 +299,7 @@
void AudioPolicyManagerTest::getInputForAttr(
const audio_attributes_t &attr,
+ audio_io_handle_t *input,
const audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
@@ -304,7 +308,6 @@
int sampleRate,
audio_input_flags_t flags,
audio_port_handle_t *portId) {
- audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
config.channel_mask = channelMask;
@@ -315,7 +318,7 @@
AudioPolicyInterface::input_type_t inputType;
AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, &input, riid, session, attributionSource, &config, flags,
+ &attr, input, riid, session, attributionSource, &config, flags,
selectedDeviceId, &inputType, portId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
}
@@ -945,10 +948,13 @@
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t mixPortId = AUDIO_PORT_HANDLE_NONE;
audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
- audio_attributes_t attr = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+ audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source,
+ AUDIO_FLAG_NONE, ""};
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1,
+ &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
+ AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX,
+ &mixPortId));
std::vector<audio_port_v7> ports;
ASSERT_NO_FATAL_FAILURE(
@@ -1709,10 +1715,11 @@
audio_attributes_t attr = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
std::string tags = "addr=" + mMixAddress;
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
- AUDIO_INPUT_FLAG_NONE, &mPortId);
+ getInputForAttr(attr, &input, param.session, mTracker->getRiid(),
+ &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
ASSERT_EQ(extractionPort.id, selectedDeviceId);
@@ -1906,7 +1913,7 @@
audio_io_handle_t mOutput;
audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- audio_port_handle_t mPortId;
+ audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t mOutputType;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
bool mIsSpatialized;
@@ -1949,14 +1956,18 @@
}
TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
- MmapPlaybackStreamMatchingRenderDapMixSucceeds) {
- // Add render-only mix matching the test uid.
+ MmapPlaybackStreamMatchingRenderDapMixSupportingMmapSucceeds) {
+ // Add render-only mix matching the test uid.
const int testUid = 12345;
- status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
- /*mixAddress=*/"", audioConfig, {createUidCriterion(testUid)});
+ // test_audio_policy_configuration.xml declares mmap-capable mix port
+ // for AUDIO_DEVICE_OUT_USB_DEVICE.
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_USB_DEVICE, /*mixAddress=*/"",
+ audioConfig, {createUidCriterion(testUid)});
ASSERT_EQ(NO_ERROR, ret);
- // Geting output for matching uid should succeed for mmaped stream.
+    // Getting output for matching uid should succeed for mmapped stream, because the matched mix
+    // redirects to an mmap-capable device.
audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
@@ -1965,13 +1976,35 @@
&mOutputType, &mIsSpatialized, &mIsBitPerfect));
}
+TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
+ MmapPlaybackStreamMatchingRenderDapMixNotSupportingMmapFails) {
+ // Add render-only mix matching the test uid.
+ const int testUid = 12345;
+ // Per test_audio_policy_configuration.xml AUDIO_DEVICE_OUT_SPEAKER doesn't support mmap.
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_SPEAKER, /*mixAddress=*/"", audioConfig,
+ {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting output for matching uid should fail for mmapped stream, because the
+    // matched mix redirects to a device which doesn't support mmap.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ ASSERT_EQ(INVALID_OPERATION,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
INSTANTIATE_TEST_SUITE_P(
MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
/*deviceAddress=*/"remote_submix_media"),
DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- /*deviceAddress=*/"remote_submix_media")));
+ /*deviceAddress=*/"remote_submix_media"),
+ DPMmapTestParam(MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
+ /*deviceAddress=*/"")));
class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
public testing::WithParamInterface<DPTestParam> {
@@ -2028,9 +2061,10 @@
audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
- AUDIO_INPUT_FLAG_NONE, &portId);
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ getInputForAttr(param.attributes, &input, param.session, mTracker->getRiid(),
+ &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
if (param.expected_match) {
EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
} else {
@@ -2213,9 +2247,10 @@
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
- getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
- AUDIO_INPUT_FLAG_NONE);
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ getInputForAttr({}, &input, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE);
}
ASSERT_EQ(devicePort.id, routedPortId);
@@ -2962,7 +2997,8 @@
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
attr.source = source;
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
48000));
auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
@@ -2982,7 +3018,8 @@
mManager->setDevicesRoleForCapturePreset(source, role,
{preferredDevice->getDeviceTypeAddr()}));
selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
48000));
ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -2992,7 +3029,8 @@
ASSERT_EQ(NO_ERROR,
mManager->clearDevicesRoleForCapturePreset(source, role));
selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
48000));
ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -3017,7 +3055,8 @@
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
attr.source = source;
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
48000));
auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
@@ -3029,9 +3068,10 @@
mManager->setDevicesRoleForCapturePreset(source, role,
{selectedDevice->getDeviceTypeAddr()}));
selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
- 48000));
+ input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1,
+ &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
+ AUDIO_CHANNEL_IN_STEREO, 48000));
ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
// After clearing disabled device for capture preset, the selected device for input should be
@@ -3039,7 +3079,8 @@
ASSERT_EQ(NO_ERROR,
mManager->clearDevicesRoleForCapturePreset(source, role));
selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ input = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
48000));
ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -3075,3 +3116,77 @@
DevicesRoleForCapturePresetParam({AUDIO_SOURCE_HOTWORD, DEVICE_ROLE_PREFERRED})
)
);
+
+
+const effect_descriptor_t TEST_EFFECT_DESC = {
+ {0xf2a4bb20, 0x0c3c, 0x11e3, 0x8b07, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+ {0xff93e360, 0x0c3c, 0x11e3, 0x8a97, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
+ EFFECT_CONTROL_API_VERSION,
+ EFFECT_FLAG_TYPE_PRE_PROC,
+ 0,
+ 1,
+ "APM test Effect",
+ "The Android Open Source Project",
+};
+
+class AudioPolicyManagerPreProcEffectTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F(AudioPolicyManagerPreProcEffectTest, DeviceDisconnectWhileClientActive) {
+ const audio_source_t source = AUDIO_SOURCE_MIC;
+ const std::string address = "BUS00_MIC";
+ const std::string deviceName = "randomName";
+ audio_port_handle_t portId;
+ audio_devices_t type = AUDIO_DEVICE_IN_BUS;
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(type,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(), deviceName.c_str(),
+ AUDIO_FORMAT_DEFAULT));
+ auto availableDevices = mManager->getAvailableInputDevices();
+ ASSERT_GT(availableDevices.size(), 1);
+
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ attr.source = source;
+ audio_session_t session = TEST_SESSION_ID;
+ audio_io_handle_t inputClientHandle = 777;
+ int effectId = 666;
+ audio_port_v7 devicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, type, address, &devicePort));
+
+ audio_port_handle_t routedPortId = devicePort.id;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &inputClientHandle, session, 1, &routedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000, AUDIO_INPUT_FLAG_NONE, &portId));
+ ASSERT_EQ(devicePort.id, routedPortId);
+ auto selectedDevice = availableDevices.getDeviceFromId(routedPortId);
+ ASSERT_NE(nullptr, selectedDevice);
+
+ // Add a pre processing effect on the input client session
+ ASSERT_EQ(NO_ERROR, mManager->registerEffect(&TEST_EFFECT_DESC, inputClientHandle,
+ PRODUCT_STRATEGY_NONE, session, effectId));
+
+ ASSERT_EQ(NO_ERROR, mManager->startInput(portId));
+
+    // Force a device disconnection to close the input; the APM must not crash.
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ type, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+
+ // Reconnect the device
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+
+ inputClientHandle += 1;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, type, address, &devicePort));
+ routedPortId = devicePort.id;
+
+    // Reconnect the client, deliberately changing the io handle but keeping the session so the
+    // effect gets attached again
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &inputClientHandle, session, 1, &routedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+
+    // unregister effect should succeed since the effect shall have been restored on the client session
+ ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
+}
\ No newline at end of file
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 50ca26a..9e092c6 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -55,6 +55,16 @@
samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
<mixPort name="hifi_output" role="source" flags="AUDIO_OUTPUT_FLAG_BIT_PERFECT"/>
+ <mixPort name="mmap_no_irq_out" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus_input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -79,6 +89,8 @@
</devicePort>
<devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
</devicePort>
+ <devicePort tagName="BUS Device In" type="AUDIO_DEVICE_IN_BUS" role="source" address="BUS00_MIC">
+ </devicePort>
</devicePorts>
<routes>
<route type="mix" sink="Speaker"
@@ -96,7 +108,9 @@
<route type="mix" sink="BT A2DP Out"
sources="primary output,hifi_output"/>
<route type="mix" sink="USB Device Out"
- sources="primary output,hifi_output"/>
+ sources="primary output,hifi_output,mmap_no_irq_out"/>
+ <route type="mix" sink="mixport_bus_input"
+ sources="BUS Device In"/>
</routes>
</module>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index a45365a..af4c70c 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -35,9 +35,91 @@
],
}
-cc_library_shared {
+cc_defaults {
+ name: "libcameraservice_deps",
+ defaults: ["android.hardware.graphics.common-ndk_shared"],
+ shared_libs: [
+ "libactivitymanager_aidl",
+ "libbase",
+ "libdl",
+ "libui",
+ "liblog",
+ "libutilscallstack",
+ "libutils",
+ "libbinder",
+ "libbinder_ndk",
+ "libactivitymanager_aidl",
+ "libpermission",
+ "libcutils",
+ "libexif",
+ "libmedia",
+ "libmediautils",
+ "libcamera_client",
+ "libcamera_metadata",
+ "libfmq",
+ "libgui",
+ "libhardware",
+ "libhidlbase",
+ "libimage_io",
+ "libjpeg",
+ "libultrahdr",
+ "libmedia_codeclist",
+ "libmedia_omx",
+ "libmemunreachable",
+ "libprocessgroup",
+ "libprocinfo",
+ "libsensorprivacy",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libxml2",
+ "libyuv",
+ "android.hardware.camera.common@1.0",
+ "android.hardware.camera.device@1.0",
+ "android.hardware.camera.device@3.2",
+ "android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
+ "android.hardware.camera.device@3.5",
+ "android.hardware.camera.device@3.6",
+ "android.hardware.camera.device@3.7",
+ "android.hardware.common-V2-ndk",
+ "android.hardware.common.fmq-V1-ndk",
+ "camera_platform_flags_c_lib",
+ "media_permission-aidl-cpp",
+ ],
+
+ static_libs: [
+ "android.frameworks.cameraservice.common@2.0",
+ "android.frameworks.cameraservice.service@2.0",
+ "android.frameworks.cameraservice.service@2.1",
+ "android.frameworks.cameraservice.service@2.2",
+ "android.frameworks.cameraservice.device@2.0",
+ "android.frameworks.cameraservice.device@2.1",
+ "android.frameworks.cameraservice.common-V1-ndk",
+ "android.frameworks.cameraservice.service-V1-ndk",
+ "android.frameworks.cameraservice.device-V1-ndk",
+ "android.hardware.camera.common-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
+ "android.hardware.camera.metadata-V2-ndk",
+ "android.hardware.camera.provider@2.4",
+ "android.hardware.camera.provider@2.5",
+ "android.hardware.camera.provider@2.6",
+ "android.hardware.camera.provider@2.7",
+ "android.hardware.camera.provider-V2-ndk",
+ "libaidlcommonsupport",
+ "libbinderthreadstateutils",
+ "libcameraservice_device_independent",
+ "libdynamic_depth",
+ "libprocessinfoservice_aidl",
+ "media_permission-aidl-cpp",
+ ],
+}
+
+cc_library {
name: "libcameraservice",
+ defaults: [
+ "libcameraservice_deps",
+ ],
// Camera service source
srcs: [
@@ -105,6 +187,7 @@
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
"utils/AutoConditionLock.cpp",
+ "utils/SchedulingPolicyUtils.cpp",
"utils/SessionConfigurationUtils.cpp",
"utils/SessionConfigurationUtilsHidl.cpp",
"utils/SessionStatsBuilder.cpp",
@@ -119,73 +202,6 @@
"libmediametrics_headers",
],
- shared_libs: [
- "libactivitymanager_aidl",
- "libbase",
- "libdl",
- "libexif",
- "libui",
- "liblog",
- "libutilscallstack",
- "libutils",
- "libbinder",
- "libbinder_ndk",
- "libactivitymanager_aidl",
- "libpermission",
- "libcutils",
- "libmedia",
- "libmediautils",
- "libcamera_client",
- "libcamera_metadata",
- "libdynamic_depth",
- "libfmq",
- "libgui",
- "libhardware",
- "libhidlbase",
- "libimage_io",
- "libjpeg",
- "libultrahdr",
- "libmedia_codeclist",
- "libmedia_omx",
- "libmemunreachable",
- "libsensorprivacy",
- "libstagefright",
- "libstagefright_foundation",
- "libxml2",
- "libyuv",
- "android.frameworks.cameraservice.common@2.0",
- "android.frameworks.cameraservice.service@2.0",
- "android.frameworks.cameraservice.service@2.1",
- "android.frameworks.cameraservice.service@2.2",
- "android.frameworks.cameraservice.device@2.0",
- "android.frameworks.cameraservice.device@2.1",
- "android.frameworks.cameraservice.common-V1-ndk",
- "android.frameworks.cameraservice.service-V1-ndk",
- "android.frameworks.cameraservice.device-V1-ndk",
- "android.hardware.camera.common@1.0",
- "android.hardware.camera.provider@2.4",
- "android.hardware.camera.provider@2.5",
- "android.hardware.camera.provider@2.6",
- "android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V2-ndk",
- "android.hardware.camera.device@3.2",
- "android.hardware.camera.device@3.3",
- "android.hardware.camera.device@3.4",
- "android.hardware.camera.device@3.5",
- "android.hardware.camera.device@3.6",
- "android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V2-ndk",
- "media_permission-aidl-cpp",
- ],
-
- static_libs: [
- "libaidlcommonsupport",
- "libprocessinfoservice_aidl",
- "libbinderthreadstateutils",
- "media_permission-aidl-cpp",
- "libcameraservice_device_independent",
- ],
-
export_shared_lib_headers: [
"libbinder",
"libactivitymanager_aidl",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 31ac392..8add05e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -47,6 +47,7 @@
#include <binder/PermissionController.h>
#include <binder/IResultReceiver.h>
#include <binderthreadstate/CallerUtils.h>
+#include <com_android_internal_camera_flags.h>
#include <cutils/atomic.h>
#include <cutils/properties.h>
#include <cutils/misc.h>
@@ -89,6 +90,7 @@
const char* kActivityServiceName = "activity";
const char* kSensorPrivacyServiceName = "sensor_privacy";
const char* kAppopsServiceName = "appops";
+ const char* kProcessInfoServiceName = "processinfo";
}; // namespace anonymous
namespace android {
@@ -104,6 +106,7 @@
using hardware::camera2::ICameraInjectionSession;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
using hardware::camera2::utils::ConcurrentCameraIdCombination;
+namespace flags = com::android::internal::camera::flags;
// ----------------------------------------------------------------------------
// Logging support -- this is for debugging only
@@ -123,6 +126,8 @@
static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
static const std::string sCameraPermission("android.permission.CAMERA");
static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
+static const std::string sCameraHeadlessSystemUserPermission(
+ "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
static const std::string
sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
static const std::string sCameraOpenCloseListenerPermission(
@@ -694,25 +699,125 @@
broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+static bool isAutomotiveDevice() {
+ // Checks the property ro.hardware.type and returns true if it is
+ // automotive.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.hardware.type", value, "");
+ return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isHeadlessSystemUserMode() {
+ // Checks if the device is running in headless system user mode
+ // by checking the property ro.fw.mu.headless_system_user.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.fw.mu.headless_system_user", value, "");
+ return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isAutomotivePrivilegedClient(int32_t uid) {
+ // Returns false if this is not an automotive device type.
+ if (!isAutomotiveDevice())
+ return false;
+
+ // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+ // privileged client uid used for safety critical use cases such as
+ // rear view and surround view.
+ return uid == AID_AUTOMOTIVE_EVS;
+}
+
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const{
+ // Returns false if this is not an automotive device type.
+ if (!isAutomotiveDevice())
+ return false;
+
+ // Returns false if no camera id is provided.
+ if (cam_id.empty())
+ return false;
+
+ SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+ if (getSystemCameraKind(cam_id, &systemCameraKind) != OK) {
+ // This isn't a known camera ID, so it's not a system camera.
+ ALOGE("%s: Unknown camera id %s, ", __FUNCTION__, cam_id.c_str());
+ return false;
+ }
+
+ if (systemCameraKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+ ALOGE("%s: camera id %s is not a system camera", __FUNCTION__, cam_id.c_str());
+ return false;
+ }
+
+ CameraMetadata cameraInfo;
+ status_t res = mCameraProviderManager->getCameraCharacteristics(
+ cam_id, false, &cameraInfo, false);
+ if (res != OK){
+ ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
+ cam_id.c_str());
+ return false;
+ }
+
+ camera_metadata_entry auto_location = cameraInfo.find(ANDROID_AUTOMOTIVE_LOCATION);
+ if (auto_location.count != 1)
+ return false;
+
+ uint8_t location = auto_location.data.u8[0];
+ if ((location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT) &&
+ (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR) &&
+ (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT) &&
+ (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT)) {
+ return false;
+ }
+
+ return true;
+}
+
+bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) const{
+ if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+ // If cameraId is empty, then it means that this check is not used for the
+ // purpose of accessing a specific camera, hence grant permission just
+ // based on uid to the automotive privileged client.
+ if (cameraId.empty())
+ return true;
+ // If this call is used for accessing a specific camera then cam_id must be provided.
+ // In that case, only pre-grants the permission for accessing the exterior system only
+ // camera.
+ return isAutomotiveExteriorSystemCamera(cameraId);
+ }
+
permission::PermissionChecker permissionChecker;
+ return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
+ toString16(message), attributedOpCode)
+ != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
+ int callingUid) const{
AttributionSourceState attributionSource{};
attributionSource.pid = callingPid;
attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sSystemCameraPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
- bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sCameraPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForSystemCamera = checkPermission(cameraId,
+ sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+ bool checkPermissionForCamera = checkPermission(cameraId,
+ sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
return checkPermissionForSystemCamera && checkPermissionForCamera;
}
+bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+ int callingPid, int callingUid) const{
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
+ std::string(), AppOpsManager::OP_NONE);
+}
+
Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
bool hasSystemCameraPermissions =
- hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
+ hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
CameraThreadState::getCallingUid());
switch (type) {
case CAMERA_TYPE_BACKWARD_COMPATIBLE:
@@ -738,8 +843,7 @@
return Status::ok();
}
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping&
- cameraIdRemapping) {
+Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
const int pid = CameraThreadState::getCallingPid();
const int uid = CameraThreadState::getCallingUid();
@@ -764,21 +868,20 @@
std::string cameraIdToReplace, updatedCameraId;
for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
packageName = packageIdRemapping.packageName;
- if (packageName == "") {
+ if (packageName.empty()) {
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
"CameraIdRemapping: Package name cannot be empty");
}
-
if (packageIdRemapping.cameraIdsToReplace.size()
!= packageIdRemapping.updatedCameraIds.size()) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
"CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
- packageName.c_str());
+ packageName.c_str());
}
for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
updatedCameraId = packageIdRemapping.updatedCameraIds[i];
- if (cameraIdToReplace == "" || updatedCameraId == "") {
+ if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
"CameraIdRemapping: Camera Id cannot be empty for package %s",
packageName.c_str());
@@ -880,7 +983,7 @@
if (packageName.empty()) {
packageNameVal = getPackageNameFromUid(clientUid);
}
- if (clientUid < AID_APP_START || packageNameVal.empty()) {
+ if (clientUid < AID_APP_START || packageNameVal.empty()) {
// We shouldn't remap cameras for processes with system/vendor UIDs.
return inputCameraId;
}
@@ -918,9 +1021,8 @@
return STATUS_ERROR(ERROR_DISCONNECTED,
"Camera subsystem is not available");
}
- bool hasSystemCameraPermissions =
- hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
- CameraThreadState::getCallingUid());
+ bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
+ CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
if (hasSystemCameraPermissions) {
cameraIdBound = mNumberOfCameras;
@@ -949,13 +1051,12 @@
const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
auto callingPid = CameraThreadState::getCallingPid();
auto callingUid = CameraThreadState::getCallingUid();
- permission::PermissionChecker permissionChecker;
AttributionSourceState attributionSource{};
attributionSource.pid = callingPid;
attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sSystemCameraPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
+ sSystemCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE);
if (checkPermissionForSystemCamera || getpid() == callingPid) {
deviceIds = &mNormalDeviceIds;
}
@@ -1029,13 +1130,11 @@
// If it's not calling from cameraserver, check the permission only if
// android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
// it would've already been checked in shouldRejectSystemCameraConnection.
- permission::PermissionChecker permissionChecker;
AttributionSourceState attributionSource{};
attributionSource.pid = callingPid;
attributionSource.uid = callingUid;
- bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sCameraPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
if ((callingPid != getpid()) &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
!checkPermissionForCamera) {
@@ -1069,7 +1168,7 @@
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
const std::string cameraId = resolveCameraId(
- unresolvedCameraId, CameraThreadState::getCallingUid());
+ unresolvedCameraId, CameraThreadState::getCallingUid());
if (!mInitialized) {
ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
@@ -1188,7 +1287,8 @@
int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
- bool forceSlowJpegMode, const std::string& originalCameraId, /*out*/sp<BasicClient>* client) {
+ bool forceSlowJpegMode, const std::string& originalCameraId,
+ /*out*/sp<BasicClient>* client) {
// For HIDL devices
if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
// Create CameraClient based on device version reported by the HAL.
@@ -1477,7 +1577,6 @@
Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
const std::string& clientName, int& clientUid, int& clientPid,
/*out*/int& originalClientPid) const {
- permission::PermissionChecker permissionChecker;
AttributionSourceState attributionSource{};
int callingPid = CameraThreadState::getCallingPid();
@@ -1529,9 +1628,8 @@
attributionSource.pid = clientPid;
attributionSource.uid = clientUid;
attributionSource.packageName = clientName;
- bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sCameraPermission), attributionSource, String16(), AppOpsManager::OP_NONE)
- != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
+ std::string(), AppOpsManager::OP_NONE);
if (callingPid != getpid() &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1552,8 +1650,12 @@
callingUid, procState);
}
- // If sensor privacy is enabled then prevent access to the camera
- if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+ // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use cases
+ // such as rear view and surround view cannot be disabled and are exempt from sensor privacy
+ // policy. In all other cases, if sensor privacy is enabled then prevent access to the camera.
+ if ((!isAutomotivePrivilegedClient(callingUid) ||
+ !isAutomotiveExteriorSystemCamera(cameraId)) &&
+ mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
return STATUS_ERROR_FMT(ERROR_DISABLED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
@@ -1579,6 +1681,20 @@
clientUserId, cameraId.c_str());
}
+ if (flags::camera_hsum_permission()) {
+ // If the System User tries to access the camera when the device is running in
+ // headless system user mode, ensure that client has the required permission
+ // CAMERA_HEADLESS_SYSTEM_USER.
+ if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
+ !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+ ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
+ return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+ "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
+ User without camera headless system user permission",
+ clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ }
+ }
+
return Status::ok();
}
@@ -1671,33 +1787,6 @@
}
}
- // Get current active client PIDs
- std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
- ownerPids.push_back(clientPid);
-
- std::vector<int> priorityScores(ownerPids.size());
- std::vector<int> states(ownerPids.size());
-
- // Get priority scores of all active PIDs
- status_t err = ProcessInfoService::getProcessStatesScoresFromPids(
- ownerPids.size(), &ownerPids[0], /*out*/&states[0],
- /*out*/&priorityScores[0]);
- if (err != OK) {
- ALOGE("%s: Priority score query failed: %d",
- __FUNCTION__, err);
- return err;
- }
-
- // Update all active clients' priorities
- std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
- for (size_t i = 0; i < ownerPids.size() - 1; i++) {
- pidToPriorityMap.emplace(ownerPids[i],
- resource_policy::ClientPriority(priorityScores[i], states[i],
- /* isVendorClient won't get copied over*/ false,
- /* oomScoreOffset won't get copied over*/ 0));
- }
- mActiveClientManager.updatePriorities(pidToPriorityMap);
-
// Get state for the given cameraId
auto state = getCameraState(cameraId);
if (state == nullptr) {
@@ -1707,16 +1796,57 @@
return BAD_VALUE;
}
- int32_t actualScore = priorityScores[priorityScores.size() - 1];
- int32_t actualState = states[states.size() - 1];
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
+ if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+ // If processinfo service is not available and the client is automotive privileged
+ // client used for safety critical uses cases such as rear-view and surround-view which
+ // needs to be available before android boot completes, then use the hardcoded values
+ // for the process state and priority score. As this scenario is before android system
+ // services are up and client is native client, hence using NATIVE_ADJ as the priority
+ // score and state as PROCESS_STATE_BOUND_TOP as such automotive apps need to be
+ // visible on the top.
+ clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+ sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+ state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
+ ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+ } else {
+ // Get current active client PIDs
+ std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
+ ownerPids.push_back(clientPid);
- // Make descriptor for incoming client. We store the oomScoreOffset
- // since we might need it later on new handleEvictionsLocked and
- // ProcessInfoService would not take that into account.
- clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
- sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
- state->getConflicting(), actualScore, clientPid, actualState,
- oomScoreOffset, systemNativeClient);
+ std::vector<int> priorityScores(ownerPids.size());
+ std::vector<int> states(ownerPids.size());
+
+ // Get priority scores of all active PIDs
+ status_t err = ProcessInfoService::getProcessStatesScoresFromPids(ownerPids.size(),
+ &ownerPids[0], /*out*/&states[0], /*out*/&priorityScores[0]);
+ if (err != OK) {
+ ALOGE("%s: Priority score query failed: %d", __FUNCTION__, err);
+ return err;
+ }
+
+ // Update all active clients' priorities
+ std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
+ for (size_t i = 0; i < ownerPids.size() - 1; i++) {
+ pidToPriorityMap.emplace(ownerPids[i],
+ resource_policy::ClientPriority(priorityScores[i], states[i],
+ /* isVendorClient won't get copied over*/ false,
+ /* oomScoreOffset won't get copied over*/ 0));
+ }
+ mActiveClientManager.updatePriorities(pidToPriorityMap);
+
+ int32_t actualScore = priorityScores[priorityScores.size() - 1];
+ int32_t actualState = states[states.size() - 1];
+
+ // Make descriptor for incoming client. We store the oomScoreOffset
+ // since we might need it later on new handleEvictionsLocked and
+ // ProcessInfoService would not take that into account.
+ clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+ sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+ state->getConflicting(), actualScore, clientPid, actualState,
+ oomScoreOffset, systemNativeClient);
+ }
resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
@@ -1895,7 +2025,7 @@
// have android.permission.SYSTEM_CAMERA permissions.
if (!isVendorListener && (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA ||
(systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
- !hasPermissionsForSystemCamera(clientPid, clientUid)))) {
+ !hasPermissionsForSystemCamera(std::string(), clientPid, clientUid)))) {
return true;
}
return false;
@@ -1935,7 +2065,7 @@
// characteristics) even if clients don't have android.permission.CAMERA. We do not want the
// same behavior for system camera devices.
if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
- !hasPermissionsForSystemCamera(cPid, cUid)) {
+ !hasPermissionsForSystemCamera(cameraId, cPid, cUid)) {
ALOGW("Rejecting access to system only camera %s, inadequete permissions",
cameraId.c_str());
return true;
@@ -1986,16 +2116,20 @@
clientUserId = multiuser_get_user_id(callingUid);
}
- if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
+ // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use cases
+ // such as rear view and surround view cannot be disabled.
+ if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(cameraId))
+ && mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
std::string msg = "Camera disabled by device policy";
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(ERROR_DISABLED, msg.c_str());
}
// enforce system camera permissions
- if (oomScoreOffset > 0 &&
- !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
- !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+ if (oomScoreOffset > 0
+ && !hasPermissionsForSystemCamera(cameraId, callingPid,
+ CameraThreadState::getCallingUid())
+ && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
"camera id %s without SYSTEM_CAMERA permissions",
clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2006,10 +2140,10 @@
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
- targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
- unresolvedCameraId, /*out*/client);
+ targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
+ /*out*/client);
- if (!ret.isOk()) {
+ if(!ret.isOk()) {
logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
return ret;
}
@@ -2082,6 +2216,8 @@
bool isNonSystemNdk = false;
std::string clientPackageName;
+ int packageUid = (clientUid == USE_CALLING_UID) ?
+ CameraThreadState::getCallingUid() : clientUid;
if (clientPackageNameMaybe.size() <= 0) {
// NDK calls don't come with package names, but we need one for various cases.
// Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -2089,8 +2225,6 @@
// same permissions, so picking any associated package name is sufficient. For some
// other cases, this may give inaccurate names for clients in logs.
isNonSystemNdk = true;
- int packageUid = (clientUid == USE_CALLING_UID) ?
- CameraThreadState::getCallingUid() : clientUid;
clientPackageName = getPackageNameFromUid(packageUid);
} else {
clientPackageName = clientPackageNameMaybe;
@@ -2287,32 +2421,38 @@
clientPackageName));
}
- // Set camera muting behavior
- bool isCameraPrivacyEnabled =
- mSensorPrivacyPolicy->isCameraPrivacyEnabled();
- if (client->supportsCameraMute()) {
- client->setCameraMute(
- mOverrideCameraMuteMode || isCameraPrivacyEnabled);
- } else if (isCameraPrivacyEnabled) {
- // no camera mute supported, but privacy is on! => disconnect
- ALOGI("Camera mute not supported for package: %s, camera id: %s",
- client->getPackageName().c_str(), cameraId.c_str());
- // Do not hold mServiceLock while disconnecting clients, but
- // retain the condition blocking other clients from connecting
- // in mServiceLockWrapper if held.
- mServiceLock.unlock();
- // Clear caller identity temporarily so client disconnect PID
- // checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
- // Note AppOp to trigger the "Unblock" dialog
- client->noteAppOp();
- client->disconnect();
- CameraThreadState::restoreCallingIdentity(token);
- // Reacquire mServiceLock
- mServiceLock.lock();
+ // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use
+ // cases such as rear view and surround view cannot be disabled and are exempt from camera
+ // privacy policy.
+ if ((!isAutomotivePrivilegedClient(packageUid) ||
+ !isAutomotiveExteriorSystemCamera(cameraId))) {
+ // Set camera muting behavior.
+ bool isCameraPrivacyEnabled =
+ mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+ if (client->supportsCameraMute()) {
+ client->setCameraMute(
+ mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+ } else if (isCameraPrivacyEnabled) {
+ // no camera mute supported, but privacy is on! => disconnect
+ ALOGI("Camera mute not supported for package: %s, camera id: %s",
+ client->getPackageName().c_str(), cameraId.c_str());
+ // Do not hold mServiceLock while disconnecting clients, but
+ // retain the condition blocking other clients from connecting
+ // in mServiceLockWrapper if held.
+ mServiceLock.unlock();
+ // Clear caller identity temporarily so client disconnect PID
+ // checks work correctly
+ int64_t token = CameraThreadState::clearCallingIdentity();
+ // Note AppOp to trigger the "Unblock" dialog
+ client->noteAppOp();
+ client->disconnect();
+ CameraThreadState::restoreCallingIdentity(token);
+ // Reacquire mServiceLock
+ mServiceLock.lock();
- return STATUS_ERROR_FMT(ERROR_DISABLED,
- "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
+ return STATUS_ERROR_FMT(ERROR_DISABLED,
+ "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
+ }
}
if (shimUpdateOnly) {
@@ -2449,8 +2589,7 @@
}
Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
- int32_t torchStrength,
- const sp<IBinder>& clientBinder) {
+ int32_t torchStrength, const sp<IBinder>& clientBinder) {
Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
@@ -2577,8 +2716,7 @@
return Status::ok();
}
-Status CameraService::setTorchMode(const std::string& unresolvedCameraId,
- bool enabled,
+Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
const sp<IBinder>& clientBinder) {
Mutex::Autolock lock(mServiceLock);
@@ -2921,13 +3059,11 @@
// Check for camera permissions
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
- permission::PermissionChecker permissionChecker;
AttributionSourceState attributionSource{};
attributionSource.pid = callingPid;
attributionSource.uid = callingUid;
- bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
- toString16(sCameraPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForCamera = checkPermission(std::string(),
+ sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
if ((callingPid != getpid()) && !checkPermissionForCamera) {
ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -2975,13 +3111,13 @@
auto clientUid = CameraThreadState::getCallingUid();
auto clientPid = CameraThreadState::getCallingPid();
- permission::PermissionChecker permissionChecker;
AttributionSourceState attributionSource{};
attributionSource.uid = clientUid;
attributionSource.pid = clientPid;
- bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
- toString16(sCameraOpenCloseListenerPermission), attributionSource, String16(),
- AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+
+ bool openCloseCallbackAllowed = checkPermission(std::string(),
+ sCameraOpenCloseListenerPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE);
Mutex::Autolock lock(mServiceLock);
@@ -5119,6 +5255,7 @@
state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
&logicalCameraIds]
(const std::string& cameraId, StatusInternal status) {
+
if (status != StatusInternal::ENUMERATING) {
// Update torch status if it has a flash unit.
Mutex::Autolock al(mTorchStatusMutex);
@@ -6015,8 +6152,10 @@
" clear-stream-use-case-override clear the stream use case override\n"
" set-zoom-override <-1/0/1> enable or disable zoom override\n"
" Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
- " remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
+ " set-watchdog <VALUE> enables or disables the camera service watchdog\n"
+ " Valid values 0=disable, 1=enable\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
+ " remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
" help print this message\n");
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 68f7f73..6819136 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
#define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
+#include <android/content/AttributionSourceState.h>
#include <android/hardware/BnCameraService.h>
#include <android/hardware/BnSensorPrivacyListener.h>
#include <android/hardware/ICameraServiceListener.h>
@@ -103,6 +104,9 @@
// Event log ID
static const int SN_EVENT_LOG_ID = 0x534e4554;
+ // Keep this in sync with frameworks/base/core/java/android/os/UserHandle.java
+ static const userid_t USER_SYSTEM = 0;
+
// Register camera service
static void instantiate();
@@ -617,6 +621,13 @@
int32_t updateAudioRestrictionLocked();
private:
+ /**
+ * Returns true if the device is an automotive device and cameraId is system
+ * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
+ * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+ * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+ */
+ bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
// TODO: b/263304156 update this to make use of a death callback for more
// robust/fault tolerant logging
@@ -633,6 +644,25 @@
}
/**
+ * Pre-grants the permission if the attribution source uid is for an automotive
+ * privileged client. Otherwise uses system service permission checker to check
+ * for the appropriate permission. If this function is called for accessing a specific
+ * camera, then the cameraID must not be empty. CameraId is used only in case of automotive
+ * privileged client so that permission is pre-granted only to access system camera device
+ * which is located outside of the vehicle body frame because camera located inside the vehicle
+ * cabin would need user permission.
+ */
+ bool checkPermission(const std::string& cameraId, const std::string& permission,
+ const content::AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) const;
+
+ bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
+ const;
+
+ bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+ int callingUid) const;
+
+ /**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
*/
@@ -892,7 +922,7 @@
// Should a device status update be skipped for a particular camera device ? (this can happen
// under various conditions. For example if a camera device is advertised as
// system only or hidden secure camera, amongst possible others.
- static bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
+ bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
int clientPid, int clientUid);
// Gets the kind of camera device (i.e public, hidden secure or system only)
@@ -1421,11 +1451,11 @@
*/
static std::string getFormattedCurrentTime();
- static binder::Status makeClient(
- const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
- const std::string& packageName, bool systemNativeClient,
- const std::optional<std::string>& featureId, const std::string& cameraId, int api1CameraId,
- int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
+ static binder::Status makeClient(const sp<CameraService>& cameraService,
+ const sp<IInterface>& cameraCb, const std::string& packageName,
+ bool systemNativeClient, const std::optional<std::string>& featureId,
+ const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
+ int clientPid, uid_t clientUid, int servicePid,
std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
const std::string& originalCameraId,
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index 9f25865..afc432d 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -44,7 +44,7 @@
watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
// Default cycles and cycle length values used to calculate permitted elapsed time
-const static size_t kMaxCycles = 100;
+const static size_t kMaxCycles = 650;
const static uint32_t kCycleLengthMs = 100;
namespace android {
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index 48c804d..3cc843d 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -77,6 +77,10 @@
{34, {
ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+ ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
+ ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
@@ -108,6 +112,7 @@
ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
ANDROID_EXTENSION_CURRENT_TYPE,
ANDROID_EXTENSION_STRENGTH,
+ ANDROID_FLASH_STRENGTH_LEVEL,
ANDROID_SCALER_RAW_CROP_REGION,
} },
};
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c27fc90..28f7054 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -18,6 +18,7 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <utils/CameraThreadState.h>
#include <utils/Log.h>
@@ -55,6 +56,8 @@
using namespace camera3;
using camera3::camera_stream_rotation_t::CAMERA_STREAM_ROTATION_0;
+namespace flags = com::android::internal::camera::flags;
+
CameraDeviceClientBase::CameraDeviceClientBase(
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
@@ -439,7 +442,7 @@
CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
for (const auto& it : request.mPhysicalCameraSettings) {
- std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
+ const std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
if (it.settings.isEmpty()) {
ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
__FUNCTION__, mCameraIdStr.c_str());
@@ -465,18 +468,19 @@
}
}
+ const std::string &physicalId = resolvedId;
bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
mSupportedPhysicalRequestKeys.end();
bool hasTestPatternDataPhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_DATA) !=
mSupportedPhysicalRequestKeys.end();
- if (resolvedId != mDevice->getId()) {
+ if (physicalId != mDevice->getId()) {
auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
resolvedId);
if (found == requestedPhysicalIds.end()) {
ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
- __FUNCTION__, mCameraIdStr.c_str(), resolvedId.c_str());
+ __FUNCTION__, mCameraIdStr.c_str(), physicalId.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Invalid physical camera id");
}
@@ -538,6 +542,21 @@
if (entry.count == 1) {
mVideoStabilizationMode = entry.data.u8[0];
}
+ if (flags::log_ultrawide_usage()) {
+ entry = physicalSettingsList.begin()->metadata.find(
+ ANDROID_CONTROL_ZOOM_RATIO);
+ if (entry.count == 1 && entry.data.f[0] < 1.0f ) {
+ mUsedUltraWide = true;
+ }
+ }
+ if (!mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
+ entry = physicalSettingsList.begin()->metadata.find(
+ ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ if (entry.count == 1 && entry.data.i32[0] ==
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+ mUsedSettingsOverrideZoom = true;
+ }
+ }
}
mRequestIdCounter++;
@@ -2050,7 +2069,8 @@
}
}
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- fullStreamStats, mUserTag, mVideoStabilizationMode);
+ fullStreamStats, mUserTag, mVideoStabilizationMode, mUsedUltraWide,
+ mUsedSettingsOverrideZoom);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 1c19dbd..5dea3aa 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -370,9 +370,13 @@
std::string mUserTag;
// The last set video stabilization mode
int mVideoStabilizationMode = -1;
+ // Whether a zoom_ratio < 1.0 has been used during this session
+ bool mUsedUltraWide = false;
+ // Whether a zoom settings override has been used during this session
+ bool mUsedSettingsOverrideZoom = false;
// This only exists in case of camera ID Remapping.
- std::string mOriginalCameraId;
+ const std::string mOriginalCameraId;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 43eb181..c730b14 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -157,16 +157,6 @@
return res;
}
- /** Start watchdog thread */
- mCameraServiceWatchdog = new CameraServiceWatchdog(TClientBase::mCameraIdStr,
- mCameraServiceProxyWrapper);
- res = mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
- if (res != OK) {
- ALOGE("%s: Unable to start camera service watchdog thread: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
return OK;
}
@@ -178,11 +168,6 @@
disconnect();
- if (mCameraServiceWatchdog != NULL) {
- mCameraServiceWatchdog->requestExit();
- mCameraServiceWatchdog.clear();
- }
-
ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(),
TClientBase::mClientPackageName.c_str(),
@@ -268,17 +253,7 @@
template <typename TClientBase>
binder::Status Camera2ClientBase<TClientBase>::disconnect() {
- if (mCameraServiceWatchdog != nullptr && mDevice != nullptr) {
- // Timer for the disconnect call should be greater than getExpectedInFlightDuration
- // since this duration is used to error handle methods in the disconnect sequence
- // thus allowing existing error handling methods to execute first
- uint64_t maxExpectedDuration =
- ns2ms(mDevice->getExpectedInFlightDuration() + kBufferTimeDisconnectNs);
- // Initialization from hal succeeded, time disconnect.
- return mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(disconnectImpl(),
- maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
- }
return disconnectImpl();
}
@@ -403,7 +378,8 @@
void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::vector<hardware::CameraStreamStats>& streamStats,
- const std::string& userTag, int videoStabilizationMode) {
+ const std::string& userTag, int videoStabilizationMode, bool usedUltraWide,
+ bool usedZoomOverride) {
if (mDeviceActive) {
status_t res = TClientBase::finishCameraStreamingOps();
if (res != OK) {
@@ -412,7 +388,7 @@
}
mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
- streamStats);
+ usedUltraWide, usedZoomOverride, streamStats);
}
mDeviceActive = false;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 30c763d..bac4af8 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -97,7 +97,8 @@
void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
bool deviceError,
const std::vector<hardware::CameraStreamStats>& streamStats,
- const std::string& userTag, int videoStabilizationMode);
+ const std::string& userTag, int videoStabilizationMode,
+ bool usedUltraWide, bool usedZoomOverride);
int getCameraId() const;
const sp<CameraDeviceBase>&
@@ -137,9 +138,6 @@
protected:
- // Used for watchdog timeout to monitor disconnect
- static const nsecs_t kBufferTimeDisconnectNs = 3000000000; // 3 sec.
-
// The PID provided in the constructor call
pid_t mInitialClientPid;
bool mOverrideForPerfClass = false;
@@ -186,9 +184,6 @@
binder::Status disconnectImpl();
- // Watchdog thread
- sp<CameraServiceWatchdog> mCameraServiceWatchdog;
-
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 23051ef..54dc2bc 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -35,6 +35,7 @@
#include <android/binder_manager.h>
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
+#include <com_android_internal_camera_flags.h>
#include <functional>
#include <camera_metadata_hidden.h>
#include <android-base/parseint.h>
@@ -57,9 +58,12 @@
using std::literals::chrono_literals::operator""s;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+namespace flags = com::android::internal::camera::flags;
+
namespace {
const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
const std::string kExternalProviderName = "external/0";
+const std::string kVirtualProviderName = "virtual/0";
} // anonymous namespace
const float CameraProviderManager::kDepthARTolerance = .1f;
@@ -68,6 +72,8 @@
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
+CameraProviderManager::AidlServiceInteractionProxyImpl
+CameraProviderManager::sAidlServiceInteractionProxy{};
CameraProviderManager::~CameraProviderManager() {
}
@@ -130,6 +136,29 @@
return OK;
}
+std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+CameraProviderManager::AidlServiceInteractionProxyImpl::getAidlService(
+ const std::string& serviceName) {
+ using aidl::android::hardware::camera::provider::ICameraProvider;
+
+ AIBinder* binder = nullptr;
+ if (flags::lazy_aidl_wait_for_service()) {
+ binder = AServiceManager_waitForService(serviceName.c_str());
+ } else {
+ binder = AServiceManager_getService(serviceName.c_str());
+ }
+
+ if (binder == nullptr) {
+ ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+ serviceName.c_str());
+ return nullptr;
+ }
+ std::shared_ptr<ICameraProvider> interface =
+ ICameraProvider::fromBinder(ndk::SpAIBinder(binder));
+
+ return interface;
+};
+
static std::string getFullAidlProviderName(const std::string instance) {
std::string aidlHalServiceDescriptor =
std::string(aidl::android::hardware::camera::provider::ICameraProvider::descriptor);
@@ -142,6 +171,13 @@
auto sm = defaultServiceManager();
auto aidlProviders = sm->getDeclaredInstances(
String16(aidlHalServiceDescriptor));
+
+ if (isVirtualCameraHalEnabled()) {
+ // Virtual Camera provider is not declared in the VINTF manifest so we
+ // manually add it if the binary is present.
+ aidlProviders.push_back(String16(kVirtualProviderName.c_str()));
+ }
+
for (const auto &aidlInstance : aidlProviders) {
std::string aidlServiceName =
getFullAidlProviderName(toStdString(aidlInstance));
@@ -157,12 +193,19 @@
}
status_t CameraProviderManager::initialize(wp<CameraProviderManager::StatusListener> listener,
- HidlServiceInteractionProxy* hidlProxy) {
+ HidlServiceInteractionProxy* hidlProxy, AidlServiceInteractionProxy* aidlProxy) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
if (hidlProxy == nullptr) {
- ALOGE("%s: No valid service interaction proxy provided", __FUNCTION__);
+ ALOGE("%s: No valid service Hidl interaction proxy provided", __FUNCTION__);
return BAD_VALUE;
}
+
+ if (aidlProxy == nullptr) {
+ ALOGE("%s: No valid service Aidl interaction proxy provided", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ mAidlServiceProxy = aidlProxy;
+
mListener = listener;
mDeviceState = 0;
auto res = tryToInitAndAddHidlProvidersLocked(hidlProxy);
@@ -1487,6 +1530,58 @@
return res;
}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupManualFlashStrengthControlTags(
+ CameraMetadata& ch) {
+ status_t res = OK;
+ auto flashSingleStrengthMaxLevelEntry = ch.find(ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL);
+ if (flashSingleStrengthMaxLevelEntry.count == 0) {
+ int32_t flashSingleStrengthMaxLevel = 1;
+ res = ch.update(ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
+ &flashSingleStrengthMaxLevel, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL: %s (%d)",
+ __FUNCTION__,strerror(-res), res);
+ return res;
+ }
+ }
+ auto flashSingleStrengthDefaultLevelEntry = ch.find(
+ ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL);
+ if (flashSingleStrengthDefaultLevelEntry.count == 0) {
+ int32_t flashSingleStrengthDefaultLevel = 1;
+ res = ch.update(ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
+ &flashSingleStrengthDefaultLevel, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL: %s (%d)",
+ __FUNCTION__,strerror(-res), res);
+ return res;
+ }
+ }
+ auto flashTorchStrengthMaxLevelEntry = ch.find(ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL);
+ if (flashTorchStrengthMaxLevelEntry.count == 0) {
+ int32_t flashTorchStrengthMaxLevel = 1;
+ res = ch.update(ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
+ &flashTorchStrengthMaxLevel, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL: %s (%d)",
+ __FUNCTION__,strerror(-res), res);
+ return res;
+ }
+ }
+ auto flashTorchStrengthDefaultLevelEntry = ch.find(ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL);
+ if (flashTorchStrengthDefaultLevelEntry.count == 0) {
+ int32_t flashTorchStrengthDefaultLevel = 1;
+ res = ch.update(ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
+ &flashTorchStrengthDefaultLevel, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL: %s (%d)",
+ __FUNCTION__,strerror(-res), res);
+ return res;
+ }
+ }
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupMonochromeTags() {
status_t res = OK;
auto& c = mCameraCharacteristics;
@@ -1918,9 +2013,9 @@
status_t CameraProviderManager::tryToInitializeAidlProviderLocked(
const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
using aidl::android::hardware::camera::provider::ICameraProvider;
+
std::shared_ptr<ICameraProvider> interface =
- ICameraProvider::fromBinder(ndk::SpAIBinder(
- AServiceManager_getService(providerName.c_str())));
+ mAidlServiceProxy->getAidlService(providerName.c_str());
if (interface == nullptr) {
ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -1956,15 +2051,18 @@
bool providerPresent = false;
bool preexisting =
(mAidlProviderWithBinders.find(newProvider) != mAidlProviderWithBinders.end());
-
- // We need to use the extracted provider name here since 'newProvider' has
- // the fully qualified name of the provider service in case of AIDL. We want
- // just instance name.
using aidl::android::hardware::camera::provider::ICameraProvider;
- std::string extractedProviderName =
+ std::string providerNameUsed =
newProvider.substr(std::string(ICameraProvider::descriptor).size() + 1);
+ if (flags::lazy_aidl_wait_for_service()) {
+ // 'newProvider' has the fully qualified name of the provider service in case of AIDL.
+ // ProviderInfo::mProviderName also has the fully qualified name - so we just compare them
+ // here.
+ providerNameUsed = newProvider;
+ }
+
for (const auto& providerInfo : mProviders) {
- if (providerInfo->mProviderName == extractedProviderName) {
+ if (providerInfo->mProviderName == providerNameUsed) {
ALOGW("%s: Camera provider HAL with name '%s' already registered",
__FUNCTION__, newProvider.c_str());
// Do not add new instances for lazy HAL external provider or aidl
@@ -1981,7 +2079,7 @@
}
sp<AidlProviderInfo> providerInfo =
- new AidlProviderInfo(extractedProviderName, providerInstance, this);
+ new AidlProviderInfo(providerNameUsed, providerInstance, this);
if (!providerPresent) {
status_t res = tryToInitializeAidlProviderLocked(newProvider, providerInfo);
@@ -2061,6 +2159,9 @@
if (providerInfo->mProviderName == removedProviderName) {
IPCTransport providerTransport = providerInfo->getIPCTransport();
std::string removedAidlProviderName = getFullAidlProviderName(removedProviderName);
+ if (flags::lazy_aidl_wait_for_service()) {
+ removedAidlProviderName = removedProviderName;
+ }
switch(providerTransport) {
case IPCTransport::HIDL:
return tryToInitializeHidlProviderLocked(removedProviderName, providerInfo);
@@ -2230,7 +2331,13 @@
}
bool CameraProviderManager::ProviderInfo::isExternalLazyHAL() const {
- return kEnableLazyHal && (mProviderName == kExternalProviderName);
+ std::string providerName = mProviderName;
+ if (flags::lazy_aidl_wait_for_service() && getIPCTransport() == IPCTransport::AIDL) {
+ using aidl::android::hardware::camera::provider::ICameraProvider;
+ providerName =
+ mProviderName.substr(std::string(ICameraProvider::descriptor).size() + 1);
+ }
+ return kEnableLazyHal && (providerName == kExternalProviderName);
}
status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
@@ -3054,4 +3161,8 @@
}
}
+bool CameraProviderManager::isVirtualCameraHalEnabled() {
+ return flags::virtual_camera_service_discovery();
+}
+
} // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 28be652..fd04854 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -174,6 +174,24 @@
virtual hardware::hidl_vec<hardware::hidl_string> listServices() override;
};
+ // Proxy to inject fake services in test.
+ class AidlServiceInteractionProxy {
+ public:
+ // Returns the Aidl service with the given serviceName
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ getAidlService(const std::string& serviceName) = 0;
+
+ virtual ~AidlServiceInteractionProxy() = default;
+ };
+
+ // Standard use case - call into the normal static methods which invoke
+ // the real service manager
+ class AidlServiceInteractionProxyImpl : public AidlServiceInteractionProxy {
+ public:
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ getAidlService(const std::string& serviceName) override;
+ };
+
/**
* Listener interface for device/torch status changes
*/
@@ -209,7 +227,8 @@
* used for testing. The lifetime of the proxy must exceed the lifetime of the manager.
*/
status_t initialize(wp<StatusListener> listener,
- HidlServiceInteractionProxy *hidlProxy = &sHidlServiceInteractionProxy);
+ HidlServiceInteractionProxy* hidlProxy = &sHidlServiceInteractionProxy,
+ AidlServiceInteractionProxy* aidlProxy = &sAidlServiceInteractionProxy);
status_t getCameraIdIPCTransport(const std::string &id,
IPCTransport *providerTransport) const;
@@ -424,6 +443,7 @@
wp<StatusListener> mListener;
HidlServiceInteractionProxy* mHidlServiceProxy;
+ AidlServiceInteractionProxy* mAidlServiceProxy;
// Current overall Android device physical status
int64_t mDeviceState;
@@ -432,6 +452,7 @@
mutable std::mutex mProviderLifecycleLock;
static HidlServiceInteractionProxyImpl sHidlServiceInteractionProxy;
+ static AidlServiceInteractionProxyImpl sAidlServiceInteractionProxy;
struct HalCameraProvider {
// Empty parent struct for storing either aidl / hidl camera provider reference
@@ -489,7 +510,7 @@
CameraProviderManager *manager);
~ProviderInfo();
- virtual IPCTransport getIPCTransport() = 0;
+ virtual IPCTransport getIPCTransport() const = 0;
const std::string& getType() const;
@@ -683,6 +704,7 @@
SystemCameraKind getSystemCameraKind();
status_t fixupMonochromeTags();
status_t fixupTorchStrengthTags();
+ status_t fixupManualFlashStrengthControlTags(CameraMetadata& ch);
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
status_t deriveJpegRTags(bool maxResolution = false);
@@ -867,6 +889,8 @@
status_t usbDeviceDetached(const std::string &usbDeviceId);
ndk::ScopedAStatus onAidlRegistration(const std::string& in_name,
const ::ndk::SpAIBinder& in_binder);
+
+ static bool isVirtualCameraHalEnabled();
};
} // namespace android
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 5e79d6b..257103f 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -17,6 +17,7 @@
#include "common/HalConversionsTemplated.h"
#include "common/CameraProviderInfoTemplated.h"
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <aidlcommonsupport/NativeHandle.h>
@@ -34,7 +35,9 @@
namespace android {
+namespace flags = com::android::internal::camera::flags;
namespace SessionConfigurationUtils = ::android::camera3::SessionConfigurationUtils;
+namespace flags = com::android::internal::camera::flags;
using namespace aidl::android::hardware;
using namespace hardware::camera;
@@ -99,7 +102,14 @@
status_t AidlProviderInfo::initializeAidlProvider(
std::shared_ptr<ICameraProvider>& interface, int64_t currentDeviceState) {
- status_t res = parseProviderName(mProviderName, &mType, &mId);
+ using aidl::android::hardware::camera::provider::ICameraProvider;
+ std::string parsedProviderName = mProviderName;
+ if (flags::lazy_aidl_wait_for_service()) {
+ parsedProviderName =
+ mProviderName.substr(std::string(ICameraProvider::descriptor).size() + 1);
+ }
+
+ status_t res = parseProviderName(parsedProviderName, &mType, &mId);
if (res != OK) {
ALOGE("%s: Invalid provider name, ignoring", __FUNCTION__);
return BAD_VALUE;
@@ -120,11 +130,15 @@
}
mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(AIBinder_DeathRecipient_new(binderDied));
- auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(), this);
- if (link != STATUS_OK) {
- ALOGW("%s: Unable to link to provider '%s' death notifications",
- __FUNCTION__, mProviderName.c_str());
- return DEAD_OBJECT;
+
+ if (!flags::virtual_camera_service_discovery() || interface->isRemote()) {
+ binder_status_t link =
+ AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(), this);
+ if (link != STATUS_OK) {
+ ALOGW("%s: Unable to link to provider '%s' death notifications (%d)", __FUNCTION__,
+ mProviderName.c_str(), link);
+ return DEAD_OBJECT;
+ }
}
if (!kEnableLazyHal) {
@@ -274,10 +288,12 @@
if (interface == nullptr) {
ALOGV("Camera provider actually needs restart, calling getService(%s)",
mProviderName.c_str());
- interface =
- ICameraProvider::fromBinder(
- ndk::SpAIBinder(
- AServiceManager_getService(mProviderName.c_str())));
+ interface = mManager->mAidlServiceProxy->getAidlService(mProviderName.c_str());
+
+ if (interface == nullptr) {
+ ALOGD("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
+ return nullptr;
+ }
// Set all devices as ENUMERATING, provider should update status
// to PRESENT after initializing.
@@ -494,6 +510,15 @@
__FUNCTION__, strerror(-res), res);
return;
}
+ if (flags::camera_manual_flash_strength_control()) {
+ res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return;
+ }
+ }
+
auto stat = addDynamicDepthTags();
if (OK != stat) {
ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
@@ -607,8 +632,9 @@
aidl::android::hardware::camera::device::CameraMetadata pChars;
status = interface->getPhysicalCameraCharacteristics(id, &pChars);
if (!status.isOk()) {
- ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
- __FUNCTION__, id.c_str(), id.c_str(), status.getMessage());
+ ALOGE("%s: Transaction error getting physical camera %s characteristics for "
+ "logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+ status.getMessage());
return;
}
std::vector<uint8_t> &pMetadata = pChars.metadata;
@@ -630,6 +656,15 @@
ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+
+ if (flags::camera_manual_flash_strength_control()) {
+ res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return;
+ }
+ }
}
}
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index 97a8fed..90bc627 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -49,7 +49,7 @@
static void binderDied(void *cookie);
- virtual IPCTransport getIPCTransport() override {return IPCTransport::AIDL;}
+ virtual IPCTransport getIPCTransport() const override {return IPCTransport::AIDL;}
const std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
startProviderInterface();
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index bf7a471..e4bd503 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -17,6 +17,7 @@
#include "common/HalConversionsTemplated.h"
#include "common/CameraProviderInfoTemplated.h"
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <android/hardware/ICameraService.h>
@@ -44,6 +45,7 @@
using StatusListener = CameraProviderManager::StatusListener;
using HalDeviceStatusType = android::hardware::camera::common::V1_0::CameraDeviceStatus;
+namespace flags = com::android::internal::camera::flags;
using hardware::camera::provider::V2_5::DeviceState;
using hardware::ICameraService;
@@ -613,6 +615,15 @@
__FUNCTION__, strerror(-res), res);
return;
}
+ if (flags::camera_manual_flash_strength_control()) {
+ res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return;
+ }
+ }
+
auto stat = addDynamicDepthTags();
if (OK != stat) {
ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
@@ -743,8 +754,9 @@
});
if (!ret.isOk()) {
- ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
- __FUNCTION__, id.c_str(), id.c_str(), ret.description().c_str());
+ ALOGE("%s: Transaction error getting physical camera %s characteristics for"
+ " logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+ ret.description().c_str());
return;
}
if (status != Status::OK) {
@@ -760,6 +772,15 @@
ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+
+ if (flags::camera_manual_flash_strength_control()) {
+ res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return;
+ }
+ }
}
}
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
index e0f1646..fa6f4d4 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -45,7 +45,7 @@
sp<hardware::camera::provider::V2_4::ICameraProvider>& interface,
int64_t currentDeviceState);
- IPCTransport getIPCTransport() override {return IPCTransport::HIDL;}
+ IPCTransport getIPCTransport() const override {return IPCTransport::HIDL;}
const sp<hardware::camera::provider::V2_4::ICameraProvider> startProviderInterface();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ee4d855..6765c1d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -63,9 +63,9 @@
#include "device3/Camera3InputStream.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3SharedOutputStream.h"
-#include "mediautils/SchedulingPolicyService.h"
#include "utils/CameraThreadState.h"
#include "utils/CameraTraces.h"
+#include "utils/SchedulingPolicyUtils.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/TraceHFR.h"
@@ -297,13 +297,6 @@
}
}
}
- // Signal to request thread that we're not expecting any
- // more requests. This will be true since once we're in
- // disconnect and we've cleared off the request queue, the
- // request thread can't receive any new requests through
- // binder calls - since disconnect holds
- // mBinderSerialization lock.
- mRequestThread->setRequestClearing();
}
if (mStatus == STATUS_ERROR) {
@@ -2632,8 +2625,8 @@
if (disableFifo != 1) {
// Boost priority of request thread to SCHED_FIFO.
pid_t requestThreadTid = mRequestThread->getTid();
- res = requestPriority(getpid(), requestThreadTid,
- kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
+ res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
+ kRequestThreadPriority);
if (res != OK) {
ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
strerror(-res), res);
@@ -3303,8 +3296,12 @@
}
void Camera3Device::RequestThread::requestExit() {
- // Call parent to set up shutdown
- Thread::requestExit();
+ {
+ Mutex::Autolock l(mRequestLock);
+ mRequestClearing = true;
+ // Call parent to set up shutdown
+ Thread::requestExit();
+ }
// The exit from any possible waits
mDoPauseSignal.signal();
mRequestSignal.signal();
@@ -4436,11 +4433,6 @@
return;
}
-void Camera3Device::RequestThread::setRequestClearing() {
- Mutex::Autolock l(mRequestLock);
- mRequestClearing = true;
-}
-
sp<Camera3Device::CaptureRequest>
Camera3Device::RequestThread::waitForNextRequestLocked() {
status_t res;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index b36a60a..d812c01 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -941,9 +941,6 @@
*/
void setPaused(bool paused);
- // set mRequestClearing - no new requests are expected to be queued to RequestThread
- void setRequestClearing();
-
/**
* Wait until thread processes the capture request with settings'
* android.request.id == requestId.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 1e7e337..450f3dd 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -159,6 +159,23 @@
return res;
}
+status_t fixupManualFlashStrengthControlTags(CameraMetadata& resultMetadata) {
+ status_t res = OK;
+ camera_metadata_entry strengthLevelEntry =
+ resultMetadata.find(ANDROID_FLASH_STRENGTH_LEVEL);
+ if (strengthLevelEntry.count == 0) {
+ const int32_t defaultStrengthLevelEntry = ANDROID_FLASH_STRENGTH_LEVEL;
+ res = resultMetadata.update(ANDROID_FLASH_STRENGTH_LEVEL, &defaultStrengthLevelEntry, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_FLASH_STRENGTH_LEVEL: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ return res;
+}
+
void correctMeteringRegions(camera_metadata_t *meta) {
if (meta == nullptr) return;
@@ -261,7 +278,7 @@
auto mapper = states.rotateAndCropMappers.find(states.cameraId);
if (mapper != states.rotateAndCropMappers.end()) {
- const auto& remappedKeys = iter->second.getRemappedKeys();
+ const auto& remappedKeys = mapper->second.getRemappedKeys();
keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
}
@@ -385,6 +402,22 @@
}
}
+ // Fix up manual flash strength control metadata
+ res = fixupManualFlashStrengthControlTags(captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set flash strength level defaults in result metadata: %s (%d)",
+ strerror(-res), res);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ res = fixupManualFlashStrengthControlTags(physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set flash strength level defaults in physical result"
+ " metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+ }
+
// Fix up autoframing metadata
res = fixupAutoframingTags(captureResult.mMetadata);
if (res != OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 23afa6e..701c472 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -772,7 +772,8 @@
// Buffer status may be changed, so make a copy of the stream_buffer struct.
camera_stream_buffer b = buffer;
- if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp) {
+ if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp
+ && b.status != CAMERA_BUFFER_STATUS_ERROR) {
ALOGE("%s: Stream %d: timestamp %" PRId64 " is not increasing. Prev timestamp %" PRId64,
__FUNCTION__, mId, timestamp, mLastTimestamp);
b.status = CAMERA_BUFFER_STATUS_ERROR;
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5e2a3fb..d600d42 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -31,11 +31,14 @@
"libmedia_headers",
],
+ defaults: [
+ "libcameraservice_deps",
+ ],
+
shared_libs: [
"libbase",
"libbinder",
"libcutils",
- "libcameraservice",
"libhidlbase",
"liblog",
"libcamera_client",
@@ -45,20 +48,23 @@
"libjpeg",
"libexif",
"android.hardware.camera.common@1.0",
- "android.hardware.camera.provider@2.4",
- "android.hardware.camera.provider@2.5",
- "android.hardware.camera.provider@2.6",
- "android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.7",
"android.hidl.token@1.0-utils",
+ "camera_platform_flags_c_lib",
],
static_libs: [
+ "android.hardware.camera.provider@2.4",
+ "android.hardware.camera.provider@2.5",
+ "android.hardware.camera.provider@2.6",
+ "android.hardware.camera.provider@2.7",
+ "android.hardware.camera.provider-V2-ndk",
+ "libcameraservice",
"libgmock",
+ "libflagtest",
],
srcs: [
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index c0cd1a9..151c5ce 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -18,24 +18,40 @@
#define LOG_TAG "CameraProviderManagerTest"
#include "../common/CameraProviderManager.h"
-#include <android/hidl/manager/1.0/IServiceManager.h>
-#include <android/hidl/manager/1.0/IServiceNotification.h>
+#include <aidl/android/hardware/camera/device/BnCameraDevice.h>
+#include <aidl/android/hardware/camera/provider/BnCameraProvider.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_libbinder.h>
+#include <android/binder_manager.h>
+#include <android/binder_parcel.h>
#include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android/hidl/manager/1.0/IServiceNotification.h>
+#include <binder/IServiceManager.h>
#include <camera_metadata_hidden.h>
-#include <hidl/HidlBinderSupport.h>
+#include <com_android_internal_camera_flags.h>
+#include <flag_macros.h>
+#include <gmock/gmock.h>
#include <gtest/gtest.h>
+#include <hidl/HidlBinderSupport.h>
#include <utility>
using namespace android;
using namespace android::hardware::camera;
+using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
+using android::hardware::camera::common::V1_0::CameraMetadataType;
using android::hardware::camera::common::V1_0::Status;
using android::hardware::camera::common::V1_0::VendorTag;
using android::hardware::camera::common::V1_0::VendorTagSection;
-using android::hardware::camera::common::V1_0::CameraMetadataType;
using android::hardware::camera::device::V3_2::ICameraDeviceCallback;
using android::hardware::camera::device::V3_2::ICameraDeviceSession;
using android::hardware::camera::provider::V2_5::DeviceState;
+using ::testing::ElementsAre;
+
+namespace flags = com::android::internal::camera::flags;
/**
* Basic test implementation of a camera ver. 3.2 device interface
@@ -241,13 +257,111 @@
hardware::hidl_bitfield<DeviceState> mCurrentState = 0xFFFFFFFF; // Unlikely to be a real state
};
+struct TestAidlCameraDevice : public aidl::android::hardware::camera::device::BnCameraDevice {
+ ::ndk::ScopedAStatus getCameraCharacteristics(
+ ::aidl::android::hardware::camera::device::CameraMetadata*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getPhysicalCameraCharacteristics(
+ const std::string&,
+ ::aidl::android::hardware::camera::device::CameraMetadata*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getResourceCost(
+ ::aidl::android::hardware::camera::common::CameraResourceCost* aidl_return) override {
+ auto cost = ::aidl::android::hardware::camera::common::CameraResourceCost();
+ aidl_return->resourceCost = 100;
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus isStreamCombinationSupported(
+ const ::aidl::android::hardware::camera::device::StreamConfiguration&, bool*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus
+ open(const std::shared_ptr<::aidl::android::hardware::camera::device::ICameraDeviceCallback>&,
+ std::shared_ptr<::aidl::android::hardware::camera::device::ICameraDeviceSession>*)
+ override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus openInjectionSession(
+ const std::shared_ptr<
+ ::aidl::android::hardware::camera::device::ICameraDeviceCallback>&,
+ std::shared_ptr<::aidl::android::hardware::camera::device::ICameraInjectionSession>*)
+ override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus setTorchMode(bool) override { return ndk::ScopedAStatus::ok(); }
+ ::ndk::ScopedAStatus turnOnTorchWithStrengthLevel(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getTorchStrengthLevel(int32_t*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+/**
+ * Basic test implementation of a AIDL camera provider
+ */
+class TestAidlICameraProvider : public aidl::android::hardware::camera::provider::BnCameraProvider {
+ public:
+ std::shared_ptr<ICameraProviderCallback> mCallback;
+ std::vector<std::string> mDeviceNames;
+
+ TestAidlICameraProvider(const std::vector<std::string>& deviceNames) {
+ mDeviceNames = deviceNames;
+ }
+
+ ::ndk::ScopedAStatus setCallback(
+ const std::shared_ptr<
+ ::aidl::android::hardware::camera::provider::ICameraProviderCallback>& callback)
+ override {
+ mCallback = callback;
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getVendorTags(
+ std::vector<::aidl::android::hardware::camera::common::VendorTagSection>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getCameraIdList(std::vector<std::string>* camera_list) override {
+ ALOGW("getCameraIdList");
+ for (size_t i = 0; i < mDeviceNames.size(); i++) {
+ camera_list->push_back(mDeviceNames.at(i));
+ }
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getCameraDeviceInterface(
+ const std::string&,
+ std::shared_ptr<::aidl::android::hardware::camera::device::ICameraDevice>* device)
+ override {
+ *device = ndk::SharedRefBase::make<TestAidlCameraDevice>();
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus notifyDeviceStateChange(int64_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus getConcurrentCameraIds(
+ std::vector<
+ ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination>*)
+ override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ::ndk::ScopedAStatus isConcurrentStreamCombinationSupported(
+ const std::vector<
+ ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination>&,
+ bool*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
/**
* Simple test version of the interaction proxy, to use to inject onRegistered calls to the
* CameraProviderManager
*/
-struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
+struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy,
+ public CameraProviderManager::AidlServiceInteractionProxy {
sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
sp<TestICameraProvider> mTestCameraProvider;
+ std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
TestInteractionProxy() {}
@@ -255,6 +369,10 @@
mTestCameraProvider = provider;
}
+ void setAidlProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
+ mTestAidlCameraProvider = provider;
+ }
+
std::vector<std::string> mLastRequestedServiceNames;
virtual ~TestInteractionProxy() {}
@@ -295,6 +413,10 @@
return ret;
}
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ getAidlService(const std::string&) {
+ return mTestAidlCameraProvider;
+ }
};
struct TestStatusListener : public CameraProviderManager::StatusListener {
@@ -713,3 +835,49 @@
ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
<< "Unavailable physical camera Ids not set properly.";
}
+TEST_WITH_FLAGS(CameraProviderManagerTest, AidlVirtualCameraProviderDiscovered,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(flags, virtual_camera_service_discovery))) {
+ sp<CameraProviderManager> providerManager = new CameraProviderManager();
+ sp<TestStatusListener> statusListener = new TestStatusListener();
+ TestInteractionProxy serviceProxy;
+
+ status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+ ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+ std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
+
+ std::shared_ptr<TestAidlICameraProvider> aidlProvider =
+ ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
+ ndk::SpAIBinder spBinder = aidlProvider->asBinder();
+ AIBinder* aiBinder = spBinder.get();
+ serviceProxy.setAidlProvider(aidlProvider);
+ providerManager->onServiceRegistration(
+ String16("android.hardware.camera.provider.ICameraProvider/virtual/0"),
+ AIBinder_toPlatformBinder(aiBinder));
+
+ std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
+ auto cameraIds = providerManager->getCameraDeviceIds(&unavailableDeviceIds);
+
+ EXPECT_THAT(cameraIds, ElementsAre("123"));
+}
+
+TEST_WITH_FLAGS(CameraProviderManagerTest, AidlVirtualCameraProviderDiscoveredOnInit,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(flags, virtual_camera_service_discovery))) {
+ sp<CameraProviderManager> providerManager = new CameraProviderManager();
+ sp<TestStatusListener> statusListener = new TestStatusListener();
+ TestInteractionProxy serviceProxy;
+
+ std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
+
+ std::shared_ptr<TestAidlICameraProvider> aidlProvider =
+ ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
+ serviceProxy.setAidlProvider(aidlProvider);
+
+ status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+ ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+ std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
+ std::vector<std::string> cameraIds = providerManager->getCameraDeviceIds(&unavailableDeviceIds);
+
+ EXPECT_THAT(cameraIds, ElementsAre("123"));
+}
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index d07bf6d..65e93a9 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -94,8 +94,8 @@
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag, int32_t videoStabilizationMode,
- const std::vector<hardware::CameraStreamStats>& streamStats) {
+ const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
+ bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
@@ -104,6 +104,8 @@
mSessionStats.mDeviceError = deviceError;
mSessionStats.mUserTag = userTag;
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
+ mSessionStats.mUsedUltraWide = usedUltraWide;
+ mSessionStats.mUsedZoomOverride = usedZoomOverride;
mSessionStats.mStreamStats = streamStats;
updateProxyDeviceState(proxyBinder);
@@ -278,8 +280,8 @@
void CameraServiceProxyWrapper::logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag, int32_t videoStabilizationMode,
- const std::vector<hardware::CameraStreamStats>& streamStats) {
+ const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
+ bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
@@ -304,7 +306,7 @@
sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
- videoStabilizationMode, streamStats);
+ videoStabilizationMode, usedUltraWide, usedZoomOverride, streamStats);
}
void CameraServiceProxyWrapper::logOpen(const std::string& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 1afe5b3..49b7a8c 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -61,8 +61,8 @@
void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag, int32_t videoStabilizationMode,
- const std::vector<hardware::CameraStreamStats>& streamStats);
+ const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
+ bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
std::string updateExtensionSessionStats(
const hardware::CameraExtensionSessionStats& extStats);
@@ -110,8 +110,8 @@
// Session state becomes idle
void logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag, int32_t videoStabilizationMode,
- const std::vector<hardware::CameraStreamStats>& streamStats);
+ const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
+ bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
void pingCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
new file mode 100644
index 0000000..92a1030
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SchedulingPolicyUtils.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <sched.h>
+
+#include "CameraThreadState.h"
+#include <private/android_filesystem_config.h>
+#include <processgroup/processgroup.h>
+#include <processgroup/sched_policy.h>
+#include <procinfo/process.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+int requestPriorityDirect(int pid, int tid, int prio) {
+ android::procinfo::ProcessInfo processInfo;
+ static const int kMinPrio = 1;
+ static const int kMaxPrio = 3;
+
+ if (!android::procinfo::GetProcessInfo(tid, &processInfo)) {
+ ALOGE("%s: Error getting process info", __FUNCTION__);
+ return -EPERM;
+ }
+
+ if (prio < kMinPrio || prio > kMaxPrio || processInfo.pid != pid) {
+ ALOGE("%s: Invalid parameter prio=%d pid=%d procinfo.pid=%d", __FUNCTION__, prio, pid,
+ processInfo.pid);
+ return -EPERM;
+ }
+
+ // Set the thread group as the audio system thread group, consistent with the
+ // implementation in SchedulingPolicyService.java when isApp is false in the
+ // requestPriority method.
+ if (!SetTaskProfiles(tid, {get_sched_policy_profile_name(SP_AUDIO_SYS)},
+ /*use_fd_cache*/ true)) {
+ ALOGE("%s:Error in SetTaskProfiles", __FUNCTION__);
+ return -EPERM;
+ }
+
+ struct sched_param param;
+ param.sched_priority = prio;
+ return sched_setscheduler(tid, SCHED_FIFO | SCHED_RESET_ON_FORK, ¶m);
+}
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
new file mode 100644
index 0000000..f71fddf
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+#define ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+/**
+ * Request elevated priority for thread tid, whose thread group leader must be pid.
+ * Instead of using scheduling policy service, this method uses direct system calls.
+ * The priority parameter is currently restricted from 1 to 3, matching the
+ * scheduling policy service implementation.
+ */
+int requestPriorityDirect(int pid, int tid, int prio);
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
+
+#endif
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 59d1ae4..bd4ac38 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -242,6 +242,7 @@
"channel_count_hardware",
"sample_rate_hardware",
"uid",
+ "sample_rate_client",
};
static constexpr const char * HeadTrackerDeviceEnabledFields[] {
@@ -1379,6 +1380,10 @@
const auto uid = item->getUid();
+ int32_t sampleRateClient = 0;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_SAMPLERATECLIENT, &sampleRateClient);
+
LOG(LOG_LEVEL) << "key:" << key
<< " path:" << path
<< " direction:" << direction << "(" << directionStr << ")"
@@ -1402,7 +1407,8 @@
<< " format_hardware:" << formatHardware << "(" << formatHardwareStr << ")"
<< " channel_count_hardware:" << channelCountHardware
<< " sample_rate_hardware: " << sampleRateHardware
- << " uid: " << uid;
+ << " uid: " << uid
+ << " sample_rate_client: " << sampleRateClient;
if (mAudioAnalytics.mDeliverStatistics) {
const stats::media_metrics::BytesField bf_serialized(
@@ -1431,6 +1437,7 @@
, channelCountHardware
, sampleRateHardware
, uid
+ , sampleRateClient
);
std::stringstream ss;
ss << "result:" << result;
@@ -1458,6 +1465,7 @@
, channelCountHardware
, sampleRateHardware
, uid
+ , sampleRateClient
);
ss << " " << fieldsStr;
std::string str = ss.str();
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 1b5255a..f81db53 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,8 +524,8 @@
"audiotrack",
// other media
"codec",
- "freeze",
- "judder",
+ "videofreeze",
+ "videojudder",
"extractor",
"mediadrm",
"mediaparser",
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 83b30f3..844f204 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -187,10 +187,12 @@
const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
AStatsEvent_writeInt64(event, timestampNanos);
- std::string packageName = item->getPkgName();
+ // packageName deprecated for calling_uid and statsd support as of U-QPR2
+ std::string packageName = "";
AStatsEvent_writeString(event, packageName.c_str());
- int64_t packageVersionCode = item->getPkgVersionCode();
+ // packageVersion deprecated for calling_uid and statsd support as of U-QPR2
+ int64_t packageVersionCode = 0;
AStatsEvent_writeInt64(event, packageVersionCode);
int64_t mediaApexVersion = 0;
@@ -864,6 +866,7 @@
<< " original_qp_p_max:" << qpPMaxOri
<< " original_qp_b_min:" << qpBMinOri
<< " original_qp_b_max:" << qpBMaxOri
+ << " app_uid:" << app_uid
<< " }";
statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9552e25..42e53b0 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -54,23 +54,20 @@
std::weak_ptr<DeathNotifier> mDeathNotifier;
};
public:
+ static std::shared_ptr<DeathNotifier> Create(
+ const std::shared_ptr<IResourceManagerClient>& client,
+ const std::shared_ptr<ResourceManagerService>& service,
+ const ClientInfoParcel& clientInfo,
+ bool overrideProcessInfo = false);
+
DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
const std::shared_ptr<ResourceManagerService>& service,
- const ClientInfoParcel& clientInfo,
- AIBinder_DeathRecipient* recipient);
+ const ClientInfoParcel& clientInfo);
virtual ~DeathNotifier() {
unlink();
}
- void unlink() {
- if (mClient != nullptr) {
- // Register for the callbacks by linking to death notification.
- AIBinder_unlinkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
- mClient = nullptr;
- }
- }
-
// Implement death recipient
static void BinderDiedCallback(void* cookie);
static void BinderUnlinkedCallback(void* cookie);
@@ -82,24 +79,34 @@
// The context gets deleted at BinderUnlinkedCallback.
mCookie = new BinderDiedContext{.mDeathNotifier = weak_from_this()};
// Register for the callbacks by linking to death notification.
- AIBinder_linkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
+ AIBinder_linkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+ }
+
+ void unlink() {
+ if (mClient != nullptr) {
+ // Unlink from the death notification.
+ AIBinder_unlinkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+ mClient = nullptr;
+ }
}
protected:
std::shared_ptr<IResourceManagerClient> mClient;
std::weak_ptr<ResourceManagerService> mService;
const ClientInfoParcel mClientInfo;
- AIBinder_DeathRecipient* mRecipient;
BinderDiedContext* mCookie;
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
};
DeathNotifier::DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
const std::shared_ptr<ResourceManagerService>& service,
- const ClientInfoParcel& clientInfo,
- AIBinder_DeathRecipient* recipient)
+ const ClientInfoParcel& clientInfo)
: mClient(client), mService(service), mClientInfo(clientInfo),
- mRecipient(recipient), mCookie(nullptr) {
- link();
+ mCookie(nullptr),
+ mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
+ AIBinder_DeathRecipient_new(BinderDiedCallback))) {
+ // Setting callback notification when DeathRecipient gets deleted.
+ AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
}
//static
@@ -141,9 +148,8 @@
public:
OverrideProcessInfoDeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
const std::shared_ptr<ResourceManagerService>& service,
- const ClientInfoParcel& clientInfo,
- AIBinder_DeathRecipient* recipient)
- : DeathNotifier(client, service, clientInfo, recipient) {}
+ const ClientInfoParcel& clientInfo)
+ : DeathNotifier(client, service, clientInfo) {}
virtual ~OverrideProcessInfoDeathNotifier() {}
@@ -161,6 +167,26 @@
service->removeProcessInfoOverride(mClientInfo.pid);
}
+std::shared_ptr<DeathNotifier> DeathNotifier::Create(
+ const std::shared_ptr<IResourceManagerClient>& client,
+ const std::shared_ptr<ResourceManagerService>& service,
+ const ClientInfoParcel& clientInfo,
+ bool overrideProcessInfo) {
+ std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+ if (overrideProcessInfo) {
+ deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
+ client, service, clientInfo);
+ } else {
+ deathNotifier = std::make_shared<DeathNotifier>(client, service, clientInfo);
+ }
+
+ if (deathNotifier) {
+ deathNotifier->link();
+ }
+
+ return deathNotifier;
+}
+
static void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
static const char* const kServiceName = "media_resource_monitor";
sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
@@ -297,9 +323,7 @@
mServiceLog(new ServiceLog()),
mSupportsMultipleSecureCodecs(true),
mSupportsSecureWithNonSecureCodec(true),
- mCpuBoostCount(0),
- mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
- AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback))) {
+ mCpuBoostCount(0) {
mSystemCB->noteResetVideo();
// Create ResourceManagerMetrics that handles all the metrics.
mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
@@ -454,8 +478,8 @@
}
}
if (info.deathNotifier == nullptr && client != nullptr) {
- info.deathNotifier = std::make_shared<DeathNotifier>(
- client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
+ info.deathNotifier = DeathNotifier::Create(
+ client, ref<ResourceManagerService>(), clientInfo);
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
mObserverService->onResourceAdded(uid, pid, resourceAdded);
@@ -824,8 +848,8 @@
.uid = 0,
.id = 0,
.name = "<unknown client>"};
- auto deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
- client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
+ auto deathNotifier = DeathNotifier::Create(
+ client, ref<ResourceManagerService>(), clientInfo, true);
mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index aa88ac6..637525d 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -204,7 +204,6 @@
bool mSupportsMultipleSecureCodecs;
bool mSupportsSecureWithNonSecureCodec;
int32_t mCpuBoostCount;
- ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
struct ProcessInfoOverride {
std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
std::shared_ptr<IResourceManagerClient> client;
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index a46d87a..bbbc737 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -27,21 +27,18 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_fuzz {
- name: "mediaresourcemanager_fuzzer",
- srcs: [
- "mediaresourcemanager_fuzzer.cpp",
+cc_defaults {
+ name: "mediaresourcemanager_fuzzer_defaults",
+ defaults: [
+ "service_fuzzer_defaults",
],
static_libs: [
"liblog",
"libresourcemanagerservice",
],
shared_libs: [
- "libbinder",
- "libbinder_ndk",
"libmedia",
"libmediautils",
- "libutils",
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
@@ -62,3 +59,39 @@
fuzzed_code_usage: "shipped",
},
}
+
+cc_fuzz {
+ name: "mediaresourcemanager_fuzzer",
+ defaults: [
+ "mediaresourcemanager_fuzzer_defaults",
+ ],
+ srcs: [
+ "mediaresourcemanager_fuzzer.cpp",
+ ],
+}
+
+cc_fuzz {
+ name: "resourcemanager_service_fuzzer",
+ defaults: [
+ "mediaresourcemanager_fuzzer_defaults",
+ ],
+ srcs: [
+ "resourcemanager_service_fuzzer.cpp",
+ ],
+}
+
+cc_fuzz {
+ name: "resourceobserver_service_fuzzer",
+ defaults: [
+ "mediaresourcemanager_fuzzer_defaults",
+ ],
+ static_libs: [
+ "resourceobserver_aidl_interface-V1-ndk",
+ ],
+ srcs: [
+ "resourceobserver_service_fuzzer.cpp",
+ ],
+ fuzz_config: {
+ triage_assignee: "waghpawan@google.com",
+ },
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
new file mode 100644
index 0000000..ca10d20
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceManagerService.h"
+
+using android::fuzzService;
+using android::ResourceManagerService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ auto service = SharedRefBase::make<ResourceManagerService>();
+ fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+ return 0;
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
new file mode 100644
index 0000000..e69368d
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceObserverService.h"
+
+using android::fuzzService;
+using android::ResourceObserverService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ auto service = SharedRefBase::make<ResourceObserverService>();
+ fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+ return 0;
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 7f228c7..2ef6fe5 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -45,6 +45,8 @@
#define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (3 * AAUDIO_NANOS_PER_MILLISECOND)
#define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS (-1 * AAUDIO_NANOS_PER_MILLISECOND)
+#define AAUDIO_MAX_OPEN_ATTEMPTS 10
+
using namespace android; // TODO just import names needed
using namespace aaudio; // TODO just import names needed
@@ -77,14 +79,23 @@
{AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
};
-audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
- if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
- return returnedFromAPM;
- }
+audio_format_t getNextFormatToTry(audio_format_t curFormat) {
const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
- return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+ return it != NEXT_FORMAT_TO_TRY.end() ? it->second : curFormat;
}
+struct configComp {
+ bool operator() (const audio_config_base_t& lhs, const audio_config_base_t& rhs) const {
+ if (lhs.sample_rate != rhs.sample_rate) {
+ return lhs.sample_rate < rhs.sample_rate;
+ } else if (lhs.channel_mask != rhs.channel_mask) {
+ return lhs.channel_mask < rhs.channel_mask;
+ } else {
+ return lhs.format < rhs.format;
+ }
+ }
+};
+
} // namespace
aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
@@ -101,60 +112,66 @@
legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
audio_format_t audioFormat = getFormat();
- std::set<audio_format_t> formatsTried;
- while (true) {
- if (formatsTried.find(audioFormat) != formatsTried.end()) {
+ int32_t sampleRate = getSampleRate();
+ if (sampleRate == AAUDIO_UNSPECIFIED) {
+ sampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
+ }
+
+ const aaudio_direction_t direction = getDirection();
+ audio_config_base_t config;
+ config.format = audioFormat;
+ config.sample_rate = sampleRate;
+ config.channel_mask = AAudio_getChannelMaskForOpen(
+ getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
+
+ std::set<audio_config_base_t, configComp> configsTried;
+ int32_t numberOfAttempts = 0;
+ while (numberOfAttempts < AAUDIO_MAX_OPEN_ATTEMPTS) {
+ if (configsTried.find(config) != configsTried.end()) {
// APM returning something that has already tried.
- ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+ ALOGW("Have already tried to open with format=%#x and sr=%d, but failed before",
+ config.format, config.sample_rate);
break;
}
- formatsTried.insert(audioFormat);
+ configsTried.insert(config);
- audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
- result = openWithFormat(audioFormat, &nextFormatToTry);
+ audio_config_base_t previousConfig = config;
+ result = openWithConfig(&config);
if (result != AAUDIO_ERROR_UNAVAILABLE) {
// Return if it is successful or there is an error that is not
// AAUDIO_ERROR_UNAVAILABLE happens.
- ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+ ALOGI("Opened format=%#x sr=%d, with result=%d", previousConfig.format,
+ previousConfig.sample_rate, result);
break;
}
- nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
- ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
- __func__, audioFormat, nextFormatToTry);
- audioFormat = nextFormatToTry;
- if (audioFormat == AUDIO_FORMAT_DEFAULT) {
- // Nothing else to try
- break;
+ // Try other formats if the config from APM is the same as our current config.
+ // Some HALs may report its format support incorrectly.
+ if ((previousConfig.format == config.format) &&
+ (previousConfig.sample_rate == config.sample_rate)) {
+ config.format = getNextFormatToTry(config.format);
}
+
+ ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
+ __func__, previousConfig.format, previousConfig.sample_rate, config.format,
+ config.sample_rate);
+ numberOfAttempts++;
}
return result;
}
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
- audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithConfig(
+ audio_config_base_t* config) {
aaudio_result_t result = AAUDIO_OK;
- audio_config_base_t config;
+ audio_config_base_t currentConfig = *config;
audio_port_handle_t deviceId;
const audio_attributes_t attributes = getAudioAttributesFrom(this);
deviceId = mRequestedDeviceId;
- // Fill in config
- config.format = audioFormat;
-
- int32_t aaudioSampleRate = getSampleRate();
- if (aaudioSampleRate == AAUDIO_UNSPECIFIED) {
- aaudioSampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
- }
- config.sample_rate = aaudioSampleRate;
-
const aaudio_direction_t direction = getDirection();
- config.channel_mask = AAudio_getChannelMaskForOpen(
- getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
-
if (direction == AAUDIO_DIRECTION_OUTPUT) {
mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at DAC later
@@ -177,11 +194,11 @@
// Open HAL stream. Set mMmapStream
ALOGD("%s trying to open MMAP stream with format=%#x, "
"sample_rate=%u, channel_mask=%#x, device=%d",
- __func__, config.format, config.sample_rate,
- config.channel_mask, deviceId);
+ __func__, config->format, config->sample_rate,
+ config->channel_mask, deviceId);
const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
&attributes,
- &config,
+ config,
mMmapClient,
&deviceId,
&sessionId,
@@ -195,9 +212,9 @@
// not match the hardware.
ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
"channel_mask=%#x",
- __func__, status, config.format, config.sample_rate, config.channel_mask);
- *nextFormatToTry = config.format != audioFormat ? config.format
- : *nextFormatToTry;
+ __func__, status, config->format, config->sample_rate, config->channel_mask);
+ // Keep the channel mask of the current config
+ config->channel_mask = currentConfig.channel_mask;
return AAUDIO_ERROR_UNAVAILABLE;
}
@@ -217,7 +234,7 @@
setSessionId(actualSessionId);
ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
- __func__, audioFormat, getDeviceId(), getSessionId());
+ __func__, config->format, getDeviceId(), getSessionId());
// Create MMAP/NOIRQ buffer.
result = createMmapBuffer();
@@ -227,11 +244,11 @@
// Get information about the stream and pass it back to the caller.
setChannelMask(AAudioConvert_androidToAAudioChannelMask(
- config.channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
- AAudio_isChannelIndexMask(config.channel_mask)));
+ config->channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
+ AAudio_isChannelIndexMask(config->channel_mask)));
- setFormat(config.format);
- setSampleRate(config.sample_rate);
+ setFormat(config->format);
+ setSampleRate(config->sample_rate);
setHardwareSampleRate(getSampleRate());
setHardwareFormat(getFormat());
setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 38cf0ba..f19005c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -97,7 +97,7 @@
private:
- aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
+ aaudio_result_t openWithConfig(audio_config_base_t* config);
aaudio_result_t createMmapBuffer();
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 65854c8..5fb152e 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -647,6 +647,10 @@
setSuspended(true);
return AAUDIO_ERROR_WOULD_BLOCK;
} else {
+ if (isSuspended()) {
+ ALOGW("%s(): Queue no longer full. Un-suspending the stream.", __func__);
+ setSuspended(false);
+ }
return AAUDIO_OK;
}
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index bc7ccde..8f51ce4 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -220,18 +220,6 @@
return mSuspended;
}
- bool isCloseNeeded() const {
- return mCloseNeeded.load();
- }
-
- /**
- * Mark this stream as needing to be closed.
- * Once marked for closing, it cannot be unmarked.
- */
- void markCloseNeeded() {
- mCloseNeeded.store(true);
- }
-
virtual const char *getTypeText() const { return "Base"; }
protected:
@@ -419,12 +407,8 @@
aaudio_handle_t mHandle = -1;
bool mFlowing = false;
- // This indicates that a stream that is being referenced by a binder call
- // and needs to closed.
- std::atomic<bool> mCloseNeeded{false}; // TODO remove
-
// This indicate that a running stream should not be processed because of an error,
- // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
+ // for example a full message queue.
std::atomic<bool> mSuspended{false};
bool mDisconnected GUARDED_BY(mLock) {false};
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 549fa59..502d773 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -75,7 +75,9 @@
aaudio_result_t AAudioThread::stop() {
if (!mHasThread) {
- ALOGE("stop() but no thread running");
+ // There can be cases that the thread is just created but not started.
+ // Logging as warning to attract attention but not too serious.
+ ALOGW("stop() but no thread running");
return AAUDIO_ERROR_INVALID_STATE;
}
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index c5080a4..3521979 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -78,12 +78,38 @@
]
+cc_defaults {
+ name: "libaaudioservice_dependencies",
-cc_library {
+ shared_libs: [
+ "libaaudio_internal",
+ "libaudioclient",
+ "libaudioutils",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
+ "libbase",
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libutils",
+ "aaudio-aidl-cpp",
+ "framework-permission-aidl-cpp",
+ "libaudioclient_aidl_conversion",
+ "packagemanager_aidl-cpp",
+ ],
+
+ static_libs: [
+ "libaudioflinger",
+ ]
+}
+
+cc_library_static {
name: "libaaudioservice",
defaults: [
+ "libaaudioservice_dependencies",
"latest_android_media_audio_common_types_cpp_shared",
],
@@ -116,25 +142,6 @@
"-Werror",
],
- shared_libs: [
- "libaaudio_internal",
- "libaudioclient",
- "libaudioflinger",
- "libaudioutils",
- "libmedia_helper",
- "libmediametrics",
- "libmediautils",
- "libbase",
- "libbinder",
- "libcutils",
- "liblog",
- "libutils",
- "aaudio-aidl-cpp",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "packagemanager_aidl-cpp",
- ],
-
export_shared_lib_headers: [
"libaaudio_internal",
"framework-permission-aidl-cpp",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f047065..f5c2e6c 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -403,13 +403,6 @@
request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
- request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
- request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
- request.getConfiguration().setHardwareFormat((audio_format_t)(
- fdp.ConsumeBool()
- ? fdp.ConsumeIntegral<int32_t>()
- : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
-
auto streamHandleInfo = mClient->openStream(request, configurationOutput);
if (streamHandleInfo.getHandle() < 0) {
// invalid request, stream not opened.
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index ea5139d..e29d520 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -86,6 +86,7 @@
"android.hardware.tv.tuner@1.1",
"android.hardware.tv.tuner-V2-ndk",
"libbase",
+ "libcutils",
"libbinder",
"libfmq",
"libhidlbase",
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index 617622d..be482c4 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -355,6 +355,11 @@
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
+ // Call into the HAL to make sure the transport FD was able to be closed by binder.
+ // This is a tricky workaround for a problem in Binder.
+ // TODO: [b/192048842] When that problem is fixed, we may be able to remove or change this code.
+ mFilter_1_1->getId([&](HidlResult /* r */, uint32_t /* filterId*/){});
+
return ::ndk::ScopedAStatus::ok();
}
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index 90f1731..6dee324 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -17,6 +17,7 @@
#include <android-base/logging.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
#include <utils/Log.h>
#include <hidl/HidlTransportSupport.h>
@@ -31,6 +32,12 @@
int main() {
ALOGD("Tuner service starting");
+ if (!property_get_bool("tuner.server.enable", false)
+ && !property_get_bool("ro.tuner.lazyhal", false)) {
+ ALOGD("tuner is not enabled, terminating");
+ return 0;
+ }
+
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
hardware::configureRpcThreadpool(16, true);