Merge "Merge tm-qpr-dev-plus-aosp-without-vendor@9467136" into stage-aosp-master
diff --git a/camera/Android.bp b/camera/Android.bp
index e44202b..3e28e4f 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -93,6 +93,7 @@
"libgui",
"libcamera_metadata",
"libnativewindow",
+ "lib-platform-compat-native-api",
],
include_dirs: [
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 604dbb8..d1618e4 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,10 @@
}
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion)
+ int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait)
{
return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion);
+ clientPid, targetSdkVersion, overrideToPortrait);
}
status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 24c9108..0a5bc12 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -23,6 +23,7 @@
#include <cutils/properties.h>
#include <android/hardware/ICameraService.h>
+#include <com/android/internal/compat/IPlatformCompatNative.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
@@ -161,7 +162,8 @@
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const String16& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion)
+ int clientUid, int clientPid, int targetSdkVersion,
+ bool overrideToPortrait)
{
ALOGV("%s: connect", __FUNCTION__);
sp<TCam> c = new TCam(cameraId);
@@ -171,8 +173,9 @@
binder::Status ret;
if (cs != nullptr) {
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
+ ALOGI("Connect camera (legacy API) - overrideToPortrait %d", overrideToPortrait);
ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, /*out*/ &c->mCamera);
+ clientPid, targetSdkVersion, overrideToPortrait, /*out*/ &c->mCamera);
}
if (ret.isOk() && c->mCamera != nullptr) {
IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -273,10 +276,11 @@
// this can be in BaseCamera but it should be an instance method
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
+ bool overrideToPortrait,
struct hardware::CameraInfo* cameraInfo) {
const sp<::android::hardware::ICameraService> cs = getCameraService();
if (cs == 0) return UNKNOWN_ERROR;
- binder::Status res = cs->getCameraInfo(cameraId, cameraInfo);
+ binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
return res.isOk() ? OK : res.serviceSpecificErrorCode();
}
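
For orientation, here is a minimal, hypothetical native caller exercising the widened connect()/getCameraInfo() signatures above. The package name and camera id are placeholders; the UID/PID constants and __ANDROID_API_FUTURE__ are used the same way the tests later in this patch use them.

    // Hypothetical caller sketch -- not part of this change.
    #define LOG_TAG "CameraPortraitExample"
    #include <utils/Log.h>
    #include <android/api-level.h>
    #include <android/hardware/ICameraService.h>
    #include <camera/Camera.h>

    using namespace android;

    static sp<Camera> openCameraPortrait(int cameraId) {
        const String16 pkg("com.example.client");   // placeholder package name
        // New trailing flag: ask cameraserver to behave as if the sensor
        // were natively portrait for this legacy-API client.
        sp<Camera> cam = Camera::connect(cameraId, pkg,
                hardware::ICameraService::USE_CALLING_UID,
                hardware::ICameraService::USE_CALLING_PID,
                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                /*overrideToPortrait*/ true);

        hardware::CameraInfo info;
        // getCameraInfo() takes the same flag so the reported orientation
        // is consistent with what connect() asked for.
        if (cam != nullptr &&
                Camera::getCameraInfo(cameraId, /*overrideToPortrait*/ true, &info) == OK) {
            ALOGD("camera %d: facing=%d orientation=%d",
                    cameraId, info.facing, info.orientation);
        }
        return cam;
    }
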
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 1e748c7..01baba1 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -67,7 +67,7 @@
/**
* Fetch basic camera information for a camera device
*/
- CameraInfo getCameraInfo(int cameraId);
+ CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
/**
* Default UID/PID values for non-privileged callers of
@@ -83,7 +83,8 @@
int cameraId,
String opPackageName,
int clientUid, int clientPid,
- int targetSdkVersion);
+ int targetSdkVersion,
+ boolean overrideToPortrait);
/**
* Open a camera device through the new camera API
@@ -94,7 +95,8 @@
String opPackageName,
@nullable String featureId,
int clientUid, int oomScoreOffset,
- int targetSdkVersion);
+ int targetSdkVersion,
+ boolean overrideToPortrait);
/**
* Add listener for changes to camera device and flashlight state.
@@ -135,7 +137,8 @@
* Read the static camera metadata for a camera device.
* Only supported for device HAL versions >= 3.2
*/
- CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion);
+ CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion,
+ boolean overrideToPortrait);
/**
* Read in the vendor tag descriptors from the camera module HAL.
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 58ccd69..26c36a7 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
typedef ::android::hardware::ICameraClient TCamCallbacks;
typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
(const sp<::android::hardware::ICameraClient>&,
- int, const String16&, int, int, int,
+ int, const String16&, int, int, int, bool,
/*out*/
sp<::android::hardware::ICamera>*);
static TCamConnectService fnConnectService;
@@ -81,7 +81,8 @@
static sp<Camera> create(const sp<::android::hardware::ICamera>& camera);
static sp<Camera> connect(int cameraId,
const String16& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion);
+ int clientUid, int clientPid, int targetSdkVersion,
+ bool overrideToPortrait);
virtual ~Camera();
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 8e53968..9d0721b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -119,7 +119,8 @@
static sp<TCam> connect(int cameraId,
const String16& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion);
+ int clientUid, int clientPid, int targetSdkVersion,
+ bool overrideToPortrait);
virtual void disconnect();
void setListener(const sp<TCamListener>& listener);
@@ -127,6 +128,7 @@
static int getNumberOfCameras();
static status_t getCameraInfo(int cameraId,
+ bool overrideToPortrait,
/*out*/
struct hardware::CameraInfo* cameraInfo);
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5892f1a..23d90cc 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -692,10 +692,11 @@
ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
return ACAMERA_ERROR_CAMERA_DISCONNECTED;
}
+
CameraMetadata rawMetadata;
int targetSdkVersion = android_get_application_target_sdk_version();
binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
- targetSdkVersion, &rawMetadata);
+ targetSdkVersion, /*overrideToPortrait*/true, &rawMetadata);
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -747,7 +748,7 @@
binder::Status serviceRet = cs->connectDevice(
callbacks, String16(cameraId), String16(""), {},
hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
- targetSdkVersion, /*out*/&deviceRemote);
+ targetSdkVersion, /*overrideToPortrait*/true, /*out*/&deviceRemote);
if (!serviceRet.isOk()) {
ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 17ea512..1af5637 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -370,7 +370,7 @@
// Check metadata binder call
CameraMetadata metadata;
res = service->getCameraCharacteristics(cameraId,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(metadata.isEmpty());
@@ -386,7 +386,8 @@
sp<hardware::camera2::ICameraDeviceUser> device;
res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
{}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/false, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
ASSERT_NE(nullptr, device.get());
device->disconnect();
@@ -429,7 +430,8 @@
SCOPED_TRACE("openNewDevice");
binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
{}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/false, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
}
auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 76dc38c..f2fa48c 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -74,7 +74,8 @@
CameraMetadata metadata;
std::vector<int32_t> tagsNeedingPermission;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/false, &metadata);
ASSERT_TRUE(rc.isOk());
EXPECT_FALSE(metadata.isEmpty());
EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index efd9dae..bdfb84a 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -182,7 +182,8 @@
CameraMetadata metadata;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+ &metadata);
if (!rc.isOk()) {
// The test is relevant only for cameras with Hal 3.x
// support.
@@ -209,7 +210,8 @@
rc = mCameraService->connect(this, cameraId,
String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
hardware::ICameraService::USE_CALLING_PID,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/false, &cameraDevice);
EXPECT_TRUE(rc.isOk());
CameraParameters params(cameraDevice->getParameters());
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 2e0b678..d866c18 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -13,6 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include <algorithm>
+#include <string_view>
+#include <type_traits>
#include <assert.h>
#include <ctype.h>
@@ -100,7 +103,6 @@
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
-static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
@@ -354,14 +356,15 @@
}
/*
- * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * Writes an unsigned/signed integer byte-by-byte in little endian order regardless
* of the platform endianness.
*/
-template <typename UINT>
-static void writeValueLE(UINT value, uint8_t* buffer) {
- for (int i = 0; i < sizeof(UINT); ++i) {
- buffer[i] = static_cast<uint8_t>(value);
- value >>= 8;
+template <typename T>
+static void writeValueLE(T value, uint8_t* buffer) {
+ std::remove_const_t<T> temp = value;
+ for (int i = 0; i < sizeof(T); ++i) {
+ buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
+ temp >>= 8;
}
}
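
As a standalone illustration of the byte order the rewritten helper produces (a separate sketch, not screenrecord code; the demo value -2 is arbitrary):

    #include <cstdint>
    #include <cstdio>
    #include <type_traits>

    // Same technique as writeValueLE() above: emit the value LSB-first so the
    // serialized layout is little endian regardless of host endianness. A local
    // copy is shifted, which is why the template also accepts const-qualified
    // and signed types.
    template <typename T>
    static void writeValueLE(T value, uint8_t* buffer) {
        std::remove_const_t<T> temp = value;
        for (size_t i = 0; i < sizeof(T); ++i) {
            buffer[i] = static_cast<uint8_t>(temp & 0xff);
            temp >>= 8;
        }
    }

    int main() {
        uint8_t buf[8] = {};
        writeValueLE<int64_t>(-2, buf);            // two's complement 0xFFFF...FE
        for (uint8_t b : buf) printf("%02x ", b);  // prints: fe ff ff ff ff ff ff ff
        printf("\n");
        return 0;
    }
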
@@ -377,16 +380,18 @@
* - for every frame its presentation time relative to the elapsed realtime clock in microseconds
* (as little endian uint64).
*/
-static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
- ALOGV("Writing metadata");
+ static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";
+
+ ALOGV("Writing winscope metadata legacy");
int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
- systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
- + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+ + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
uint8_t* pos = buffer->data();
- strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
- pos += strlen(kWinscopeMagicString);
+ strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
+ pos += strlen(kWinscopeMagicStringLegacy);
writeValueLE<uint32_t>(timestamps.size(), pos);
pos += sizeof(uint32_t);
for (size_t idx = 0; idx < timestamps.size(); ++idx) {
@@ -395,10 +400,79 @@
pos += sizeof(uint64_t);
}
AMediaCodecBufferInfo bufferInfo = {
- 0,
+ 0 /* offset */,
static_cast<int32_t>(buffer->size()),
- timestamps[0],
- 0
+ timestamps[0] /* presentationTimeUs */,
+ 0 /* flags */
+ };
+ return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
+/*
+ * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
+ *
+ * The metadata (version 2) is written as a binary array with the following format:
+ * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
+ * - for each recorded frame:
+ * - System time in elapsed clock timebase in nanoseconds (8B little endian).
+ *
+ *
+ * Metadata version 2 changes
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
+ */
+static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
+ const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+ ALOGV("Writing winscope metadata");
+
+ static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
+ static constexpr std::uint32_t metadataVersion = 2;
+
+ const auto elapsedTimeNs = android::elapsedRealtimeNano();
+ const std::int64_t elapsedToMonotonicTimeOffsetNs =
+ elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+ const std::int64_t realToElapsedTimeOffsetNs =
+ systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
+ const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
+
+ sp<ABuffer> buffer = new ABuffer(
+ kWinscopeMagicString.size() +
+ sizeof(decltype(metadataVersion)) +
+ sizeof(decltype(realToElapsedTimeOffsetNs)) +
+ sizeof(decltype(framesCount)) +
+ framesCount * sizeof(std::uint64_t)
+ );
+ std::uint8_t* pos = buffer->data();
+
+ std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
+ pos += kWinscopeMagicString.size();
+
+ writeValueLE(metadataVersion, pos);
+ pos += sizeof(decltype(metadataVersion));
+
+ writeValueLE(realToElapsedTimeOffsetNs, pos);
+ pos += sizeof(decltype(realToElapsedTimeOffsetNs));
+
+ writeValueLE(framesCount, pos);
+ pos += sizeof(decltype(framesCount));
+
+ for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
+ const auto timestampElapsedNs =
+ elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+ writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
+ pos += sizeof(std::uint64_t);
+ }
+
+ AMediaCodecBufferInfo bufferInfo = {
+ 0 /* offset */,
+ static_cast<std::int32_t>(buffer->size()),
+ timestampsMonotonicUs[0] /* presentationTimeUs */,
+ 0 /* flags */
};
return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
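
For reference, a hedged sketch of the reader side of this version-2 layout (hypothetical helper, not part of the patch). The field widths mirror the writer above: 16-byte magic, 4-byte version, 8-byte realtime-to-elapsed offset, 4-byte frame count, then 8 bytes per frame timestamp, all little endian.

    #include <cstdint>
    #include <cstring>
    #include <optional>
    #include <string_view>
    #include <vector>

    struct WinscopeMetadataV2 {
        int64_t realToElapsedTimeOffsetNs = 0;
        std::vector<uint64_t> frameTimestampsElapsedNs;
    };

    // Little-endian reads, mirroring writeValueLE(); used with unsigned types here.
    template <typename T>
    static T readValueLE(const uint8_t* p) {
        T v = 0;
        for (size_t i = 0; i < sizeof(T); ++i) {
            v |= static_cast<T>(p[i]) << (8 * i);
        }
        return v;
    }

    static std::optional<WinscopeMetadataV2> parseWinscopeMetadata(
            const uint8_t* data, size_t size) {
        constexpr std::string_view kMagic{"#VV1NSC0PET1ME2#"};
        constexpr size_t kHeaderSize = 16 + 4 + 8 + 4;   // magic, version, offset, count
        if (size < kHeaderSize || memcmp(data, kMagic.data(), kMagic.size()) != 0) {
            return std::nullopt;
        }
        const uint8_t* pos = data + kMagic.size();
        if (readValueLE<uint32_t>(pos) != 2) return std::nullopt;   // version check
        pos += sizeof(uint32_t);
        WinscopeMetadataV2 out;
        out.realToElapsedTimeOffsetNs =
                static_cast<int64_t>(readValueLE<uint64_t>(pos));
        pos += sizeof(uint64_t);
        const uint32_t count = readValueLE<uint32_t>(pos);
        pos += sizeof(uint32_t);
        if (size - kHeaderSize < static_cast<uint64_t>(count) * sizeof(uint64_t)) {
            return std::nullopt;   // truncated sample
        }
        for (uint32_t i = 0; i < count; ++i, pos += sizeof(uint64_t)) {
            out.frameTimestampsElapsedNs.push_back(readValueLE<uint64_t>(pos));
        }
        return out;
    }
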
@@ -418,11 +492,12 @@
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
+ ssize_t metaLegacyTrackIdx = -1;
ssize_t metaTrackIdx = -1;
uint32_t debugNumFrames = 0;
int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
- Vector<int64_t> timestamps;
+ Vector<int64_t> timestampsMonotonicUs;
bool firstFrame = true;
assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -520,9 +595,9 @@
sp<ABuffer> buffer = new ABuffer(
buffers[bufIndex]->data(), buffers[bufIndex]->size());
AMediaCodecBufferInfo bufferInfo = {
- 0,
+ 0 /* offset */,
static_cast<int32_t>(buffer->size()),
- ptsUsec,
+ ptsUsec /* presentationTimeUs */,
flags
};
err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
@@ -532,7 +607,7 @@
return err;
}
if (gOutputFormat == FORMAT_MP4) {
- timestamps.add(ptsUsec);
+ timestampsMonotonicUs.add(ptsUsec);
}
}
debugNumFrames++;
@@ -565,6 +640,7 @@
if (gOutputFormat == FORMAT_MP4) {
AMediaFormat *metaFormat = AMediaFormat_new();
AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+ metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
AMediaFormat_delete(metaFormat);
}
@@ -604,10 +680,16 @@
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
fflush(stdout);
}
- if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
- err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+ if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
+ err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
if (err != NO_ERROR) {
- fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+ fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
+ return err;
+ }
+
+ err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
return err;
}
}
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 270bbf4..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,6 +432,10 @@
if (fence) {
static constexpr int kFenceWaitTimeMs = 10;
+ if (bufferNeedsReallocation) {
+ mBuffers[slot].clear();
+ }
+
status_t status = fence->wait(kFenceWaitTimeMs);
if (status == -ETIME) {
// fence is not signalled yet.
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 99bccac..bf4ca32 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
}
HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+ if (o->size() < sizeof(C2SyncVariables)) {
+ android_errorWriteLog(0x534e4554, "240140929");
+ return nullptr;
+ }
+
void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
if (ptr == MAP_FAILED) {
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index a0bc8ca..6497b58 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -129,6 +129,7 @@
mRTPCVOExtMap(-1),
mRTPCVODegrees(0),
mRTPSockDscp(0),
+ mRTPSockOptEcn(0),
mRTPSockNetwork(0),
mLastSeqNo(0),
mStarted(false),
@@ -910,6 +911,13 @@
return OK;
}
+status_t StagefrightRecorder::setParamRtpEcn(int32_t ecn) {
+ ALOGV("setParamRtpEcn: %d", ecn);
+
+ mRTPSockOptEcn = ecn;
+ return OK;
+}
+
status_t StagefrightRecorder::requestIDRFrame() {
status_t ret = BAD_VALUE;
if (mVideoEncoderSource != NULL) {
@@ -1091,6 +1099,11 @@
if (safe_strtoi32(value.string(), &dscp)) {
return setParamRtpDscp(dscp);
}
+ } else if (key == "rtp-param-set-socket-ecn") {
+ int32_t targetEcn;
+ if (safe_strtoi32(value.string(), &targetEcn)) {
+ return setParamRtpEcn(targetEcn);
+ }
} else if (key == "rtp-param-set-socket-network") {
int64_t networkHandle;
if (safe_strtoi64(value.string(), &networkHandle)) {
@@ -1272,6 +1285,9 @@
if (mRTPSockDscp > 0) {
meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
}
+ if (mRTPSockOptEcn > 0) {
+ meta->setInt32(kKeyRtpEcn, mRTPSockOptEcn);
+ }
status = mWriter->start(meta.get());
break;
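
For completeness, the new key goes through the same string-parameter path as the existing rtp-param-* options above, so a caller that already drives the recorder that way would presumably enable it like the DSCP option:

    // Hypothetical usage, assuming a StagefrightRecorder instance `recorder`
    // and the same "key=value" syntax the other rtp-param-* keys use.
    recorder->setParameters(String8("rtp-param-set-socket-ecn=1"));
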
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index d7785da..0801101 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -153,6 +153,7 @@
int32_t mRTPCVOExtMap;
int32_t mRTPCVODegrees;
int32_t mRTPSockDscp;
+ int32_t mRTPSockOptEcn;
int64_t mRTPSockNetwork;
uint32_t mLastSeqNo;
@@ -247,6 +248,7 @@
status_t setRTPCVOExtMap(int32_t extmap);
status_t setRTPCVODegrees(int32_t cvoDegrees);
status_t setParamRtpDscp(int32_t dscp);
+ status_t setParamRtpEcn(int32_t ecn);
status_t setSocketNetwork(int64_t networkHandle);
status_t requestIDRFrame();
void clipVideoBitRate();
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 6a17972..fd03150 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -115,7 +115,7 @@
int sockRtp, sockRtcp;
ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
- info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+ info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);
sp<AMessage> notify = new AMessage('accu', this);
@@ -125,6 +125,8 @@
mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
mRTPConn->setSelfID(info->mSelfID);
mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+ mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
+ mRTPConn->setIsIPv6(info->mLocalIp);
unsigned long PT;
AString formatDesc, formatParams;
@@ -719,6 +721,8 @@
} else if (key == "rtp-param-set-socket-network") {
int64_t networkHandle = atoll(value);
setSocketNetwork(networkHandle);
+ } else if (key == "rtp-param-set-socket-ecn") {
+ info->mRtpSockOptEcn = atoi(value);
} else if (key == "rtp-param-jitter-buffer-time") {
// clamping min at 40, max at 3000
info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 7d9bb8f..b2afe86 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -121,6 +121,8 @@
uint32_t mSelfID;
/* extmap:<value> for CVO will be set to here */
int32_t mCVOExtMap;
+ /* Whether ECN is enabled on the RTP socket */
+ int32_t mRtpSockOptEcn;
/* a copy of TrackInfo in RTSPSource */
sp<AnotherPacketSource> mSource;
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9607425..842327d 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,7 @@
if (camera == 0) {
mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
} else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e995931..f67f717 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -354,8 +354,11 @@
}
//static
-Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
-std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
+// these are no_destroy to keep them from being destroyed at process exit
+// where some thread calls exit() while other threads are still running.
+// see b/194783918
+[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
+[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
//static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
@@ -807,9 +810,7 @@
mWidth(0),
mHeight(0),
mRotationDegrees(0),
- mConfigColorTransfer(-1),
- mHDRStaticInfo(false),
- mHDR10PlusInfo(false),
+ mHdrInfoFlags(0),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -864,6 +865,9 @@
return NAME_NOT_FOUND;
};
}
+
+ // we want an empty metrics record for any early getMetrics() call
+ // this should be the *only* initMediametrics() call that's not on the Looper thread
initMediametrics();
}
@@ -872,8 +876,17 @@
mResourceManagerProxy->removeClient();
flushMediametrics();
+
+ // clean any saved metrics info we stored as part of configure()
+ if (mConfigureMsg != nullptr) {
+ mediametrics_handle_t metricsHandle;
+ if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+ mediametrics_delete(metricsHandle);
+ }
+ }
}
// except for the constructor, this is called from the looper thread (and therefore mutexed)
void MediaCodec::initMediametrics() {
if (mMetricsHandle == 0) {
mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -903,11 +916,12 @@
}
void MediaCodec::updateMediametrics() {
- ALOGV("MediaCodec::updateMediametrics");
if (mMetricsHandle == 0) {
return;
}
+ Mutex::Autolock _lock(mMetricsLock);
+
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -955,29 +969,73 @@
mIndexOfFirstFrameWhenLowLatencyOn);
}
- mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
- mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
#if 0
// enable for short term, only while debugging
updateEphemeralMediametrics(mMetricsHandle);
#endif
}
-void MediaCodec::updateHDRFormatMetric() {
+void MediaCodec::updateHdrMetrics(bool isConfig) {
+ if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
+ return;
+ }
+
+ int32_t colorStandard = -1;
+ if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+ mediametrics_setInt32(mMetricsHandle,
+ isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
+ }
+ int32_t colorRange = -1;
+ if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+ mediametrics_setInt32(mMetricsHandle,
+ isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
+ }
+ int32_t colorTransfer = -1;
+ if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+ mediametrics_setInt32(mMetricsHandle,
+ isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
+ }
+ HDRStaticInfo info;
+ if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
+ && ColorUtils::isHDRStaticInfoValid(&info)) {
+ mHdrInfoFlags |= kFlagHasHdrStaticInfo;
+ }
+ mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
+ (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
+ sp<ABuffer> hdr10PlusInfo;
+ if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+ && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+ mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
+ }
+ mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
+ (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
+
+ // hdr format
+ sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
+
+ AString mime;
int32_t profile = -1;
- AString mediaType;
- if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
- && mOutputFormat->findString("mime", &mediaType)) {
- hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+
+ if (codedFormat->findString("mime", &mime)
+ && codedFormat->findInt32(KEY_PROFILE, &profile)
+ && colorTransfer != -1) {
+ hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
}
}
-hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
- const AString &mediaType) {
- switch (transfer) {
+hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer) {
+ return (mFlags & kFlagIsEncoder)
+ ? getHdrFormatForEncoder(mime, profile, colorTransfer)
+ : getHdrFormatForDecoder(mime, profile, colorTransfer);
+}
+
+hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer) {
+ switch (colorTransfer) {
case COLOR_TRANSFER_ST2084:
- if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+ if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
switch (profile) {
case VP9Profile2HDR:
return HDR_FORMAT_HDR10;
@@ -986,7 +1044,7 @@
default:
return HDR_FORMAT_NONE;
}
- } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+ } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
switch (profile) {
case AV1ProfileMain10HDR10:
return HDR_FORMAT_HDR10;
@@ -995,7 +1053,7 @@
default:
return HDR_FORMAT_NONE;
}
- } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
switch (profile) {
case HEVCProfileMain10HDR10:
return HDR_FORMAT_HDR10;
@@ -1008,7 +1066,7 @@
return HDR_FORMAT_NONE;
}
case COLOR_TRANSFER_HLG:
- if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
return HDR_FORMAT_HLG;
} else {
// TODO: DOLBY format
@@ -1019,7 +1077,50 @@
}
}
+hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer) {
+ switch (colorTransfer) {
+ case COLOR_TRANSFER_ST2084:
+ if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
+ return HDR_FORMAT_NONE;
+ }
+ return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
+ case COLOR_TRANSFER_HLG:
+ if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ return HDR_FORMAT_HLG;
+ }
+ // TODO: DOLBY format
+ }
+ return HDR_FORMAT_NONE;
+}
+bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
+ if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+ return true;
+ } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+ switch (profile) {
+ case VP9Profile2:
+ case VP9Profile3:
+ case VP9Profile2HDR:
+ case VP9Profile3HDR:
+ case VP9Profile2HDR10Plus:
+ case VP9Profile3HDR10Plus:
+ return true;
+ }
+ } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ switch (profile) {
+ case HEVCProfileMain10:
+ case HEVCProfileMain10HDR10:
+ case HEVCProfileMain10HDR10Plus:
+ return true;
+ }
+ }
+ return false;
+}
+
+
+// called to update info being passed back via getMetrics(), which is a
+// unique copy for that call, no concurrent access worries.
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
ALOGD("MediaCodec::updateEphemeralMediametrics()");
@@ -1059,7 +1160,14 @@
}
void MediaCodec::flushMediametrics() {
+ ALOGD("flushMediametrics");
+
+ // update does its own mutex locking
updateMediametrics();
+
+ // ensure mutex while we do our own work
+ Mutex::Autolock _lock(mMetricsLock);
+ mHdrInfoFlags = 0;
if (mMetricsHandle != 0) {
if (mediametrics_count(mMetricsHandle) > 0) {
mediametrics_selfRecord(mMetricsHandle);
@@ -1575,6 +1683,8 @@
}
msg->setString("name", name);
+ // initial naming setup covers the period before the first call to ::configure().
+ // after that, we manage this through ::configure() and the setup message.
if (mMetricsHandle != 0) {
mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
@@ -1647,23 +1757,28 @@
const sp<IDescrambler> &descrambler,
uint32_t flags) {
sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+ mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
// TODO: validity check log-session-id: it should be a 32-hex-digit.
format->findString("log-session-id", &mLogSessionId);
- if (mMetricsHandle != 0) {
+ if (nextMetricsHandle != 0) {
int32_t profile = 0;
if (format->findInt32("profile", &profile)) {
- mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
+ mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
}
int32_t level = 0;
if (format->findInt32("level", &level)) {
- mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
+ mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
}
- mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
+ mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
(flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
- mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+ mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+
+ // moved here from ::init()
+ mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
+ mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
}
if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
@@ -1673,58 +1788,40 @@
mRotationDegrees = 0;
}
- if (mMetricsHandle != 0) {
- mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
- mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
- mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
+ if (nextMetricsHandle != 0) {
+ mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
+ mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
int32_t maxWidth = 0;
if (format->findInt32("max-width", &maxWidth)) {
- mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
+ mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
}
int32_t maxHeight = 0;
if (format->findInt32("max-height", &maxHeight)) {
- mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
+ mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
}
int32_t colorFormat = -1;
if (format->findInt32("color-format", &colorFormat)) {
- mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
+ mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
}
if (mDomain == DOMAIN_VIDEO) {
float frameRate = -1.0;
if (format->findFloat("frame-rate", &frameRate)) {
- mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+ mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
}
float captureRate = -1.0;
if (format->findFloat("capture-rate", &captureRate)) {
- mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+ mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
}
float operatingRate = -1.0;
if (format->findFloat("operating-rate", &operatingRate)) {
- mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+ mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
}
int32_t priority = -1;
if (format->findInt32("priority", &priority)) {
- mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+ mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
}
}
- int32_t colorStandard = -1;
- if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
- mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
- }
- int32_t colorRange = -1;
- if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
- mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
- }
- int32_t colorTransfer = -1;
- if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
- mConfigColorTransfer = colorTransfer;
- mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
- }
- HDRStaticInfo info;
- if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
- && ColorUtils::isHDRStaticInfoValid(&info)) {
- mHDRStaticInfo = true;
- }
}
// Prevent possible integer overflow in downstream code.
@@ -1735,14 +1832,14 @@
}
} else {
- if (mMetricsHandle != 0) {
+ if (nextMetricsHandle != 0) {
int32_t channelCount;
if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
- mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+ mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
}
int32_t sampleRate;
if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
- mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+ mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
}
}
}
@@ -1752,41 +1849,41 @@
enableMediaFormatShapingDefault);
if (!enableShaping) {
ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
- if (mMetricsHandle != 0) {
- mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
+ if (nextMetricsHandle != 0) {
+ mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
}
} else {
- (void) shapeMediaFormat(format, flags);
+ (void) shapeMediaFormat(format, flags, nextMetricsHandle);
// XXX: do we want to do this regardless of shaping enablement?
mapFormat(mComponentName, format, nullptr, false);
}
}
// push min/max QP to MediaMetrics after shaping
- if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
+ if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
int32_t qpIMin = -1;
if (format->findInt32("video-qp-i-min", &qpIMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
}
int32_t qpIMax = -1;
if (format->findInt32("video-qp-i-max", &qpIMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
}
int32_t qpPMin = -1;
if (format->findInt32("video-qp-p-min", &qpPMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
}
int32_t qpPMax = -1;
if (format->findInt32("video-qp-p-max", &qpPMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
}
int32_t qpBMin = -1;
if (format->findInt32("video-qp-b-min", &qpBMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
}
int32_t qpBMax = -1;
if (format->findInt32("video-qp-b-max", &qpBMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
+ mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
}
}
@@ -1802,13 +1899,23 @@
} else {
msg->setPointer("descrambler", descrambler.get());
}
- if (mMetricsHandle != 0) {
- mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
+ if (nextMetricsHandle != 0) {
+ mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
}
} else if (mFlags & kFlagIsSecure) {
ALOGW("Crypto or descrambler should be given for secure codec");
}
+ if (mConfigureMsg != nullptr) {
+ // if re-configuring, we have one of these from before.
+ // Recover the space before we discard the old mConfigureMsg
+ mediametrics_handle_t metricsHandle;
+ if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+ mediametrics_delete(metricsHandle);
+ }
+ }
+ msg->setInt64("metrics", nextMetricsHandle);
+
// save msg for reset
mConfigureMsg = msg;
@@ -2135,7 +2242,8 @@
status_t MediaCodec::shapeMediaFormat(
const sp<AMessage> &format,
- uint32_t flags) {
+ uint32_t flags,
+ mediametrics_handle_t metricsHandle) {
ALOGV("shapeMediaFormat entry");
if (!(flags & CONFIGURE_FLAG_ENCODE)) {
@@ -2191,39 +2299,39 @@
sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
size_t changeCount = deltas->countEntries();
ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
- if (mMetricsHandle != 0) {
- mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+ if (metricsHandle != 0) {
+ mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
}
if (changeCount > 0) {
- if (mMetricsHandle != 0) {
+ if (metricsHandle != 0) {
// save some old properties before we fold in the new ones
int32_t bitrate;
if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
}
int32_t qpIMin = -1;
if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
}
int32_t qpIMax = -1;
if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
}
int32_t qpPMin = -1;
if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
}
int32_t qpPMax = -1;
if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
}
int32_t qpBMin = -1;
if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
}
int32_t qpBMax = -1;
if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
- mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
+ mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
}
}
// NB: for any field in both format and deltas, the deltas copy wins
@@ -2809,26 +2917,44 @@
return OK;
}
+// this is the user-callable entry point
status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
reply = 0;
- // shouldn't happen, but be safe
- if (mMetricsHandle == 0) {
- return UNKNOWN_ERROR;
+ sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
}
- // update any in-flight data that's not carried within the record
- updateMediametrics();
-
- // send it back to the caller.
- reply = mediametrics_dup(mMetricsHandle);
-
- updateEphemeralMediametrics(reply);
+ CHECK(response->findInt64("metrics", &reply));
return OK;
}
+// runs on the looper thread (for mutex purposes)
+void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
+
+ mediametrics_handle_t results = 0;
+
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mMetricsHandle != 0) {
+ updateMediametrics();
+ results = mediametrics_dup(mMetricsHandle);
+ updateEphemeralMediametrics(results);
+ } else {
+ results = mediametrics_dup(mMetricsHandle);
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt64("metrics", results);
+ response->postReply(replyID);
+}
+
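
Since onGetMetrics() hands back a duplicated item, the caller-side pattern is unchanged: the caller still owns the returned handle and releases it when done, along the lines of this hypothetical fragment:

    // Hypothetical caller; `codec` is an existing sp<MediaCodec>.
    mediametrics_handle_t metrics = 0;
    if (codec->getMetrics(metrics) == OK && metrics != 0) {
        // ... read or serialize the metrics item ...
        mediametrics_delete(metrics);   // the dup belongs to the caller
    }
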
status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
msg->setInt32("portIndex", kPortIndexInput);
@@ -3382,8 +3508,6 @@
CHECK(msg->findMessage("input-format", &mInputFormat));
CHECK(msg->findMessage("output-format", &mOutputFormat));
- updateHDRFormatMetric();
-
// limit to confirming the opt-in behavior to minimize any behavioral change
if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
// signal frame dropping mode in the input format as this may also be
@@ -3426,6 +3550,7 @@
if (interestingFormat->findInt32("level", &level)) {
mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
}
+ updateHdrMetrics(true /* isConfig */);
// bitrate and bitrate mode, encoder only
if (mFlags & kFlagIsEncoder) {
// encoder specific values
@@ -3465,7 +3590,6 @@
mComponentName.c_str(),
mInputFormat->debugString(4).c_str(),
mOutputFormat->debugString(4).c_str());
- updateHDRFormatMetric();
CHECK(obj != NULL);
response->setObject("input-surface", obj);
mHaveInputSurface = true;
@@ -3490,7 +3614,6 @@
if (!msg->findInt32("err", &err)) {
CHECK(msg->findMessage("input-format", &mInputFormat));
CHECK(msg->findMessage("output-format", &mOutputFormat));
- updateHDRFormatMetric();
mHaveInputSurface = true;
} else {
response->setInt32("err", err);
@@ -3890,6 +4013,13 @@
break;
}
+ case kWhatGetMetrics:
+ {
+ onGetMetrics(msg);
+ break;
+ }
+
+
case kWhatConfigure:
{
if (mState != INITIALIZED) {
@@ -3910,6 +4040,18 @@
sp<AMessage> format;
CHECK(msg->findMessage("format", &format));
+ // start with a copy of the passed metrics info for use in this run
+ mediametrics_handle_t handle;
+ CHECK(msg->findInt64("metrics", &handle));
+ if (handle != 0) {
+ if (mMetricsHandle != 0) {
+ flushMediametrics();
+ }
+ mMetricsHandle = mediametrics_dup(handle);
+ // and set some additional metrics values
+ initMediametrics();
+ }
+
int32_t push;
if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
mFlags |= kFlagPushBlankBuffersOnShutdown;
@@ -4684,7 +4826,6 @@
buffer->meta()->setObject("changedKeys", changedKeys);
}
mOutputFormat = format;
- updateHDRFormatMetric();
mapFormat(mComponentName, format, nullptr, true);
ALOGV("[%s] output format changed to: %s",
mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4710,9 +4851,6 @@
HDRStaticInfo info;
if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
setNativeWindowHdrMetadata(mSurface.get(), &info);
- if (ColorUtils::isHDRStaticInfoValid(&info)) {
- mHDRStaticInfo = true;
- }
}
}
@@ -4721,7 +4859,6 @@
&& hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
hdr10PlusInfo->size(), hdr10PlusInfo->data());
- mHDR10PlusInfo = true;
}
if (mime.startsWithIgnoreCase("video/")) {
@@ -4767,21 +4904,8 @@
}
}
- if (mMetricsHandle != 0) {
- int32_t colorStandard = -1;
- if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
- mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
- }
- int32_t colorRange = -1;
- if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
- mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
- }
- int32_t colorTransfer = -1;
- if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
- mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
- }
- }
-}
+ updateHdrMetrics(false /* isConfig */);
+ }
void MediaCodec::extractCSD(const sp<AMessage> &format) {
mCSD.clear();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 6f6a4e6..edb3786 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -357,6 +357,7 @@
kWhatSetNotification = 'setN',
kWhatDrmReleaseCrypto = 'rDrm',
kWhatCheckBatteryStats = 'chkB',
+ kWhatGetMetrics = 'getM',
};
enum {
@@ -427,6 +428,7 @@
sp<Surface> mSurface;
SoftwareRenderer *mSoftRenderer;
+ Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
nsecs_t mLifetimeStartNs = 0;
void initMediametrics();
@@ -434,6 +436,7 @@
void flushMediametrics();
void updateEphemeralMediametrics(mediametrics_handle_t item);
void updateLowLatency(const sp<AMessage> &msg);
+ void onGetMetrics(const sp<AMessage>& msg);
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
void updatePlaybackDuration(const sp<AMessage> &msg);
@@ -454,12 +457,19 @@
int32_t mRotationDegrees;
int32_t mAllowFrameDroppingBySurface;
- int32_t mConfigColorTransfer;
- bool mHDRStaticInfo;
- bool mHDR10PlusInfo;
- void updateHDRFormatMetric();
- hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
- const AString &mediaType);
+ enum {
+ kFlagHasHdrStaticInfo = 1,
+ kFlagHasHdr10PlusInfo = 2,
+ };
+ uint32_t mHdrInfoFlags;
+ void updateHdrMetrics(bool isConfig);
+ hdr_format getHdrFormat(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer);
+ hdr_format getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer);
+ hdr_format getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+ const int32_t colorTransfer);
+ bool profileSupport10Bits(const AString &mime, const int32_t profile);
// initial create parameters
AString mInitName;
@@ -472,7 +482,8 @@
// the (possibly) updated format is returned in place.
status_t shapeMediaFormat(
const sp<AMessage> &format,
- uint32_t flags);
+ uint32_t flags,
+ mediametrics_handle_t handle);
// populate the format shaper library with information for this codec encoding
// for the indicated media type
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 33f224c..2ca0e33 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -267,6 +267,7 @@
kKeyRtpExtMap = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
kKeyRtpCvoDegrees = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
kKeyRtpDscp = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+ kKeyRtpEcn = 'sEcn', // int32_t, ECN (Explicit Congestion Notification) of RFC 3168
kKeySocketNetwork = 'sNet', // int64_t, socket will be bound to network handle.
// Slow-motion markers
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ddf797c..100c0cd 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -46,7 +46,6 @@
mFirstIFrameProvided(false),
mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
- mLastRtpTimeJitterDataUs(0),
mWidth(0),
mHeight(0) {
}
@@ -123,20 +122,11 @@
}
sp<ABuffer> buffer = *queue->begin();
+ uint32_t seqNum = (uint32_t)buffer->int32Data();
buffer->meta()->setObject("source", source);
- /**
- * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
- * But that is not useful as an ingredient of buffering time.
- * Instead, we calculates the time only for all 'NAL units'.
- */
int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
int64_t nowTimeUs = ALooper::GetNowUs();
- if (rtpTime != mLastRtpTimeJitterDataUs) {
- source->putBaseJitterData(rtpTime, nowTimeUs);
- mLastRtpTimeJitterDataUs = rtpTime;
- }
- source->putInterArrivalJitterData(rtpTime, nowTimeUs);
const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -168,7 +158,7 @@
const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
/* Fundamental jitter time */
- const int32_t jitterTimeMs = baseJbTimeMs;
+ const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
// Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -177,7 +167,7 @@
bool isExpired = (diffTimeRtp >= 0); // It's expired if T is passed away
// From (T), this assembler tries to complete the NAL till (T + try)
- int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+ int32_t tryJbTimeMs = dynamicJbTimeMs;
int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
@@ -208,10 +198,10 @@
String8 info;
info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
- "JitterMs %d + (%d + %d * %.3f)",
+ "JitterMs [%d + (~%d~)] + %d * %.3f",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
- buffer->int32Data(), mNextExpectedSeqNo,
- jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ seqNum, mNextExpectedSeqNo,
+ baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
if (isSecondLineBroken) {
ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -223,6 +213,9 @@
}
if (mNextExpectedSeqNoValid) {
+ if (mNextExpectedSeqNo > seqNum) {
+ ALOGE("Reversed exp seq# %d \t current head %d", mNextExpectedSeqNo, seqNum);
+ }
mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
@@ -241,10 +234,10 @@
if (!mNextExpectedSeqNoValid) {
mNextExpectedSeqNoValid = true;
- mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
- } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
- ALOGV("Not the sequence number I expected");
-
+ mNextExpectedSeqNo = seqNum;
+ } else if (seqNum != mNextExpectedSeqNo) {
+ ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+ mNextExpectedSeqNo, seqNum);
return WRONG_SEQUENCE_NUMBER;
}
@@ -332,6 +325,11 @@
}
bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+ if (buffer->size() == 0) {
+ ALOGE("b/230630526 buffer->size() == 0");
+ android_errorWriteLog(0x534e4554, "230630526");
+ return false;
+ }
const uint8_t *data = buffer->data();
unsigned nalType = data[0] & 0x1f;
if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -624,8 +622,7 @@
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- int32_t nalType = buffer->size() >= 1 ? buffer->data()[0] & 0x1f : -1;
- if (nalType != 28 || (buffer->size() >= 2 && buffer->data()[1] & 0x80)) {
+ if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
return firstSeqNo;
}
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..7b5c24a 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -53,7 +53,6 @@
mFirstIFrameProvided(false),
mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
- mLastRtpTimeJitterDataUs(0),
mWidth(0),
mHeight(0) {
@@ -133,20 +132,11 @@
}
sp<ABuffer> buffer = *queue->begin();
+ uint32_t seqNum = (uint32_t)buffer->int32Data();
buffer->meta()->setObject("source", source);
- /**
- * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
- * But that is not useful as an ingredient of buffering time.
- * Instead, we calculates the time only for all 'NAL units'.
- */
int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
int64_t nowTimeUs = ALooper::GetNowUs();
- if (rtpTime != mLastRtpTimeJitterDataUs) {
- source->putBaseJitterData(rtpTime, nowTimeUs);
- mLastRtpTimeJitterDataUs = rtpTime;
- }
- source->putInterArrivalJitterData(rtpTime, nowTimeUs);
const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -178,7 +168,7 @@
const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
/* Fundamental jitter time */
- const int32_t jitterTimeMs = baseJbTimeMs;
+ const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
// Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -187,7 +177,7 @@
bool isExpired = (diffTimeRtp >= 0); // It's expired if T is passed away
// From (T), this assembler tries to complete the NAL till (T + try)
- int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+ int32_t tryJbTimeMs = dynamicJbTimeMs;
int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
@@ -218,10 +208,10 @@
String8 info;
info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
- "JitterMs %d + (%d + %d * %.3f)",
+ "JitterMs [%d + (~%d~)] + %d * %.3f",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
- buffer->int32Data(), mNextExpectedSeqNo,
- jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ seqNum, mNextExpectedSeqNo,
+ baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
if (isSecondLineBroken) {
ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -251,10 +241,10 @@
if (!mNextExpectedSeqNoValid) {
mNextExpectedSeqNoValid = true;
- mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
- } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
- ALOGV("Not the sequence number I expected");
-
+ mNextExpectedSeqNo = seqNum;
+ } else if (seqNum != mNextExpectedSeqNo) {
+ ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+ mNextExpectedSeqNo, seqNum);
return WRONG_SEQUENCE_NUMBER;
}
@@ -629,13 +619,13 @@
int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ CHECK(!queue->empty());
// pick the first sequence number has the start bit.
sp<ABuffer> buffer = *(queue->begin());
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- unsigned nalType = buffer->data()[0] & 0x1f;
- if (nalType != 28 || buffer->data()[2] & 0x80) {
+ if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
return firstSeqNo;
}
@@ -645,7 +635,7 @@
if (rtpTime + jit >= play) {
break;
}
- if ((data[2] & 0x80)) {
+ if (it->size() >= 3 && (data[2] & 0x80)) {
const int32_t seqNo = it->int32Data();
ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
firstSeqNo = seqNo;
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a61f48f..165c336 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -16,6 +16,12 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ARTPConnection"
+#define INET_ECN_NOT_ECT 0x00 /* ECN was not enabled */
+#define INET_ECN_ECT_1 0x01 /* ECN capable packet */
+#define INET_ECN_ECT_0 0x02 /* ECN capable packet */
+#define INET_ECN_CE 0x03 /* ECN congestion */
+#define INET_ECN_MASK 0x03 /* Mask of ECN bits */
+
#include <utils/Log.h>
#include <media/stagefright/rtsp/ARTPAssembler.h>
@@ -56,6 +62,7 @@
// static
const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
+const int64_t ARTPConnection::kMinOneSecondNotifyDelayUs = 100000ll;
struct ARTPConnection::StreamInfo {
bool isIPv6;
@@ -84,7 +91,10 @@
mPollEventPending(false),
mLastReceiverReportTimeUs(-1),
mLastBitrateReportTimeUs(-1),
+ mLastCongestionNotifyTimeUs(-1),
mTargetBitrate(-1),
+ mRtpSockOptEcn(0),
+ mIsIPv6(false),
mStaticJitterTimeMs(kStaticJitterTimeMs) {
}
@@ -175,7 +185,7 @@
// static
void ARTPConnection::MakeRTPSocketPair(
int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
- unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+ unsigned localPort, unsigned remotePort, int64_t socketNetwork, int32_t sockOptEcn) {
bool isIPv6 = false;
if (strchr(localIp, ':') != NULL)
isIPv6 = true;
@@ -204,6 +214,24 @@
}
}
+ if (sockOptEcn != 0) {
+ int sockOptForTOS = 1;
+ if (setsockopt(*rtpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+ isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+ (int *)&sockOptForTOS, sizeof(sockOptForTOS)) < 0) {
+ ALOGE("failed to set recv sockopt TOS on rtpsock(%d). err=%s", *rtpSocket,
+ strerror(errno));
+ } else {
+ ALOGD("successfully set recv sockopt TOS on rtpsock(%d)", *rtpSocket);
+ int result = setsockopt(*rtcpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+ isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+ (int *)&sockOptForTOS, sizeof(sockOptForTOS));
+ if (result >= 0) {
+ ALOGD("successfully set recv sockopt TOS on rtcpsock(%d).", *rtcpSocket);
+ }
+ }
+ }
+
bumpSocketBufferSize(*rtcpSocket);
struct sockaddr *addr;
@@ -593,32 +621,25 @@
sp<ABuffer> buffer = new ABuffer(65536);
- struct sockaddr *pRemoteRTCPAddr;
- int sizeSockSt;
- if (s->isIPv6) {
- pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
- sizeSockSt = sizeof(struct sockaddr_in6);
- } else {
- pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
- sizeSockSt = sizeof(struct sockaddr_in);
- }
- socklen_t remoteAddrLen =
- (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
- ? sizeSockSt : 0;
+ struct msghdr sMsg = {};
+ struct iovec sIov[1] = {};
- if (mFlags & kViLTEConnection) {
- remoteAddrLen = 0;
- }
+ sIov[0].iov_base = (char *) buffer->data();
+ sIov[0].iov_len = buffer->capacity();
+
+ sMsg.msg_iov = sIov;
+ sMsg.msg_iovlen = 1;
+
+ int cMsgSize = sizeof(struct cmsghdr) + sizeof(uint8_t);
+ char buf[CMSG_SPACE(cMsgSize)];
+ sMsg.msg_control = buf;
+ sMsg.msg_controllen = sizeof(buf);
+ sMsg.msg_flags = 0;
ssize_t nbytes;
do {
- nbytes = recvfrom(
- receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
- buffer->data(),
- buffer->capacity(),
- 0,
- remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
- remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+ // Use recvmsg to get the TOS header of the incoming packet
+ nbytes = recvmsg(receiveRTP ? s->mRTPSocket : s->mRTCPSocket, &sMsg, 0);
mCumulativeBytes += nbytes;
} while (nbytes < 0 && errno == EINTR);
@@ -633,6 +654,10 @@
}
}
+ if (nbytes > 0) {
+ handleIpHeadersIfReceived(s, sMsg);
+ }
+
buffer->setRange(0, nbytes);
// ALOGI("received %d bytes.", buffer->size());
@@ -647,13 +672,68 @@
return err;
}
+/* This function checks whether a TOS/TCLASS value was delivered with the received IP packet.
+ * If it was, and the CE bit is set in the ECN field, it notifies the upper layer about
+ * congestion.
+ **/
+void ARTPConnection::handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg) {
+ struct cmsghdr *cMsg;
+ cMsg = CMSG_FIRSTHDR(&sMsg);
+
+ if (cMsg == NULL) {
+ ALOGV("cmsg is null");
+ }
+
+ for (; cMsg != NULL; cMsg = CMSG_NXTHDR(&sMsg, cMsg)) {
+ bool isTOSHeader = ((cMsg->cmsg_level == (mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP))
+ && (cMsg->cmsg_type == (mIsIPv6 ? IPV6_TCLASS : IP_TOS))
+ && (cMsg->cmsg_len));
+ if (isTOSHeader) {
+ uint8_t receivedTOS;
+ receivedTOS = *((uint8_t *) CMSG_DATA(cMsg));
+ // check whether the CE bit is set
+ bool isCEBitMarked = ((receivedTOS & INET_ECN_MASK) == INET_ECN_CE);
+
+ ALOGV("receivedTos(value -> %d)", receivedTOS);
+
+ if (isCEBitMarked) {
+ ALOGD("receivedTos(value -> %d), is ECN CE marked = %d",
+ receivedTOS, isCEBitMarked);
+ notifyCongestionToUpperLayerIfNeeded(s);
+ }
+ break;
+ }
+ }
+}
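For reference, a minimal standalone sketch (not part of the patch) of the ECN test that handleIpHeadersIfReceived applies to the received TOS/TCLASS byte, using the INET_ECN_* values defined at the top of this file; the hypothetical DSCP value only shows that the upper six bits do not affect the result:

    #include <cstdint>
    #include <cstdio>

    static constexpr uint8_t kInetEcnMask = 0x03;  // INET_ECN_MASK
    static constexpr uint8_t kInetEcnCe   = 0x03;  // INET_ECN_CE

    // True when the two low-order ECN bits of the TOS/TCLASS byte signal Congestion Experienced.
    static bool isCeMarked(uint8_t tos) {
        return (tos & kInetEcnMask) == kInetEcnCe;
    }

    int main() {
        uint8_t tos = (46 << 2) | 0x03;  // hypothetical: DSCP EF in bits 2-7, CE in the ECN field
        printf("CE marked: %s\n", isCeMarked(tos) ? "yes" : "no");  // prints "yes"
        return 0;
    }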
+
+/* This function is used to notify the upper layer about congestion in a video call */
+void ARTPConnection::notifyCongestionToUpperLayerIfNeeded(StreamInfo *s) {
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mLastCongestionNotifyTimeUs <= 0) {
+ mLastCongestionNotifyTimeUs = nowUs;
+ }
+
+ bool isNeedToUpdate = (mLastCongestionNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs);
+ ALOGD("ECN info set by upper layer=%d, isNeedToUpdate=%d", mRtpSockOptEcn, isNeedToUpdate);
+
+ if ((mRtpSockOptEcn != 0) && (isNeedToUpdate)) {
+ sp<AMessage> notify = s->mNotifyMsg->dup();
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTP_QUALITY_CD);
+ notify->post();
+ mLastCongestionNotifyTimeUs = nowUs;
+ ALOGD("Congestion detected in network, notifying upper layer");
+ }
+}
+
ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
struct sockaddr* pRemoteRTCPAddr;
int sizeSockSt;
/* It seems this isIPv6 variable is useless.
* We should remove it to prevent confusion */
- if (info->isIPv6) {
+ if (mIsIPv6) {
pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
sizeSockSt = sizeof(struct sockaddr_in6);
} else {
@@ -1215,12 +1295,20 @@
mTargetBitrate = targetBitrate;
}
+void ARTPConnection::setRtpSockOptEcn(int32_t sockOptEcn) {
+ mRtpSockOptEcn = sockOptEcn;
+}
+
+void ARTPConnection::setIsIPv6(const char *localIp) {
+ mIsIPv6 = (strchr(localIp, ':') != nullptr);
+}
+
void ARTPConnection::checkRxBitrate(int64_t nowUs) {
if (mLastBitrateReportTimeUs <= 0) {
mCumulativeBytes = 0;
mLastBitrateReportTimeUs = nowUs;
}
- else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+ else if (mLastEarlyNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs) {
int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
mLastEarlyNotifyTimeUs = nowUs;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 717d8af..c5b0a1e 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -264,12 +264,12 @@
bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
int64_t nowUs = ALooper::GetNowUs();
+ int64_t rtpTime = 0;
uint32_t seqNum = (uint32_t)buffer->int32Data();
- int32_t ssrc = 0, rtpTime = 0;
+ int32_t ssrc = 0;
buffer->meta()->findInt32("ssrc", &ssrc);
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- mLatestRtpTime = rtpTime;
if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
mFirstSysTime = nowUs;
@@ -277,7 +277,7 @@
mLastSysAnchorTimeUpdatedUs = nowUs;
mHighestSeqNumber = seqNum;
mBaseSeqNumber = seqNum;
- mFirstRtpTime = rtpTime;
+ mFirstRtpTime = (uint32_t)rtpTime;
mFirstSsrc = ssrc;
ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
@@ -352,6 +352,18 @@
mQueue.insert(it, buffer);
+ /**
+ * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
+ * We calculate another jitter only for the 'Head NAL units'.
+ */
+ ALOGV("<======== Insert %d", seqNum);
+ rtpTime = mAssembler->findRTPTime(mFirstRtpTime, buffer);
+ if (rtpTime != mLatestRtpTime) {
+ mJitterCalc->putBaseData(rtpTime, nowUs);
+ }
+ mJitterCalc->putInterArrivalData(rtpTime, nowUs);
+ mLatestRtpTime = rtpTime;
+
return true;
}
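For context, the comment above refers to the RFC 3550 (section 6.4.1) interarrival jitter estimate, a running average of the change in relative transit time between consecutive packets. The sketch below shows only that textbook update rule; it is not the JitterCalc implementation behind putBaseData / putInterArrivalData, whose internals are not part of this diff:

    #include <cstdint>
    #include <cstdio>

    // RFC 3550: J(i) = J(i-1) + (|D(i-1,i)| - J(i-1)) / 16, with D the difference in
    // relative transit time (arrival - RTP timestamp), all in RTP timestamp units.
    struct Rfc3550Jitter {
        int64_t prevTransit = 0;
        bool hasPrev = false;
        double jitter = 0.0;

        void update(int64_t rtpTimestamp, int64_t arrivalRtpUnits) {
            int64_t transit = arrivalRtpUnits - rtpTimestamp;
            if (hasPrev) {
                int64_t d = transit - prevTransit;
                if (d < 0) d = -d;
                jitter += (static_cast<double>(d) - jitter) / 16.0;
            }
            prevTransit = transit;
            hasPrev = true;
        }
    };

    int main() {
        Rfc3550Jitter j;
        j.update(0, 0);
        j.update(3600, 3696);  // second packet arrives 96 RTP units later than its timestamp predicts
        printf("jitter estimate = %.2f RTP units\n", j.jitter);  // prints 6.00 (= 96 / 16)
        return 0;
    }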
@@ -680,14 +692,6 @@
mStaticJbTimeMs = jbTimeMs;
}
-void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
- mJitterCalc->putBaseData(timeStamp, arrivalTime);
-}
-
-void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
- mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
-}
-
void ARTPSource::setJbTimer(const sp<AMessage> timer) {
mJbTimer = timer;
}
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8990f0c..41f2d67 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -255,9 +255,34 @@
if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
mRTPCVODegrees = rtpCVODegrees;
+ bool needToSetSockOpt = false;
int32_t dscp = 0;
- if (params->findInt32(kKeyRtpDscp, &dscp))
- updateSocketDscp(dscp);
+ if (params->findInt32(kKeyRtpDscp, &dscp)) {
+ mRtpLayer3Dscp = dscp << 2;
+ needToSetSockOpt = true;
+ }
+
+ int32_t ecn = 0;
+ if (params->findInt32(kKeyRtpEcn, &ecn)) {
+ /*
+ * @ecn, possible value for ECN.
+ * +-----+-----+
+ * | ECN FIELD |
+ * +-----+-----+
+ * ECT CE [Obsolete] RFC 2481 names for the ECN bits.
+ * 0 0 Not-ECT
+ * 0 1 ECT (ECN-Capable Transport) (1)
+ * 1 0 ECT (ECN-Capable Transport) (0)
+ * 1 1 CE (Congestion Experienced)
+ *
+ */
+ mRtpSockOptEcn = ecn;
+ needToSetSockOpt = true;
+ }
+
+ if (needToSetSockOpt) {
+ updateSocketOpt();
+ }
int64_t sockNetwork = 0;
if (params->findInt64(kKeySocketNetwork, &sockNetwork))
@@ -1438,18 +1463,29 @@
mPayloadType = payloadType;
}
-void ARTPWriter::updateSocketDscp(int32_t dscp) {
- mRtpLayer3Dscp = dscp << 2;
+/*
+ * This function sets the DSCP/ECN socket option (TOS / Traffic Class) in the IP header.
+ */
+void ARTPWriter::updateSocketOpt() {
+ /*
+ * 0 1 2 3 4 5 6 7
+ * +-----+-----+-----+-----+-----+-----+-----+-----+
+ * | DS FIELD, DSCP | ECN FIELD |
+ * +-----+-----+-----+-----+-----+-----+-----+-----+
+ */
+ int sockOpt = mRtpLayer3Dscp ^ mRtpSockOptEcn;
+ ALOGD("Update socket opt with sockopt=%d, mRtpLayer3Dscp=%d, mRtpSockOptEcn=%d",
+ sockOpt, mRtpLayer3Dscp, mRtpSockOptEcn);
- /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
- if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
- (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
- ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+ /* sockOpt carries the combined DSCP and ECN bits written to the IP header socket option */
+ if (setsockopt(mRTPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+ (int *)&sockOpt, sizeof(sockOpt)) < 0) {
+ ALOGE("failed to set sockopt on rtpsock. err=%s", strerror(errno));
} else {
- ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
- setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
- (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
- ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+ ALOGD("successfully set sockopt. opt=%d", sockOpt);
+ setsockopt(mRTCPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+ (int *)&sockOpt, sizeof(sockOpt));
+ ALOGD("successfully set sockopt on rtcpsock. opt=%d", sockOpt);
}
}
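Since mRtpLayer3Dscp holds the DSCP value shifted into bits 2-7 and mRtpSockOptEcn occupies bits 0-1, the two fields never overlap, so the XOR used by updateSocketOpt is equivalent to a bitwise OR here. A small standalone sketch with hypothetical values:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t dscp = 46;                  // hypothetical DSCP (Expedited Forwarding)
        int32_t rtpLayer3Dscp = dscp << 2;  // DSCP placed in bits 2-7 of the TOS / Traffic Class byte
        int32_t rtpSockOptEcn = 0x02;       // hypothetical ECN value: ECT(0)
        int32_t viaXor = rtpLayer3Dscp ^ rtpSockOptEcn;
        int32_t viaOr  = rtpLayer3Dscp | rtpSockOptEcn;
        printf("sockOpt via xor=0x%02x, via or=0x%02x\n", viaXor, viaOr);  // both print 0xba
        return 0;
    }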
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
index 2f8b8ba..70ce388 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
@@ -50,7 +50,6 @@
bool mFirstIFrameProvided;
int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
- int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
int32_t mHeight;
List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
index 9575d8c..ed3f1ae 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
@@ -51,7 +51,6 @@
bool mFirstIFrameProvided;
int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
- int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
int32_t mHeight;
List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index 39161b6..8f87642 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -44,6 +44,13 @@
virtual void onByeReceived() = 0;
virtual bool initCheck() { return true; }
+ // Utility functions
+ inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+ inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+ inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+ inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+ inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
protected:
virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source) = 0;
virtual void packetLost() = 0;
@@ -64,13 +71,6 @@
bool mShowQueue;
int32_t mShowQueueCnt;
- // Utility functions
- inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
- inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
- inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
- inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
- inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
-
private:
int64_t mFirstFailureTimeUs;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 73d2866..250de71 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -20,6 +20,7 @@
#include <media/stagefright/foundation/AHandler.h>
#include <utils/List.h>
+#include <sys/socket.h>
namespace android {
@@ -48,6 +49,8 @@
void setSelfID(const uint32_t selfID);
void setStaticJitterTimeMs(const uint32_t jbTimeMs);
void setTargetBitrate(int32_t targetBitrate);
+ void setRtpSockOptEcn(int32_t sockOptEcn);
+ void setIsIPv6(const char *localIp);
// Creates a pair of UDP datagram sockets bound to adjacent ports
// (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -60,7 +63,8 @@
static void MakeRTPSocketPair(
int *rtpSocket, int *rtcpSocket,
const char *localIp, const char *remoteIp,
- unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
+ unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0,
+ int32_t sockOptEcn = 0);
protected:
virtual ~ARTPConnection();
@@ -77,6 +81,7 @@
};
static const int64_t kSelectTimeoutUs;
+ static const int64_t kMinOneSecondNotifyDelayUs;
uint32_t mFlags;
@@ -87,9 +92,12 @@
int64_t mLastReceiverReportTimeUs;
int64_t mLastBitrateReportTimeUs;
int64_t mLastEarlyNotifyTimeUs;
+ int64_t mLastCongestionNotifyTimeUs;
int32_t mSelfID;
int32_t mTargetBitrate;
+ int32_t mRtpSockOptEcn;
+ bool mIsIPv6;
uint32_t mStaticJitterTimeMs;
@@ -103,6 +111,8 @@
void onInjectPacket(const sp<AMessage> &msg);
void onSendReceiverReports();
void checkRxBitrate(int64_t nowUs);
+ void notifyCongestionToUpperLayerIfNeeded(StreamInfo *s);
+ void handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg);
status_t receive(StreamInfo *info, bool receiveRTP);
ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index e9b4942..7d1faf2 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -50,6 +50,7 @@
RTCP_FIRST_PACKET = 101,
RTP_QUALITY = 102,
RTP_QUALITY_EMC = 103,
+ RTP_QUALITY_CD = 104,
RTCP_SR = 200,
RTCP_RR = 201,
RTCP_TSFB = 205,
@@ -81,8 +82,6 @@
int32_t getBaseJitterTimeMs();
int32_t getInterArrivalJitterTimeMs();
void setStaticJitterTimeMs(const uint32_t jbTimeMs);
- void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
- void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
void setJbTimer(const sp<AMessage> timer);
void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
index 2982cf6..ecd29d0 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
@@ -50,7 +50,7 @@
virtual status_t pause();
void updateCVODegrees(int32_t cvoDegrees);
void updatePayloadType(int32_t payloadType);
- void updateSocketDscp(int32_t dscp);
+ void updateSocketOpt();
void updateSocketNetwork(int64_t socketNetwork);
uint32_t getSequenceNum();
virtual uint64_t getAccumulativeBytes() override;
@@ -98,6 +98,7 @@
struct sockaddr_in6 mRTPAddr6;
struct sockaddr_in6 mRTCPAddr6;
int32_t mRtpLayer3Dscp;
+ int32_t mRtpSockOptEcn;
net_handle_t mRTPSockNetwork;
AString mProfileLevel;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 07f4529..83b84e3 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -103,11 +103,10 @@
AttributionSourceState myAttributionSource;
myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
- if (callerAttributionSource.token != nullptr) {
- myAttributionSource.token = callerAttributionSource.token;
- } else {
- myAttributionSource.token = sp<BBinder>::make();
- }
+ // Create a static token for audioserver requests, which identifies the
+ // audioserver to the app ops system
+ static sp<BBinder> appOpsToken = sp<BBinder>::make();
+ myAttributionSource.token = appOpsToken;
myAttributionSource.next.push_back(nextAttributionSource);
return std::optional<AttributionSourceState>{myAttributionSource};
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e4c4107..f1f8a8f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -288,7 +288,6 @@
return opPackageLegacy == package; }) == packages.end()) {
ALOGW("The package name(%s) provided does not correspond to the uid %d",
attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
- checkedAttributionSource.packageName = std::optional<std::string>();
}
}
return checkedAttributionSource;
@@ -588,6 +587,33 @@
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_attributes_t localAttr = *attr;
+
+ // TODO b/182392553: refactor or make clearer
+ pid_t clientPid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+ bool updatePid = (clientPid == (pid_t)-1);
+ const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+
+ AttributionSourceState adjAttributionSource = client.attributionSource;
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ uid_t clientUid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+ ALOGW_IF(clientUid != callingUid,
+ "%s uid %d tried to pass itself off as %d",
+ __FUNCTION__, callingUid, clientUid);
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+ updatePid = true;
+ }
+ if (updatePid) {
+ const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, clientPid);
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+ }
+ adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ adjAttributionSource);
+
if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
fullConfig.sample_rate = config->sample_rate;
@@ -597,7 +623,7 @@
bool isSpatialized;
ret = AudioSystem::getOutputForAttr(&localAttr, &io,
actualSessionId,
- &streamType, client.attributionSource,
+ &streamType, adjAttributionSource,
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
@@ -608,7 +634,7 @@
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
actualSessionId,
- client.attributionSource,
+ adjAttributionSource,
config,
AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
}
@@ -1057,7 +1083,7 @@
audio_attributes_t localAttr = input.attr;
AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
- if (!isAudioServerOrMediaServerUid(callingUid)) {
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
ALOGW_IF(clientUid != callingUid,
"%s uid %d tried to pass itself off as %d",
__FUNCTION__, callingUid, clientUid);
@@ -1073,6 +1099,8 @@
clientPid = callingPid;
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
+ adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ adjAttributionSource);
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -2317,7 +2345,7 @@
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
adjAttributionSource.uid));
- if (!isAudioServerOrMediaServerUid(callingUid)) {
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
ALOGW_IF(currentUid != callingUid,
"%s uid %d tried to pass itself off as %d",
__FUNCTION__, callingUid, currentUid);
@@ -2333,7 +2361,8 @@
__func__, callingUid, callingPid, currentPid);
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
-
+ adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ adjAttributionSource);
// we don't yet support anything other than linear PCM
if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3967,7 +3996,7 @@
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
- if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+ if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
const pid_t callingPid = IPCThreadState::self()->getCallingPid();
ALOGW_IF(currentPid != -1 && currentPid != callingPid,
"%s uid %d pid %d tried to pass itself off as pid %d",
@@ -3975,6 +4004,7 @@
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
currentPid = callingPid;
}
+ adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..b54b41f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -313,12 +313,19 @@
patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+ audio_source_t source = AUDIO_SOURCE_MIC;
+ // For telephony patches, propagate voice communication use case to record side
+ if (patch->num_sources == 2
+ && patch->sources[1].ext.mix.usecase.stream
+ == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
&input,
&config,
device,
address,
- AUDIO_SOURCE_MIC,
+ source,
flags,
outputDevice,
outputDeviceAddress);
@@ -516,9 +523,14 @@
audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+ audio_source_t source = AUDIO_SOURCE_DEFAULT;
if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
// "reuse one existing output mix" case
streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+ // For telephony patches, propagate voice communication use case to record side
+ if (streamType == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
}
if (mPlayback.thread()->hasFastMixer()) {
// Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +558,8 @@
inChannelMask,
format,
frameCount,
- inputFlags);
+ inputFlags,
+ source);
} else {
// use a pseudo LCM between input and output framecount
int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +579,9 @@
frameCount,
nullptr,
(size_t)0 /* bufferSize */,
- inputFlags);
+ inputFlags,
+ {} /* timeout */,
+ source);
}
status = mRecord.checkTrack(tempRecordTrack.get());
if (status != NO_ERROR) {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index e8552c4..daec57e 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -87,6 +87,10 @@
&& (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
}
+ using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+ using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+ virtual void copyMetadataTo(MetadataInserter& backInserter) const;
+
private:
friend class AudioFlinger; // for mState
@@ -134,7 +138,8 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout = {});
+ const Timeout& timeout = {},
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
virtual ~PatchRecord();
virtual Source* getSource() { return nullptr; }
@@ -166,7 +171,8 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags);
+ audio_input_flags_t flags,
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
Source* getSource() override { return static_cast<Source*>(this); }
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c0e612d..f759d58 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8368,8 +8368,6 @@
audio_input_flags_t inputFlags = mInput->flags;
audio_input_flags_t requestedFlags = *flags;
uint32_t sampleRate;
- AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- attributionSource);
lStatus = initCheck();
if (lStatus != NO_ERROR) {
@@ -8384,7 +8382,7 @@
}
if (maxSharedAudioHistoryMs != 0) {
- if (!captureHotwordAllowed(checkedAttributionSource)) {
+ if (!captureHotwordAllowed(attributionSource)) {
lStatus = PERMISSION_DENIED;
goto Exit;
}
@@ -8505,16 +8503,16 @@
Mutex::Autolock _l(mLock);
int32_t startFrames = -1;
if (!mSharedAudioPackageName.empty()
- && mSharedAudioPackageName == checkedAttributionSource.packageName
+ && mSharedAudioPackageName == attributionSource.packageName
&& mSharedAudioSessionId == sessionId
- && captureHotwordAllowed(checkedAttributionSource)) {
+ && captureHotwordAllowed(attributionSource)) {
startFrames = mSharedAudioStartFrames;
}
track = new RecordTrack(this, client, attr, sampleRate,
format, channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
- checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+ attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
startFrames);
lStatus = track->initCheck();
@@ -8814,21 +8812,9 @@
return; // nothing to do
}
StreamInHalInterface::SinkMetadata metadata;
+ auto backInserter = std::back_inserter(metadata.tracks);
for (const sp<RecordTrack> &track : mActiveTracks) {
- // Do not forward PatchRecord metadata to audio HAL
- if (track->isPatchTrack()) {
- continue;
- }
- // No track is invalid as this is called after prepareTrack_l in the same critical section
- record_track_metadata_v7_t trackMetadata;
- trackMetadata.base = {
- .source = track->attributes().source,
- .gain = 1, // capture tracks do not have volumes
- };
- trackMetadata.channel_mask = track->channelMask(),
- strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
- metadata.tracks.push_back(trackMetadata);
+ track->copyMetadataTo(backInserter);
}
mInput->stream->updateSinkMetadata(metadata);
}
@@ -10284,19 +10270,22 @@
void AudioFlinger::MmapThread::checkInvalidTracks_l()
{
+ sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
if (track->isInvalid()) {
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback != 0) {
- mLock.unlock();
- callback->onTearDown(track->portId());
- mLock.lock();
- } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
- ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+ callback = mCallback.promote();
+ if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
}
+ break;
}
}
+ if (callback != 0) {
+ mLock.unlock();
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ mLock.lock();
+ }
}
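The reworked checkInvalidTracks_l promotes the callback while scanning under mLock and drops the lock once before invoking onRoutingChanged, instead of unlocking inside the loop. A rough standalone analogy of that pattern (using std::weak_ptr and std::mutex in place of android::wp and the thread mutex, with a hypothetical callback type):

    #include <cstdio>
    #include <memory>
    #include <mutex>

    struct Callback {  // hypothetical stand-in for the MMAP stream callback interface
        void onRoutingChanged(int portId) { printf("routing changed, port=%d\n", portId); }
    };

    int main() {
        std::mutex lock;
        auto registered = std::make_shared<Callback>();  // kept alive by its owner
        std::weak_ptr<Callback> weakCb = registered;

        std::shared_ptr<Callback> cb;
        {
            std::lock_guard<std::mutex> guard(lock);
            // scan tracks under the lock; promote the callback if an invalid track is found
            cb = weakCb.lock();
        }
        if (cb != nullptr) {
            cb->onRoutingChanged(0);  // invoke with the lock released (hypothetical port handle value)
        }
        return 0;
    }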
void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 02c4aaf..a3a5df4 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -531,10 +531,7 @@
id, attr.flags);
return nullptr;
}
-
- AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- attributionSource);
- return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
+ return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
}
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
@@ -1482,7 +1479,7 @@
}
}
- metadata.channel_mask = mChannelMask,
+ metadata.channel_mask = mChannelMask;
strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
*backInserter++ = metadata;
}
@@ -2019,7 +2016,6 @@
{
Buffer *pInBuffer;
Buffer inBuffer;
- bool outputBufferFull = false;
inBuffer.frameCount = frames;
inBuffer.raw = data;
@@ -2049,7 +2045,6 @@
ALOGV("%s(%d): thread %d no more output buffers; status %d",
__func__, mId,
(int)mThreadIoHandle, status);
- outputBufferFull = true;
break;
}
uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2746,6 +2741,25 @@
}
}
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+
+ // Do not forward PatchRecord metadata with unspecified audio source
+ if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+ return;
+ }
+
+ // No track is invalid as this is called after prepareTrack_l in the same critical section
+ record_track_metadata_v7_t metadata;
+ metadata.base = {
+ .source = mAttr.source,
+ .gain = 1, // capture tracks do not have volumes
+ };
+ metadata.channel_mask = mChannelMask;
+ strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+ *backInserter++ = metadata;
+}
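The capture-side refactor in Threads.cpp hands each active track a std::back_insert_iterator and lets RecordTrack::copyMetadataTo decide whether to contribute an entry. A minimal sketch of that iterator pattern, with a hypothetical metadata struct standing in for record_track_metadata_v7_t:

    #include <cstdio>
    #include <iterator>
    #include <vector>

    struct TrackMetadata { int source; float gain; };  // hypothetical stand-in

    using MetadataInserter = std::back_insert_iterator<std::vector<TrackMetadata>>;

    // Mirrors copyMetadataTo: skip "default" sources, otherwise append one entry through the inserter.
    static void copyMetadataTo(MetadataInserter& backInserter, int source) {
        if (source == 0) return;  // analogous to skipping AUDIO_SOURCE_DEFAULT
        *backInserter++ = TrackMetadata{source, 1.0f};
    }

    int main() {
        std::vector<TrackMetadata> tracks;
        auto backInserter = std::back_inserter(tracks);
        copyMetadataTo(backInserter, 0);  // skipped
        copyMetadataTo(backInserter, 1);  // appended (non-default source)
        printf("collected %zu entries\n", tracks.size());  // prints 1
        return 0;
    }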
// ----------------------------------------------------------------------------
#undef LOG_TAG
@@ -2759,9 +2773,10 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout)
+ const Timeout& timeout,
+ audio_source_t source)
: RecordTrack(recordThread, NULL,
- audio_attributes_t{} /* currently unused for patch track */,
+ audio_attributes_t{ .source = source } ,
sampleRate, format, channelMask, frameCount,
buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2872,9 +2887,10 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ audio_source_t source)
: PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
- nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+ nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
mPatchRecordAudioBufferProvider(*this),
mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0431619..7119b85 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -239,12 +239,13 @@
}
void setUseSwBridge() { mUseSwBridge = true; }
bool useSwBridge() const { return mUseSwBridge; }
+ bool canCloseOutput() const { return mCloseOutput; }
bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
- void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
+ void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
@@ -258,6 +259,15 @@
wp<SwAudioOutputDescriptor> mSwOutput;
wp<HwAudioOutputDescriptor> mHwOutput;
bool mUseSwBridge = false;
+ /**
+ * For either a HW bridge associated with a SwOutput (for activity / volume) or a SW bridge
+ * (also handling sample rendering in addition to activity & volume), an existing playback
+ * thread may be reused (e.g. one not opened at APM startup, or a Direct Output).
+ * When such a reused output is no longer in use, the AudioFlinger patch must be updated to
+ * refine the output device(s) information and ensure the correct behavior of
+ * AudioDeviceCallback.
+ */
+ bool mCloseOutput = false;
};
/**
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 713b0ac..8b6866e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -105,9 +105,11 @@
{
}
-void SourceClientDescriptor::setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput)
+void SourceClientDescriptor::setSwOutput(
+ const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput)
{
mSwOutput = swOutput;
+ mCloseOutput = closeOutput;
}
void SourceClientDescriptor::setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 3518037..c9ba603 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -782,7 +782,8 @@
ALOGV("%s between source %s and sink %s", __func__,
srcDevice->toString().c_str(), sinkDevice->toString().c_str());
auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
- const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+ const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
struct audio_port_config source = {};
srcDevice->toAudioPortConfig(&source);
mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -3023,6 +3024,10 @@
status_t status = NO_ERROR;
IVolumeCurves &curves = getVolumeCurves(attributes);
VolumeSource vs = toVolumeSource(group);
+ // AUDIO_STREAM_BLUETOOTH_SCO is only used for volume control, so we remap it
+ // to AUDIO_STREAM_VOICE_CALL to match the relevant playback activity
+ VolumeSource activityVs = (vs == toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false)) ?
+ toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
status = setVolumeCurveIndex(index, device, curves);
@@ -3061,7 +3066,8 @@
if (curDevices.erase(AUDIO_DEVICE_OUT_SPEAKER_SAFE)) {
curDevices.insert(AUDIO_DEVICE_OUT_SPEAKER);
}
- if (!(desc->isActive(vs) || isInCall())) {
+
+ if (!(desc->isActive(activityVs) || isInCallOrScreening())) {
continue;
}
if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME &&
@@ -3095,7 +3101,7 @@
bool isPreempted = false;
bool isHigherPriority = productStrategy < strategy;
for (const auto &client : activeClients) {
- if (isHigherPriority && (client->volumeSource() != vs)) {
+ if (isHigherPriority && (client->volumeSource() != activityVs)) {
ALOGV("%s: Strategy=%d (\nrequester:\n"
" group %d, volumeGroup=%d attributes=%s)\n"
" higher priority source active:\n"
@@ -3108,7 +3114,7 @@
break;
}
// However, continue for loop to ensure no higher prio clients running on output
- if (client->volumeSource() == vs) {
+ if (client->volumeSource() == activityVs) {
applyVolume = true;
}
}
@@ -4549,7 +4555,7 @@
// In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
// in config XML to reach the sink so that is can be declared as available.
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+ sp<SwAudioOutputDescriptor> outputDesc;
if (!sourceDesc->isInternal()) {
// take care of dynamic routing for SwOutput selection,
audio_attributes_t attributes = sourceDesc->attributes();
@@ -4578,7 +4584,8 @@
ALOGE("%s output is duplicated", __func__);
return INVALID_OPERATION;
}
- sourceDesc->setSwOutput(outputDesc);
+ bool closeOutput = outputDesc->mDirectOpenCount != 0;
+ sourceDesc->setSwOutput(outputDesc, closeOutput);
} else {
// Same for "raw patches" aka created from createAudioPatch API
SortedVector<audio_io_handle_t> outputs =
@@ -4597,7 +4604,7 @@
__func__, sinkDevice->toString().c_str());
return INVALID_OPERATION;
}
- sourceDesc->setSwOutput(outputDesc);
+ sourceDesc->setSwOutput(outputDesc, /* closeOutput= */ false);
}
// create a software bridge in PatchPanel if:
// - source and sink devices are on different HW modules OR
@@ -4619,7 +4626,8 @@
audio_port_config srcMixPortConfig = {};
outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
// for volume control, we may need a valid stream
- srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+ srcMixPortConfig.ext.mix.usecase.stream =
+ (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
AUDIO_STREAM_PATCH;
patchBuilder.addSource(srcMixPortConfig);
@@ -4726,17 +4734,29 @@
// releaseOutput has already called closeOutput in case of direct output
return NO_ERROR;
}
- if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
- resetOutputDevice(outputDesc);
- } else {
- // Reuse patch handle if still valid / do not force rerouting if still routed
- patchHandle = outputDesc->getPatchHandle();
- setOutputDevices(outputDesc,
- getNewOutputDevices(outputDesc, true /*fromCache*/),
- patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
- 0,
- patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
- }
+ patchHandle = outputDesc->getPatchHandle();
+ // When a SW bridge is released, the mixer used by this bridge releases its patch on the
+ // AudioFlinger side, so the mixer audio patch must be recreated. Reuse the patch handle
+ // to force AudioFlinger to remove the initial mixer patch and update the HAL patch handle
+ // (preventing leaks).
+ // When using a HW bridge, force reconsidering the device only if an existing output is not
+ // being reused and there is no more activity on the output (which forces it to close).
+ bool force = sourceDesc->useSwBridge() ||
+ (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+ // The APM pattern is to always keep outputs opened / patches realized for reachable devices.
+ // Updating the device may yield NONE (empty); combined with force, this releases the patch.
+ // Reconsider the device only in the following cases:
+ // 1 / Active Output
+ // 2 / Inactive Output previously hosting HwBridge
+ // 3 / Inactive Output previously hosting SwBridge that can be closed.
+ bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
+ sourceDesc->canCloseOutput();
+ setOutputDevices(outputDesc,
+ updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
+ outputDesc->devices(),
+ force,
+ 0,
+ patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
} else {
return BAD_VALUE;
}
@@ -6343,10 +6363,10 @@
SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
uint32_t maxLatency = 0;
- bool invalidate = false;
+ std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
// take into account dynamic audio policies related changes: if a client is now associated
// to a different policy mix than at creation time, invalidate corresponding stream
- for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+ for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
if (desc->isDuplicated()) {
continue;
@@ -6362,16 +6382,15 @@
continue;
}
if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
- invalidate = true;
- if (desc->isStrategyActive(psId)) {
+ if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
maxLatency = desc->latency();
}
- break;
+ invalidatedOutputs.push_back(desc);
}
}
}
- if (srcOutputs != dstOutputs || invalidate) {
+ if (srcOutputs != dstOutputs || !invalidatedOutputs.empty()) {
// get maximum latency of all source outputs to determine the minimum mute time guaranteeing
// audio from invalidated tracks will be rendered when unmuting
for (audio_io_handle_t srcOut : srcOutputs) {
@@ -6382,8 +6401,7 @@
maxLatency = desc->latency();
}
- if (invalidate) continue;
-
+ bool invalidate = false;
for (auto client : desc->clientsList(false /*activeOnly*/)) {
if (desc->isDuplicated() || !desc->mProfile->isDirectOutput()) {
// a client on a non direct outputs has necessarily a linear PCM format
@@ -6411,21 +6429,14 @@
}
}
}
- }
-
- ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
- "%s: strategy %d, moving from output %s to output %s", __func__, psId,
- std::to_string(srcOutputs[0]).c_str(),
- std::to_string(dstOutputs[0]).c_str());
- // mute strategy while moving tracks from one output to another
- for (audio_io_handle_t srcOut : srcOutputs) {
- sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
- if (desc == nullptr) continue;
-
- if (desc->isStrategyActive(psId)) {
- setStrategyMute(psId, true, desc);
- setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
- newDevices.types());
+ // mute strategy while moving tracks from one output to another
+ if (invalidate) {
+ invalidatedOutputs.push_back(desc);
+ if (desc->isStrategyActive(psId)) {
+ setStrategyMute(psId, true, desc);
+ setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
+ newDevices.types());
+ }
}
sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
@@ -6433,19 +6444,21 @@
}
}
+ ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
+ "%s: strategy %d, moving from output %s to output %s", __func__, psId,
+ std::to_string(srcOutputs[0]).c_str(),
+ std::to_string(dstOutputs[0]).c_str());
+
// Move effects associated to this stream from previous output to new output
if (followsSameRouting(attr, attributes_initializer(AUDIO_USAGE_MEDIA))) {
selectOutputForMusicEffects();
}
// Move tracks associated to this stream (and linked) from previous output to new output
- if (invalidate) {
+ if (!invalidatedOutputs.empty()) {
for (auto stream : mEngine->getStreamTypesForProductStrategy(psId)) {
mpClientInterface->invalidateStream(stream);
}
- for (audio_io_handle_t srcOut : srcOutputs) {
- sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
- if (desc == nullptr) continue;
-
+ for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
desc->setTracksInvalidatedStatusByStrategy(psId);
}
}
@@ -7506,14 +7519,18 @@
return is_state_in_call(state);
}
-bool AudioPolicyManager::isCallAudioAccessible()
-{
+bool AudioPolicyManager::isCallAudioAccessible() const {
audio_mode_t mode = mEngine->getPhoneState();
return (mode == AUDIO_MODE_IN_CALL)
|| (mode == AUDIO_MODE_CALL_SCREEN)
|| (mode == AUDIO_MODE_CALL_REDIRECT);
}
+bool AudioPolicyManager::isInCallOrScreening() const {
+ audio_mode_t mode = mEngine->getPhoneState();
+ return isStateInCall(mode) || mode == AUDIO_MODE_CALL_SCREEN;
+}
+
void AudioPolicyManager::cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc)
{
for (ssize_t i = (ssize_t)mAudioSources.size() - 1; i >= 0; i--) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 87e6974..a69e088 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -602,7 +602,9 @@
// true if given state represents a device in a telephony or VoIP call
virtual bool isStateInCall(int state) const;
// true if playback to call TX or capture from call RX is possible
- bool isCallAudioAccessible();
+ bool isCallAudioAccessible() const;
+ // true if device is in a telephony or VoIP call or call screening is active
+ bool isInCallOrScreening() const;
// when a device is connected, checks if an open output can be routed
// to this device. If none is open, tries to open one of the available outputs.
@@ -639,6 +641,10 @@
return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
}
+ bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+ return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+ }
+
void connectTelephonyRxAudioSource();
void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index df49bba..49224c5 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -352,31 +352,20 @@
ALOGV("%s()", __func__);
Mutex::Autolock _l(mLock);
- // TODO b/182392553: refactor or remove
- AttributionSourceState adjAttributionSource = attributionSource;
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
- int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_uid_t_int32_t(callingUid));
- ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
- "%s uid %d tried to pass itself off as %d", __func__,
- callingUidAidl, attributionSource.uid);
- adjAttributionSource.uid = callingUidAidl;
- }
if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+ aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
}
if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
- && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+ && !bypassInterruptionPolicyAllowed(attributionSource)) {
attr.flags = static_cast<audio_flags_mask_t>(
attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
}
if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
- if (!accessUltrasoundAllowed(adjAttributionSource)) {
+ if (!accessUltrasoundAllowed(attributionSource)) {
ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
- __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
}
@@ -386,7 +375,7 @@
bool isSpatialized = false;
status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
&stream,
- adjAttributionSource,
+ attributionSource,
&config,
&flags, &selectedDeviceId, &portId,
&secondaryOutputs,
@@ -401,20 +390,20 @@
break;
case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
- && !callAudioInterceptionAllowed(adjAttributionSource)) {
+ && !callAudioInterceptionAllowed(attributionSource)) {
ALOGE("%s() permission denied: call redirection not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
- } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+ } else if (!modifyPhoneStateAllowed(attributionSource)) {
ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
}
break;
case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
- if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
+ if (!modifyAudioRoutingAllowed(attributionSource)) {
ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
}
break;
@@ -427,7 +416,7 @@
if (result == NO_ERROR) {
sp<AudioPlaybackClient> client =
- new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+ new AudioPlaybackClient(attr, output, attributionSource, session,
portId, selectedDeviceId, stream, isSpatialized);
mAudioPlaybackClients.add(portId, client);
@@ -613,33 +602,8 @@
return binderStatusFromStatusT(BAD_VALUE);
}
- // Make sure attribution source represents the current caller
- AttributionSourceState adjAttributionSource = attributionSource;
- // TODO b/182392553: refactor or remove
- bool updatePid = (attributionSource.pid == -1);
- const uid_t callingUid =IPCThreadState::self()->getCallingUid();
- const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
- attributionSource.uid));
- if (!isAudioServerOrMediaServerUid(callingUid)) {
- ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
- "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
- currentUid);
- adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
- callingUid));
- updatePid = true;
- }
-
- if (updatePid) {
- const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
- IPCThreadState::self()->getCallingPid()));
- ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
- adjAttributionSource.pid = callingPid;
- }
-
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
- adjAttributionSource)));
+ attributionSource)));
// check calling permissions.
// Capturing from the following sources does not require permission RECORD_AUDIO
@@ -650,17 +614,17 @@
// type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
// is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
// - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
- if (!(recordingAllowed(adjAttributionSource, inputSource)
+ if (!(recordingAllowed(attributionSource, inputSource)
|| inputSource == AUDIO_SOURCE_FM_TUNER
|| inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
|| inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
ALOGE("%s permission denied: recording not allowed for %s",
- __func__, adjAttributionSource.toString().c_str());
+ __func__, attributionSource.toString().c_str());
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
- bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+ bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
+ bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
|| inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
|| inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -674,11 +638,11 @@
}
if (inputSource == AUDIO_SOURCE_FM_TUNER
&& !canCaptureOutput
- && !captureTunerAudioInputAllowed(adjAttributionSource)) {
+ && !captureTunerAudioInputAllowed(attributionSource)) {
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
+ bool canCaptureHotword = captureHotwordAllowed(attributionSource);
if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
return binderStatusFromStatusT(PERMISSION_DENIED);
}
@@ -686,14 +650,14 @@
if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
&& !canCaptureHotword) {
ALOGE("%s: permission denied: hotword mode not allowed"
- " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
- if (!accessUltrasoundAllowed(adjAttributionSource)) {
+ if (!accessUltrasoundAllowed(attributionSource)) {
ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
- __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
}
@@ -708,7 +672,7 @@
AutoCallerClear acc;
// the audio_in_acoustics_t parameter is ignored by get_input()
status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
- adjAttributionSource, &config,
+ attributionSource, &config,
flags, &selectedDeviceId,
&inputType, &portId);
@@ -737,7 +701,7 @@
}
break;
case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
- if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+ if (!(modifyAudioRoutingAllowed(attributionSource)
|| ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
&& canInterceptCallAudio))) {
ALOGE("%s permission denied for remote submix capture", __func__);
@@ -760,7 +724,7 @@
}
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
- selectedDeviceId, adjAttributionSource,
+ selectedDeviceId, attributionSource,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
mAudioRecordClients.add(portId, client);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 281785e..09b6f3b 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1816,12 +1816,14 @@
void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
SensorPrivacyManager spm;
mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
+ (void)spm.addToggleSensorPrivacyListener(this);
spm.addSensorPrivacyListener(this);
}
void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
SensorPrivacyManager spm;
spm.removeSensorPrivacyListener(this);
+ spm.removeToggleSensorPrivacyListener(this);
}
bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2388b79..2a04658 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -203,6 +203,7 @@
status_t res;
std::vector<std::string> deviceIds;
+ std::unordered_map<std::string, std::set<std::string>> unavailPhysicalIds;
{
Mutex::Autolock l(mServiceLock);
@@ -233,7 +234,7 @@
ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
}
- deviceIds = mCameraProviderManager->getCameraDeviceIds();
+ deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
}
@@ -242,6 +243,12 @@
if (getCameraState(id8) == nullptr) {
onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
}
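+ // Physical cameras that the provider reported as unavailable during
+ // enumeration are published as NOT_PRESENT so their status is consistent
+ // from startup.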
+ if (unavailPhysicalIds.count(cameraId) > 0) {
+ for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
+ String8 physicalId8 = String8(physicalId.c_str());
+ onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+ }
+ }
}
// Derive primary rear/front cameras, and filter their characteristics.
@@ -335,7 +342,9 @@
int facing = -1;
int orientation = 0;
String8 cameraId8(cameraId.c_str());
- getDeviceVersion(cameraId8, /*out*/&facing, /*out*/&orientation);
+ int portraitRotation;
+ getDeviceVersion(cameraId8, /*overrideToPortrait*/false, /*out*/&portraitRotation,
+ /*out*/&facing, /*out*/&orientation);
if (facing == -1) {
ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
return;
@@ -495,7 +504,7 @@
if (state == nullptr) {
ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
- __FUNCTION__, id.string(), physicalId.string());
+ __FUNCTION__, physicalId.string(), id.string());
return;
}
@@ -675,7 +684,7 @@
return Status::ok();
}
-Status CameraService::getCameraInfo(int cameraId,
+Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
CameraInfo* cameraInfo) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
@@ -703,8 +712,9 @@
}
Status ret = Status::ok();
+ int portraitRotation;
status_t err = mCameraProviderManager->getCameraInfo(
- cameraIdStr.c_str(), cameraInfo);
+ cameraIdStr.c_str(), overrideToPortrait, &portraitRotation, cameraInfo);
if (err != OK) {
ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
"Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -739,7 +749,7 @@
}
Status CameraService::getCameraCharacteristics(const String16& cameraId,
- int targetSdkVersion, CameraMetadata* cameraInfo) {
+ int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
ATRACE_CALL();
if (!cameraInfo) {
ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
@@ -766,7 +776,7 @@
SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
cameraIdStr, targetSdkVersion);
status_t res = mCameraProviderManager->getCameraCharacteristics(
- cameraIdStr, overrideForPerfClass, cameraInfo);
+ cameraIdStr, overrideForPerfClass, cameraInfo, overrideToPortrait);
if (res != OK) {
if (res == NAME_NOT_FOUND) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -887,8 +897,8 @@
BasicClient::BasicClient::sCameraService = nullptr;
}
-std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
- int* orientation) {
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId,
+ bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
ATRACE_CALL();
int deviceVersion = 0;
@@ -907,7 +917,8 @@
hardware::CameraInfo info;
if (facing) {
- res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
+ res = mCameraProviderManager->getCameraInfo(cameraId.string(), overrideToPortrait,
+ portraitRotation, &info);
if (res != OK) {
return std::make_pair(-1, IPCTransport::INVALID);
}
@@ -942,7 +953,8 @@
const std::optional<String16>& featureId, const String8& cameraId,
int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
- apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+ apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+ /*out*/sp<BasicClient>* client) {
// For HIDL devices
if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
// Create CameraClient based on device version reported by the HAL.
@@ -975,13 +987,16 @@
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, featureId,
cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
- servicePid, overrideForPerfClass);
+ servicePid, overrideForPerfClass, overrideToPortrait);
+ ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
+ overrideToPortrait);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
*client = new CameraDeviceClient(cameraService, tmp, packageName,
systemNativeClient, featureId, cameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass);
+ clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+ ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
}
return Status::ok();
}
@@ -1071,7 +1086,7 @@
sp<ICameraClient>{nullptr}, id, cameraId,
internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true, /*out*/ tmp)
).isOk()) {
ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
}
@@ -1587,6 +1602,7 @@
int clientUid,
int clientPid,
int targetSdkVersion,
+ bool overrideToPortrait,
/*out*/
sp<ICamera>* device) {
@@ -1597,7 +1613,8 @@
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
- /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
+ /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
+ overrideToPortrait, /*out*/client);
if(!ret.isOk()) {
logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1672,6 +1689,7 @@
const String16& clientPackageName,
const std::optional<String16>& clientFeatureId,
int clientUid, int oomScoreOffset, int targetSdkVersion,
+ bool overrideToPortrait,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device) {
@@ -1725,7 +1743,7 @@
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
/*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
- targetSdkVersion, /*out*/client);
+ targetSdkVersion, overrideToPortrait, /*out*/client);
if(!ret.isOk()) {
logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1787,7 +1805,7 @@
int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
- /*out*/sp<CLIENT>& device) {
+ bool overrideToPortrait, /*out*/sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
bool isNonSystemNdk = false;
@@ -1886,8 +1904,10 @@
// give flashlight a chance to close devices if necessary.
mFlashlight->prepareDeviceOpen(cameraId);
+ int portraitRotation;
auto deviceVersionAndTransport =
- getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+ getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+ /*out*/&facing, /*out*/&orientation);
if (facing == -1) {
ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.string());
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1901,7 +1921,7 @@
clientFeatureId, cameraId, api1CameraId, facing, orientation,
clientPid, clientUid, getpid(),
deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
- /*out*/&tmp)).isOk()) {
+ overrideToPortrait, /*out*/&tmp)).isOk()) {
return ret;
}
client = static_cast<CLIENT*>(tmp.get());
@@ -1961,8 +1981,25 @@
// Set rotate-and-crop override behavior
if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+ } else if (overrideToPortrait && portraitRotation != 0) {
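+ // Map the portrait rotation applied to the reported orientation to the
+ // equivalent ROTATE_AND_CROP override so the captured output matches.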
+ uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+ switch (portraitRotation) {
+ case 90:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+ break;
+ case 180:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+ break;
+ case 270:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+ break;
+ default:
+ ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+ break;
+ }
+ client->setRotateAndCropOverride(rotateAndCropMode);
} else {
- client->setRotateAndCropOverride(
+ client->setRotateAndCropOverride(
CameraServiceProxyWrapper::getRotateAndCropOverride(
clientPackageName, facing, multiuser_get_user_id(clientUid)));
}
@@ -2466,6 +2503,11 @@
ATRACE_CALL();
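+ // Remember the latest folded/unfolded device state under mServiceLock before
+ // forwarding it to the provider manager.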
+ {
+ Mutex::Autolock lock(mServiceLock);
+ mDeviceState = newState;
+ }
+
mCameraProviderManager->notifyDeviceStateChange(newState);
return Status::ok();
@@ -2499,12 +2541,12 @@
for (auto& current : clients) {
if (current != nullptr) {
const auto basicClient = current->getValue();
- if (basicClient.get() != nullptr) {
- basicClient->setRotateAndCropOverride(
- CameraServiceProxyWrapper::getRotateAndCropOverride(
- basicClient->getPackageName(),
- basicClient->getCameraFacing(),
- multiuser_get_user_id(basicClient->getClientUid())));
+ if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
+ basicClient->setRotateAndCropOverride(
+ CameraServiceProxyWrapper::getRotateAndCropOverride(
+ basicClient->getPackageName(),
+ basicClient->getCameraFacing(),
+ multiuser_get_user_id(basicClient->getClientUid())));
}
}
}
@@ -2776,7 +2818,8 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- auto deviceVersionAndTransport = getDeviceVersion(id);
+ int portraitRotation;
+ auto deviceVersionAndTransport = getDeviceVersion(id, false, &portraitRotation);
if (deviceVersionAndTransport.first == -1) {
String8 msg = String8::format("Unknown camera ID %s", id.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -3261,13 +3304,13 @@
const String8& cameraIdStr,
int api1CameraId, int cameraFacing, int sensorOrientation,
int clientPid, uid_t clientUid,
- int servicePid) :
+ int servicePid, bool overrideToPortrait) :
CameraService::BasicClient(cameraService,
IInterface::asBinder(cameraClient),
clientPackageName, systemNativeClient, clientFeatureId,
cameraIdStr, cameraFacing, sensorOrientation,
clientPid, clientUid,
- servicePid),
+ servicePid, overrideToPortrait),
mCameraId(api1CameraId)
{
int callingPid = CameraThreadState::getCallingPid();
@@ -3297,7 +3340,7 @@
const String16& clientPackageName, bool nativeClient,
const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
- int servicePid):
+ int servicePid, bool overrideToPortrait):
mDestructionStarted(false),
mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -3305,6 +3348,7 @@
mClientPid(clientPid), mClientUid(clientUid),
mServicePid(servicePid),
mDisconnected(false), mUidIsTrusted(false),
+ mOverrideToPortrait(overrideToPortrait),
mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
mRemoteBinder(remoteCallback),
mOpsActive(false),
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f2d15ef..840e9b6 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -127,10 +127,10 @@
// ICameraService
virtual binder::Status getNumberOfCameras(int32_t type, int32_t* numCameras);
- virtual binder::Status getCameraInfo(int cameraId,
- hardware::CameraInfo* cameraInfo);
+ virtual binder::Status getCameraInfo(int cameraId, bool overrideToPortrait,
+ hardware::CameraInfo* cameraInfo) override;
virtual binder::Status getCameraCharacteristics(const String16& cameraId,
- int targetSdkVersion, CameraMetadata* cameraInfo);
+ int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
virtual binder::Status getCameraVendorTagDescriptor(
/*out*/
hardware::camera2::params::VendorTagDescriptor* desc);
@@ -141,13 +141,14 @@
virtual binder::Status connect(const sp<hardware::ICameraClient>& cameraClient,
int32_t cameraId, const String16& clientPackageName,
int32_t clientUid, int clientPid, int targetSdkVersion,
+ bool overrideToPortrait,
/*out*/
- sp<hardware::ICamera>* device);
+ sp<hardware::ICamera>* device) override;
virtual binder::Status connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
- int32_t clientUid, int scoreOffset, int targetSdkVersion,
+ int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -243,8 +244,9 @@
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
- std::pair<int, IPCTransport> getDeviceVersion(const String8& cameraId, int* facing = nullptr,
- int* orientation = nullptr);
+ std::pair<int, IPCTransport> getDeviceVersion(const String8& cameraId,
+ bool overrideToPortrait, int* portraitRotation,
+ int* facing = nullptr, int* orientation = nullptr);
/////////////////////////////////////////////////////////////////////
// Methods to be used in CameraService class tests only
@@ -282,6 +284,10 @@
return mRemoteBinder;
}
+ bool getOverrideToPortrait() const {
+ return mOverrideToPortrait;
+ }
+
// Disallows dumping over binder interface
virtual status_t dump(int fd, const Vector<String16>& args);
// Internal dump method to be called by CameraService
@@ -361,7 +367,8 @@
int sensorOrientation,
int clientPid,
uid_t clientUid,
- int servicePid);
+ int servicePid,
+ bool overrideToPortrait);
virtual ~BasicClient();
@@ -384,6 +391,7 @@
const pid_t mServicePid;
bool mDisconnected;
bool mUidIsTrusted;
+ bool mOverrideToPortrait;
mutable Mutex mAudioRestrictionLock;
int32_t mAudioRestriction;
@@ -473,7 +481,8 @@
int sensorOrientation,
int clientPid,
uid_t clientUid,
- int servicePid);
+ int servicePid,
+ bool overrideToPortrait);
~Client();
// return our camera client
@@ -843,7 +852,7 @@
int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
- /*out*/sp<CLIENT>& device);
+ bool overrideToPortrait, /*out*/sp<CLIENT>& device);
// Lock guarding camera service state
Mutex mServiceLock;
@@ -1258,7 +1267,7 @@
const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
int clientPid, uid_t clientUid, int servicePid,
std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
- bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
+ bool overrideForPerfClass, bool overrideToPortrait, /*out*/sp<BasicClient>* client);
status_t checkCameraAccess(const String16& opPackageName);
@@ -1355,6 +1364,9 @@
// Guard mInjectionInternalCamId and mInjectionInitPending.
Mutex mInjectionParametersLock;
+ // Track the folded/unfolded device state. 0 == UNFOLDED, 4 == FOLDED
+ int64_t mDeviceState;
+
void updateTorchUidMapLocked(const String16& cameraId, int uid);
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 20bf73d..0887ced 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -61,11 +61,13 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideForPerfClass):
+ bool overrideForPerfClass,
+ bool overrideToPortrait):
Camera2ClientBase(cameraService, cameraClient, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
+ clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ /*legacyClient*/ true),
mParameters(api1CameraId, cameraFacing)
{
ATRACE_CALL();
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 8081efa..9c540a4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -107,7 +107,8 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideForPerfClass);
+ bool overrideForPerfClass,
+ bool overrideToPortrait);
virtual ~Camera2Client();
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 2daacd1..74423e5 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -59,6 +59,8 @@
m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+
+ mLastFocalLength = l.mParameters.params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
}
}
@@ -92,9 +94,32 @@
client->notifyRequestId(mCurrentRequestId);
}
+ processLensState(frame.mMetadata, client);
+
return FrameProcessorBase::processSingleFrame(frame, device);
}
+void FrameProcessor::processLensState(const CameraMetadata &frame,
+ const sp<Camera2Client> &client) {
+ ATRACE_CALL();
+ camera_metadata_ro_entry_t entry;
+
+ entry = frame.find(ANDROID_LENS_FOCAL_LENGTH);
+ if (entry.count == 0) {
+ return;
+ }
+
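+ // Only propagate the focal length into the client's parameters when it has
+ // actually changed; compare with a small epsilon since the HAL reports a float.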
+ if (fabs(entry.data.f[0] - mLastFocalLength) > 0.001f) {
+ SharedParameters::Lock l(client->getParameters());
+ l.mParameters.params.setFloat(
+ CameraParameters::KEY_FOCAL_LENGTH,
+ entry.data.f[0]);
+ l.mParameters.paramsFlattened = l.mParameters.params.flatten();
+
+ mLastFocalLength = entry.data.f[0];
+ }
+}
+
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
const sp<Camera2Client> &client) {
status_t res = BAD_VALUE;
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index bb985f6..6c8d221 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -57,6 +57,9 @@
virtual bool processSingleFrame(CaptureResult &frame,
const sp<FrameProducer> &device);
+ void processLensState(const CameraMetadata &frame,
+ const sp<Camera2Client> &client);
+
status_t processFaceDetect(const CameraMetadata &frame,
const sp<Camera2Client> &client);
@@ -110,6 +113,9 @@
// Emit FaceDetection event to java if faces changed
void callbackFaceDetection(const sp<Camera2Client>& client,
const camera_frame_metadata &metadata);
+
+ // Track most recent focal length sent by the camera device
+ float mLastFocalLength;
};
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 2a8a103..dfc451b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -66,7 +66,8 @@
int sensorOrientation,
int clientPid,
uid_t clientUid,
- int servicePid) :
+ int servicePid,
+ bool overrideToPortrait) :
BasicClient(cameraService,
IInterface::asBinder(remoteCallback),
clientPackageName,
@@ -77,7 +78,8 @@
sensorOrientation,
clientPid,
clientUid,
- servicePid),
+ servicePid,
+ overrideToPortrait),
mRemoteCallback(remoteCallback) {
// We don't need it for API2 clients, but Camera2ClientBase requires it.
(void) api1CameraId;
@@ -96,10 +98,11 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideForPerfClass) :
+ bool overrideForPerfClass,
+ bool overrideToPortrait) :
Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass),
+ clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45915ba..6bb64d6 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -58,7 +58,8 @@
int sensorOrientation,
int clientPid,
uid_t clientUid,
- int servicePid);
+ int servicePid,
+ bool overrideToPortrait);
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
};
@@ -187,7 +188,8 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideForPerfClass);
+ bool overrideForPerfClass,
+ bool overrideToPortrait);
virtual ~CameraDeviceClient();
virtual status_t initialize(sp<CameraProviderManager> manager,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 9ea1093..8edb64a 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -56,7 +56,8 @@
IInterface::asBinder(remoteCallback),
// (v)ndk doesn't have offline session support
clientPackageName, /*overridePackageName*/false, clientFeatureId,
- cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
+ cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
+ /*overrideToPortrait*/false),
mRemoteCallback(remoteCallback), mOfflineSession(session),
mCompositeStreamMap(offlineCompositeStreamMap) {}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 71965f2..cd57299 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1161,11 +1161,13 @@
inputFrame.fileFd = -1;
// Fill in HEIC header
- uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
- CameraBlob *blobHeader = (CameraBlob *)header;
// Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
- blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
- blobHeader->blobSizeBytes = fSize;
+ uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
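+ // Build the blob header locally and memcpy it into the destination, rather
+ // than writing through a pointer cast which may not be suitably aligned.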
+ CameraBlob blobHeader = {
+ .blobId = static_cast<CameraBlobId>(0x00FE),
+ .blobSizeBytes = static_cast<int32_t>(fSize)
+ };
+ memcpy(header, &blobHeader, sizeof(CameraBlob));
res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
if (res != OK) {
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 7d98a0b..ad24392 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -60,10 +60,11 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
+ bool overrideToPortrait,
bool legacyClient):
TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid),
+ clientUid, servicePid, overrideToPortrait),
mSharedCameraCallbacks(remoteCallback),
mDeviceActive(false), mApi1CameraId(api1CameraId)
{
@@ -117,12 +118,12 @@
case IPCTransport::HIDL:
mDevice =
new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
- mLegacyClient);
+ TClientBase::mOverrideToPortrait, mLegacyClient);
break;
case IPCTransport::AIDL:
mDevice =
new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
- mLegacyClient);
+ TClientBase::mOverrideToPortrait, mLegacyClient);
break;
default:
ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index e51d25d..d2dcdb1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -59,6 +59,7 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
+ bool overrideToPortrait,
bool legacyClient = false);
virtual ~Camera2ClientBase();
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index cd23250..3aab0b1 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -197,12 +197,17 @@
return std::make_pair(systemCameraCount, publicCameraCount);
}
-std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds(std::unordered_map<
+ std::string, std::set<std::string>>* unavailablePhysicalIds) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
std::vector<std::string> deviceIds;
for (auto& provider : mProviders) {
for (auto& id : provider->mUniqueCameraIds) {
deviceIds.push_back(id);
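+ // If requested, also report which physical cameras of this logical camera
+ // are currently unavailable.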
+ if (unavailablePhysicalIds != nullptr &&
+ provider->mUnavailablePhysicalCameras.count(id) > 0) {
+ (*unavailablePhysicalIds)[id] = provider->mUnavailablePhysicalCameras.at(id);
+ }
}
}
return deviceIds;
@@ -318,13 +323,13 @@
}
status_t CameraProviderManager::getCameraInfo(const std::string &id,
- hardware::CameraInfo* info) const {
+ bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
- return deviceInfo->getCameraInfo(info);
+ return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
}
status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -356,9 +361,11 @@
}
status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
- bool overrideForPerfClass, CameraMetadata* characteristics) const {
+ bool overrideForPerfClass, CameraMetadata* characteristics,
+ bool overrideToPortrait) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
- return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
+ return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
+ overrideToPortrait);
}
status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -843,9 +850,6 @@
void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
const std::vector<std::string> &devices) {
-
- sp<StatusListener> listener = mManager->getStatusListener();
-
for (auto& device : devices) {
std::string id;
status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
@@ -860,38 +864,22 @@
mProviderName.c_str(), mDevices.size());
// Process cached status callbacks
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
- std::make_unique<std::vector<CameraStatusInfoT>>();
{
std::lock_guard<std::mutex> lock(mInitLock);
for (auto& statusInfo : mCachedStatus) {
std::string id, physicalId;
- status_t res = OK;
if (statusInfo.isPhysicalCameraStatus) {
- res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+ physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
} else {
- res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
- }
- if (res == OK) {
- cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
- id.c_str(), physicalId.c_str(), statusInfo.status);
+ cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
}
}
mCachedStatus.clear();
mInitialized = true;
}
-
- // The cached status change callbacks cannot be fired directly from this
- // function, due to same-thread deadlock trying to acquire mInterfaceMutex
- // twice.
- if (listener != nullptr) {
- mInitialStatusCallbackFuture = std::async(std::launch::async,
- &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
- listener, std::move(cachedStatus));
- }
}
CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
@@ -1961,6 +1949,7 @@
for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
if ((*it)->mId == id) {
mUniqueCameraIds.erase(id);
+ mUnavailablePhysicalCameras.erase(id);
if ((*it)->isAPI1Compatible()) {
mUniqueAPI1CompatibleCameraIds.erase(std::remove(
mUniqueAPI1CompatibleCameraIds.begin(),
@@ -2031,7 +2020,9 @@
dprintf(fd, " Has a flash unit: %s\n",
device->hasFlashUnit() ? "true" : "false");
hardware::CameraInfo info;
- status_t res = device->getCameraInfo(&info);
+ int portraitRotation;
+ status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+ &info);
if (res != OK) {
dprintf(fd, " <Error reading camera info: %s (%d)>\n",
strerror(-res), res);
@@ -2041,7 +2032,8 @@
dprintf(fd, " Orientation: %d\n", info.orientation);
}
CameraMetadata info2;
- res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2);
+ res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
+ /*overrideToPortrait*/true);
if (res == INVALID_OPERATION) {
dprintf(fd, " API2 not directly supported\n");
} else if (res != OK) {
@@ -2228,6 +2220,15 @@
return BAD_VALUE;
}
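+ // Keep mUnavailablePhysicalCameras in sync with physical camera status
+ // changes: a physical camera is tracked as unavailable while its status is
+ // anything other than PRESENT.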
+ if (mUnavailablePhysicalCameras.count(cameraId) == 0) {
+ mUnavailablePhysicalCameras.emplace(cameraId, std::set<std::string>{});
+ }
+ if (newStatus != CameraDeviceStatus::PRESENT) {
+ mUnavailablePhysicalCameras[cameraId].insert(physicalCameraDeviceName);
+ } else {
+ mUnavailablePhysicalCameras[cameraId].erase(physicalCameraDeviceName);
+ }
+
*id = cameraId;
*physicalId = physicalCameraDeviceName.c_str();
return OK;
@@ -2286,20 +2287,6 @@
}
}
-void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
- sp<StatusListener> listener,
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
- for (auto& statusInfo : *cachedStatus) {
- if (statusInfo.isPhysicalCameraStatus) {
- listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
- String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
- } else {
- listener->onDeviceStatusChanged(
- String8(statusInfo.cameraId.c_str()), statusInfo.status);
- }
- }
-}
-
CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
const metadata_vendor_id_t tagId, const std::string &id,
uint16_t minorVersion,
@@ -2318,6 +2305,7 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
+ bool overrideToPortrait, int *portraitRotation,
hardware::CameraInfo *info) const {
if (info == nullptr) return BAD_VALUE;
@@ -2348,6 +2336,17 @@
return NAME_NOT_FOUND;
}
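+ // When overriding to portrait, report landscape-mounted sensors (orientation
+ // 0 or 180) as rotated by 90 degrees; front and back cameras rotate in
+ // opposite directions. portraitRotation tells the caller how much
+ // compensation rotate-and-crop needs to apply.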
+ if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+ *portraitRotation = 90;
+ if (info->facing == hardware::CAMERA_FACING_FRONT) {
+ info->orientation = (360 + info->orientation - 90) % 360;
+ } else {
+ info->orientation = (360 + info->orientation + 90) % 360;
+ }
+ } else {
+ *portraitRotation = 0;
+ }
+
return OK;
}
bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
@@ -2373,7 +2372,7 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
- bool overrideForPerfClass, CameraMetadata *characteristics) const {
+ bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
if (characteristics == nullptr) return BAD_VALUE;
if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2382,6 +2381,35 @@
*characteristics = mCameraCharacteristics;
}
+ if (overrideToPortrait) {
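+ // Apply the same portrait override to the API2 characteristics: rotate
+ // ANDROID_SENSOR_ORIENTATION for landscape-mounted sensors and drop
+ // ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, which no longer applies once the
+ // orientation is fixed.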
+ const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
+ const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
+ if (lensFacingEntry.count > 0 && sensorOrientationEntry.count > 0) {
+ uint8_t lensFacing = lensFacingEntry.data.u8[0];
+ int32_t sensorOrientation = sensorOrientationEntry.data.i32[0];
+ int32_t newSensorOrientation = sensorOrientation;
+
+ if (sensorOrientation == 0 || sensorOrientation == 180) {
+ if (lensFacing == ANDROID_LENS_FACING_FRONT) {
+ newSensorOrientation = (360 + sensorOrientation - 90) % 360;
+ } else if (lensFacing == ANDROID_LENS_FACING_BACK) {
+ newSensorOrientation = (360 + sensorOrientation + 90) % 360;
+ }
+ }
+
+ if (newSensorOrientation != sensorOrientation) {
+ ALOGV("%s: Update ANDROID_SENSOR_ORIENTATION for lens facing %d "
+ "from %d to %d", __FUNCTION__, lensFacing, sensorOrientation,
+ newSensorOrientation);
+ characteristics->update(ANDROID_SENSOR_ORIENTATION, &newSensorOrientation, 1);
+ }
+ }
+
+ if (characteristics->exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+ characteristics->erase(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+ }
+ }
+
return OK;
}
@@ -2649,9 +2677,6 @@
}
CameraProviderManager::ProviderInfo::~ProviderInfo() {
- if (mInitialStatusCallbackFuture.valid()) {
- mInitialStatusCallbackFuture.wait();
- }
// Destruction of ProviderInfo is only supposed to happen when the respective
// CameraProvider interface dies, so do not unregister callbacks.
}
@@ -2714,10 +2739,12 @@
}
status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
- bool overrideForPerfClass, CameraMetadata* characteristics) const {
+ bool overrideForPerfClass, CameraMetadata* characteristics,
+ bool overrideToPortrait) const {
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo != nullptr) {
- return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
+ return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
+ overrideToPortrait);
}
// Find hidden physical camera characteristics
@@ -2752,7 +2779,9 @@
combo.push_back(deviceId);
hardware::CameraInfo info;
- status_t res = deviceInfo->getCameraInfo(&info);
+ int portraitRotation;
+ status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+ &info);
if (res != OK) {
ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
continue;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index d049aff..e8d9a37 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -23,7 +23,6 @@
#include <set>
#include <string>
#include <mutex>
-#include <future>
#include <camera/camera2/ConcurrentCamera.h>
#include <camera/CameraParameters2.h>
@@ -220,7 +219,14 @@
*/
std::pair<int, int> getCameraCount() const;
- std::vector<std::string> getCameraDeviceIds() const;
+ /**
+ * Upon return, if unavailablePhysicalIds is not nullptr, it will contain
+ * all of the unavailable physical camera ids, represented in the form of:
+ * {[logicalCamera, {physicalCamera1, physicalCamera2, ...}], ...}.
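+ * For example, {["0", {"2", "3"}], ...} would mean that physical cameras "2"
+ * and "3" of logical camera "0" are currently unavailable.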
+ */
+ std::vector<std::string> getCameraDeviceIds(std::unordered_map<
+ std::string, std::set<std::string>>* unavailablePhysicalIds = nullptr) const;
/**
* Retrieve the number of API1 compatible cameras; these are internal and
@@ -251,14 +257,15 @@
* Return the old camera API camera info
*/
status_t getCameraInfo(const std::string &id,
- hardware::CameraInfo* info) const;
+ bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
/**
* Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
* not have a v3 or newer HAL version.
*/
status_t getCameraCharacteristics(const std::string &id,
- bool overrideForPerfClass, CameraMetadata* characteristics) const;
+ bool overrideForPerfClass, CameraMetadata* characteristics,
+ bool overrideToPortrait) const;
status_t isConcurrentSessionConfigurationSupported(
const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -560,13 +567,16 @@
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
- virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+ virtual status_t getCameraInfo(bool overrideToPortrait,
+ int *portraitRotation,
+ hardware::CameraInfo *info) const = 0;
virtual bool isAPI1Compatible() const = 0;
virtual status_t dumpState(int fd) = 0;
virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
- CameraMetadata *characteristics) const {
+ CameraMetadata *characteristics, bool overrideToPortrait) {
(void) overrideForPerfClass;
(void) characteristics;
+ (void) overrideToPortrait;
return INVALID_OPERATION;
}
virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
@@ -607,6 +617,7 @@
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
+ std::unordered_map<std::string, std::set<std::string>> mUnavailablePhysicalCameras;
int mUniqueDeviceCount;
std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
// The initial public camera IDs published by the camera provider.
@@ -622,12 +633,15 @@
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
- virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+ virtual status_t getCameraInfo(bool overrideToPortrait,
+ int *portraitRotation,
+ hardware::CameraInfo *info) const override;
virtual bool isAPI1Compatible() const override;
virtual status_t dumpState(int fd) = 0;
virtual status_t getCameraCharacteristics(
bool overrideForPerfClass,
- CameraMetadata *characteristics) const override;
+ CameraMetadata *characteristics,
+ bool overrideToPortrait) override;
virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
CameraMetadata *characteristics) const override;
virtual status_t isSessionConfigurationSupported(
@@ -715,8 +729,6 @@
std::vector<CameraStatusInfoT> mCachedStatus;
// End of scope for mInitLock
- std::future<void> mInitialStatusCallbackFuture;
-
std::unique_ptr<ProviderInfo::DeviceInfo>
virtual initializeDeviceInfo(
const std::string &name, const metadata_vendor_id_t tagId,
@@ -724,9 +736,6 @@
virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
- void notifyInitialStatusChange(sp<StatusListener> listener,
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
-
std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
// Parse provider instance name for type and id
@@ -830,7 +839,7 @@
const hardware::camera::common::V1_0::TorchModeStatus&);
status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
- CameraMetadata* characteristics) const;
+ CameraMetadata* characteristics, bool overrideToPortrait) const;
void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index ef68f28..2c035de 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -759,7 +759,8 @@
bool overrideForPerfClass =
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
- res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+ res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+ /*overrideToPortrait*/true);
if (res != OK) {
return res;
}
@@ -767,7 +768,7 @@
[this](const String8 &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
- &physicalDeviceInfo);
+ &physicalDeviceInfo, /*overrideToPortrait*/true);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d60565f..1df6ec4 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -919,7 +919,8 @@
bool overrideForPerfClass =
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
- res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+ res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+ /*overrideToPortrait*/true);
if (res != OK) {
return res;
}
@@ -927,7 +928,7 @@
[this](const String8 &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
- &physicalDeviceInfo);
+ &physicalDeviceInfo, /*overrideToPortrait*/true);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 445b397..5e7fe7f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -73,7 +73,8 @@
namespace android {
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool legacyClient):
+Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient):
mId(id),
mLegacyClient(legacyClient),
mOperatingMode(NO_MODE),
@@ -94,7 +95,8 @@
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
mLastTemplateId(-1),
mNeedFixupMonochromeTags(false),
- mOverrideForPerfClass(overrideForPerfClass)
+ mOverrideForPerfClass(overrideForPerfClass),
+ mOverrideToPortrait(overrideToPortrait)
{
ATRACE_CALL();
ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -166,7 +168,7 @@
/** Start up request queue thread */
mRequestThread = createNewRequestThread(
this, mStatusTracker, mInterface, sessionParamKeys,
- mUseHalBufManager, mSupportCameraMute);
+ mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -2887,7 +2889,8 @@
sp<StatusTracker> statusTracker,
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) :
+ bool supportCameraMute,
+ bool overrideToPortrait) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
@@ -2916,7 +2919,8 @@
mSessionParamKeys(sessionParamKeys),
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager),
- mSupportCameraMute(supportCameraMute){
+ mSupportCameraMute(supportCameraMute),
+ mOverrideToPortrait(overrideToPortrait) {
mStatusId = statusTracker->addComponent("RequestThread");
}
@@ -3581,9 +3585,9 @@
mPrevTriggers = triggerCount;
// Do not override rotate&crop for stream configurations that include
- // SurfaceViews(HW_COMPOSER) output. The display rotation there will be
- // compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
- bool rotateAndCropChanged = mComposerOutput ? false :
+ // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+ // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+ bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
overrideAutoRotateAndCrop(captureRequest);
bool testPatternChanged = overrideTestPattern(captureRequest);
@@ -4629,6 +4633,15 @@
const sp<CaptureRequest> &request) {
ATRACE_CALL();
+ if (mOverrideToPortrait) {
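+ // When overriding to portrait, always stamp the current rotate-and-crop
+ // override into the request, even when the app did not leave
+ // ANDROID_SCALER_ROTATE_AND_CROP at AUTO.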
+ Mutex::Autolock l(mTriggerMutex);
+ uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
+ &rotateAndCrop_u8, 1);
+ return true;
+ }
+
if (request->mRotateAndCropAuto) {
Mutex::Autolock l(mTriggerMutex);
CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 7dff809..bc759c5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -82,7 +82,8 @@
friend class AidlCamera3Device;
public:
- explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
+ explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient = false);
virtual ~Camera3Device();
// Delete and optionally close native handles and clear the input vector afterward
@@ -810,7 +811,8 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute);
+ bool supportCameraMute,
+ bool overrideToPortrait);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -1090,6 +1092,7 @@
const bool mUseHalBufManager;
const bool mSupportCameraMute;
+ const bool mOverrideToPortrait;
};
virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1097,7 +1100,8 @@
sp<HalInterface> /*interface*/,
const Vector<int32_t>& /*sessionParamKeys*/,
bool /*useHalBufManager*/,
- bool /*supportCameraMute*/) = 0;
+ bool /*supportCameraMute*/,
+ bool /*overrideToPortrait*/) = 0;
sp<RequestThread> mRequestThread;
@@ -1367,6 +1371,10 @@
// performance class.
bool mOverrideForPerfClass;
+ // Whether the camera framework overrides the device characteristics for
+ // app compatibility reasons.
+ bool mOverrideToPortrait;
+
// The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
nsecs_t mMinExpectedDuration = 0;
// Whether the camera device runs at fixed frame rate based on AE_MODE and
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index e16982b..6569395 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -521,7 +521,7 @@
if (result->partial_result != 0)
request.resultExtras.partialResultCount = result->partial_result;
- if ((result->result != nullptr) && !states.legacyClient) {
+ if ((result->result != nullptr) && !states.legacyClient && !states.overrideToPortrait) {
camera_metadata_ro_entry entry;
auto ret = find_camera_metadata_ro_entry(result->result,
ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 8c71c2b..019c8a8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -107,6 +107,7 @@
bool legacyClient;
nsecs_t& minFrameDuration;
bool& isFixedFps;
+ bool overrideToPortrait;
};
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index b3cb178..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -69,7 +69,9 @@
}
// Cache the frame to match readout time interval, for up to kMaxFrameWaitTime
- nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+ // Because the code between here and queueBuffer() takes time to execute, make sure the
+ // presentationInterval is slightly shorter than readoutInterval.
+ nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
}
currentTime = systemTime();
}
- ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+ ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
", timestamp %" PRId64, __FUNCTION__, readoutInterval,
- currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+ mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
mPendingBuffers.pop();
queueBufferToClientLocked(buffer, currentTime);
return true;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index cb9690c..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -86,6 +86,7 @@
static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+ static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index a02e5f6..9cdd365 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -142,13 +142,13 @@
ch : // pillarbox or 1:1, full height
cw / mRotateAspect; // letterbox, not full height
switch (rotateMode) {
- case ANDROID_SCALER_ROTATE_AND_CROP_90:
+ case ANDROID_SCALER_ROTATE_AND_CROP_270:
transformMat[1] = -rw / ch; // +y -> -x
transformMat[2] = rh / cw; // +x -> +y
xShift = (cw + rw) / 2; // left edge of crop to right edge of rotated
yShift = (ch - rh) / 2; // top edge of crop to top edge of rotated
break;
- case ANDROID_SCALER_ROTATE_AND_CROP_270:
+ case ANDROID_SCALER_ROTATE_AND_CROP_90:
transformMat[1] = rw / ch; // +y -> +x
transformMat[2] = -rh / cw; // +x -> -y
xShift = (cw - rw) / 2; // left edge of crop to left edge of rotated
@@ -271,13 +271,13 @@
rx = cx + (cw - rw) / 2;
ry = cy + (ch - rh) / 2;
switch (rotateMode) {
- case ANDROID_SCALER_ROTATE_AND_CROP_90:
+ case ANDROID_SCALER_ROTATE_AND_CROP_270:
transformMat[1] = ch / rw; // +y -> +x
transformMat[2] = -cw / rh; // +x -> -y
xShift = -(cw - rw) / 2; // left edge of rotated to left edge of cropped
yShift = ry - cy + ch; // top edge of rotated to bottom edge of cropped
break;
- case ANDROID_SCALER_ROTATE_AND_CROP_270:
+ case ANDROID_SCALER_ROTATE_AND_CROP_90:
transformMat[1] = -ch / rw; // +y -> -x
transformMat[2] = cw / rh; // +x -> +y
xShift = (cw + rw) / 2; // left edge of rotated to left edge of cropped
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index ec28d31..1e103f2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -163,8 +163,9 @@
}
AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
- bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
- mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+ bool overrideToPortrait, bool legacyClient) :
+ Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+ mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
}
status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
@@ -193,7 +194,8 @@
SET_ERR("Session iface returned is null");
return INVALID_OPERATION;
}
- res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+ res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+ mOverrideToPortrait);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
@@ -207,7 +209,8 @@
for (auto& physicalId : physicalCameraIds) {
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
- physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+ physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+ /*overrideToPortrait*/true);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -372,8 +375,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+ mOverrideToPortrait}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -414,8 +417,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+ mOverrideToPortrait}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -1408,9 +1411,10 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) :
+ bool supportCameraMute,
+ bool overrideToPortrait) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute) {}
+ supportCameraMute, overrideToPortrait) {}
status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -1579,9 +1583,10 @@
sp<Camera3Device::HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) {
+ bool supportCameraMute,
+ bool overrideToPortrait) {
return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute);
+ useHalBufManager, supportCameraMute, overrideToPortrait);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index fd66661..630985f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,7 @@
using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
class AidlCameraDeviceCallbacks;
friend class AidlCameraDeviceCallbacks;
- explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+ explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient = false);
virtual ~AidlCamera3Device() { }
@@ -174,7 +174,8 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute);
+ bool supportCameraMute,
+ bool overrideToPortrait);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -259,7 +260,8 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) override;
+ bool supportCameraMute,
+ bool overrideToPortrait) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 8ff0b07..9ce0622 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -124,8 +124,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+ /*overrideToPortrait*/false}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -170,8 +170,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+ /*overrideToPortrait*/false}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 9557692..44c60cf 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -162,7 +162,8 @@
return res;
}
- res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+ res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+ mOverrideToPortrait);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
@@ -176,7 +177,8 @@
for (auto& physicalId : physicalCameraIds) {
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
- physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+ physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+ /*overrideToPortrait*/true);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -363,7 +365,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+ mResultMetadataQueue
};
//HidlCaptureOutputStates hidlStates {
@@ -425,7 +428,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+ mResultMetadataQueue
};
for (const auto& result : results) {
@@ -472,7 +476,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+ mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -698,9 +703,10 @@
sp<Camera3Device::HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) {
+ bool supportCameraMute,
+ bool overrideToPortrait) {
return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute);
+ useHalBufManager, supportCameraMute, overrideToPortrait);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -1693,9 +1699,10 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) :
+ bool supportCameraMute,
+ bool overrideToPortrait) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute) {}
+ supportCameraMute, overrideToPortrait) {}
status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 2e98fe0..2bd4660 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,8 +31,9 @@
public Camera3Device {
public:
- explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
- bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+ explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
+ legacyClient) { }
virtual ~HidlCamera3Device() {}
@@ -175,7 +176,8 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute);
+ bool supportCameraMute,
+ bool overrideToPortrait);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -222,7 +224,8 @@
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
- bool supportCameraMute) override;
+ bool supportCameraMute,
+ bool overrideToPortrait) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 2b4f8a1..c7f8fa1 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,8 +105,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+ /*overrideToPortrait*/false}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -146,8 +146,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+ /*overrideToPortrait*/false}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -182,8 +182,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
- mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+ /*overrideToPortrait*/false}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 65a0300..259e8a5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -65,7 +65,8 @@
HStatus status = HStatus::NO_ERROR;
binder::Status serviceRet =
mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraMetadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ &cameraMetadata);
HCameraMetadata hidlMetadata;
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
@@ -116,7 +117,8 @@
binder::Status serviceRet = mAidlICameraService->connectDevice(
callbacks, String16(cameraId.c_str()), String16(""), {},
hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&deviceRemote);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ /*out*/&deviceRemote);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index d3377f4..ae4d5dd 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -31,47 +31,48 @@
std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
{30, {
ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+ ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
ANDROID_CONTROL_ZOOM_RATIO_RANGE,
ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
- ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
} },
{31, {
- ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
- ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
- ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
- ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+ ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+ ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+ ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
ANDROID_SENSOR_INFO_BINNING_FACTOR,
+ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
} },
{32, {
ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
} },
{33, {
- ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
ANDROID_AUTOMOTIVE_LENS_FACING,
ANDROID_AUTOMOTIVE_LOCATION,
+ ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
- ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
- ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+ ANDROID_SENSOR_READOUT_TIMESTAMP,
} },
};
@@ -81,9 +82,9 @@
*/
std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
{30, {
+ ANDROID_CONTROL_EXTENDED_SCENE_MODE,
ANDROID_CONTROL_ZOOM_RATIO,
ANDROID_SCALER_ROTATE_AND_CROP,
- ANDROID_CONTROL_EXTENDED_SCENE_MODE,
} },
{31, {
ANDROID_SENSOR_PIXEL_MODE,
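The static_api_level_to_keys and dynamic_api_level_to_keys tables edited here map an Android API level to the camera metadata tags first published at that level, so newer keys can be hidden from vendor (VNDK) clients built against an older release. Below is a minimal sketch of how such a table could be consulted; filterStaticKeysForVndk is a hypothetical helper, it assumes CameraMetadata exposes exists() and erase(), and it is not the service's actual filtering routine.

    // Hypothetical helper (not the service's real code): strip every static key
    // that first appeared after the client's vendor API level.
    static void filterStaticKeysForVndk(int clientVndkLevel, CameraMetadata* chars) {
        for (const auto& [apiLevel, keys] : static_api_level_to_keys) {
            if (apiLevel <= clientVndkLevel) continue;  // keys the client already knows
            for (camera_metadata_tag key : keys) {
                if (chars->exists(key)) {
                    chars->erase(key);  // hide keys newer than the client's level
                }
            }
        }
    }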
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 97d7bf4..09f8eb6 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -229,11 +229,11 @@
mCameraService->getCameraVendorTagCache(&cache);
CameraInfo cameraInfo;
- mCameraService->getCameraInfo(cameraId, &cameraInfo);
+ mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
CameraMetadata metadata;
mCameraService->getCameraCharacteristics(cameraIdStr,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
}
void CameraFuzzer::invokeCameraSound() {
@@ -320,7 +320,8 @@
rc = mCameraService->connect(this, cameraId, String16(),
android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ &cameraDevice);
if (!rc.isOk()) {
// camera not connected
return;
@@ -534,7 +535,8 @@
sp<hardware::camera2::ICameraDeviceUser> device;
mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ &device);
if (device == nullptr) {
continue;
}
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index e9f6979..2f55def 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -102,23 +102,57 @@
sp<device::V3_2::ICameraDevice> mDeviceInterface;
hardware::hidl_vec<common::V1_0::VendorTagSection> mVendorTagSections;
+ // Whether to call a physical camera unavailable callback upon setCallback
+ bool mHasPhysicalCameraUnavailableCallback;
+ hardware::hidl_string mLogicalCameraId;
+ hardware::hidl_string mUnavailablePhysicalCameraId;
+
TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection) :
mDeviceNames(devices),
mDeviceInterface(new TestDeviceInterface(devices)),
- mVendorTagSections (vendorSection) {}
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(false) {}
TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
android::hardware::hidl_vec<uint8_t> chars) :
mDeviceNames(devices),
mDeviceInterface(new TestDeviceInterface(devices, chars)),
- mVendorTagSections (vendorSection) {}
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(false) {}
+
+ TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
+ const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
+ android::hardware::hidl_vec<uint8_t> chars,
+ const hardware::hidl_string& logicalCameraId,
+ const hardware::hidl_string& unavailablePhysicalCameraId) :
+ mDeviceNames(devices),
+ mDeviceInterface(new TestDeviceInterface(devices, chars)),
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(true),
+ mLogicalCameraId(logicalCameraId),
+ mUnavailablePhysicalCameraId(unavailablePhysicalCameraId) {}
virtual hardware::Return<Status> setCallback(
const sp<provider::V2_4::ICameraProviderCallback>& callbacks) override {
mCalledCounter[SET_CALLBACK]++;
mCallbacks = callbacks;
+ if (mHasPhysicalCameraUnavailableCallback) {
+ auto cast26 = provider::V2_6::ICameraProviderCallback::castFrom(callbacks);
+ if (!cast26.isOk()) {
+ ADD_FAILURE() << "Failed to cast ICameraProviderCallback to V2_6";
+ } else {
+ sp<provider::V2_6::ICameraProviderCallback> callback26 = cast26;
+ if (callback26 == nullptr) {
+ ADD_FAILURE() << "V2_6::ICameraProviderCallback is null after conversion";
+ } else {
+ callback26->physicalCameraDeviceStatusChange(mLogicalCameraId,
+ mUnavailablePhysicalCameraId,
+ android::hardware::camera::common::V1_0::CameraDeviceStatus::NOT_PRESENT);
+ }
+ }
+ }
return hardware::Return<Status>(Status::OK);
}
@@ -266,12 +300,16 @@
};
struct TestStatusListener : public CameraProviderManager::StatusListener {
+ int mPhysicalCameraStatusChangeCount = 0;
+
~TestStatusListener() {}
void onDeviceStatusChanged(const String8 &,
CameraDeviceStatus) override {}
void onDeviceStatusChanged(const String8 &, const String8 &,
- CameraDeviceStatus) override {}
+ CameraDeviceStatus) override {
+ mPhysicalCameraStatusChangeCount++;
+ }
void onTorchStatusChanged(const String8 &,
TorchModeStatus) override {}
void onTorchStatusChanged(const String8 &,
@@ -634,3 +672,46 @@
ASSERT_EQ(deviceCount, deviceNames.size()) <<
"Unexpected amount of camera devices";
}
+
+// Test that CameraProviderManager does not trigger
+// onDeviceStatusChanged(NOT_PRESENT) for a physical camera before
+// initialize() returns.
+TEST(CameraProviderManagerTest, PhysicalCameraAvailabilityCallbackRaceTest) {
+ std::vector<hardware::hidl_string> deviceNames;
+ deviceNames.push_back("device@3.2/test/0");
+ hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+
+ sp<CameraProviderManager> providerManager = new CameraProviderManager();
+ sp<TestStatusListener> statusListener = new TestStatusListener();
+ TestInteractionProxy serviceProxy;
+
+ android::hardware::hidl_vec<uint8_t> chars;
+ CameraMetadata meta;
+ int32_t charKeys[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES };
+ meta.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, charKeys,
+ sizeof(charKeys) / sizeof(charKeys[0]));
+ uint8_t capabilities[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA };
+ meta.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities,
+ sizeof(capabilities)/sizeof(capabilities[0]));
+ uint8_t physicalCameraIds[] = { '2', '\0', '3', '\0' };
+ meta.update(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, physicalCameraIds,
+ sizeof(physicalCameraIds)/sizeof(physicalCameraIds[0]));
+ camera_metadata_t* metaBuffer = const_cast<camera_metadata_t*>(meta.getAndLock());
+ chars.setToExternal(reinterpret_cast<uint8_t*>(metaBuffer),
+ get_camera_metadata_size(metaBuffer));
+
+ sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+ vendorSection, chars, "device@3.2/test/0", "2");
+ serviceProxy.setProvider(provider);
+
+ status_t res = providerManager->initialize(statusListener, &serviceProxy);
+ ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+ ASSERT_EQ(statusListener->mPhysicalCameraStatusChangeCount, 0)
+ << "Unexpected physical camera status change callback upon provider init.";
+
+ std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
+ auto cameraIds = providerManager->getCameraDeviceIds(&unavailablePhysicalIds);
+ ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
+ << "Unavailable physical camera Ids not set properly.";
+}
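The test above packs ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS as a NUL-separated byte array, so { '2', '\0', '3', '\0' } describes the two physical ids "2" and "3". The sketch below is an illustrative decoder for that layout, not the parser CameraProviderManager actually uses.

    #include <cstddef>
    #include <cstdint>
    #include <string>
    #include <vector>

    // Illustrative only: split a NUL-separated physical-ids payload into strings.
    static std::vector<std::string> splitPhysicalIds(const uint8_t* data, size_t count) {
        std::vector<std::string> ids;
        std::string current;
        for (size_t i = 0; i < count; i++) {
            if (data[i] == '\0') {
                ids.push_back(current);  // yields "2", then "3" for the test payload
                current.clear();
            } else {
                current.push_back(static_cast<char>(data[i]));
            }
        }
        return ids;
    }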
diff --git a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
index 3c187cd..9f86526 100644
--- a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
@@ -195,6 +195,7 @@
// Round-trip results can't be exact since we've gone from a large int range -> small int range
// and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
+
e = result.find(ANDROID_CONTROL_AE_REGIONS);
EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
@@ -209,11 +210,11 @@
EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
auto full_landmarks = std::vector<int32_t> {
- full_crop[0], full_crop[1] + full_crop[3],
full_crop[0] + full_crop[2], full_crop[1],
- full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
+ full_crop[0], full_crop[1] + full_crop[3],
+ full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
- full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
+ full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
};
e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
@@ -286,7 +287,6 @@
// Round-trip results can't be exact since we've gone from a large int range -> small int range
// and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
-
e = result.find(ANDROID_CONTROL_AE_REGIONS);
EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
@@ -301,11 +301,11 @@
EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
auto full_landmarks = std::vector<int32_t> {
- full_crop[0] + full_crop[2], full_crop[1],
full_crop[0], full_crop[1] + full_crop[3],
- full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
+ full_crop[0] + full_crop[2], full_crop[1],
+ full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
- full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
+ full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
};
e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
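The +-1 tolerance passed to EXPECT_EQUAL_WITHIN_N in these hunks reflects the quantization called out in the test's comment: coordinates are scaled from the sensor's larger integer range into the smaller cropped range and back, so a round trip can land one unit off. A self-contained arithmetic sketch of that effect follows; the 4000 and 3000 ranges are made up for illustration and do not come from the test.

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t large = 4000, small = 3000;  // hypothetical source and target ranges
        int32_t x = 1001;                    // coordinate in the large range
        int32_t down = x * small / large;    // 750 (750.75 truncated)
        int32_t back = down * large / small; // 1000, one unit away from the original x
        std::printf("%d -> %d -> %d\n", x, down, back);
        return 0;
    }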