Merge "Camera: Use originalFormat for getMaxPreviewFps" into udc-dev
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 5b1bd91..cd5d354 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,53 +44,5 @@
],
"file_patterns": ["(?i)drm|crypto"]
}
- ],
-
- "platinum-postsubmit": [
- // runs regularly, independent of changes in this tree.
- // signals if changes elsewhere break media functionality
- // @FlakyTest: in staged-postsubmit, but not postsubmit
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.EncodeDecodeTest"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
- },
- {
- "exclude-annotation": "androidx.test.filters.FlakyTest"
- }
- ]
- }
- ],
-
- "staged-platinum-postsubmit": [
- // runs every four hours
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.EncodeDecodeTest"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
- }
- ]
- }
]
-
- // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
- // platinum-postsubmit once issues in cuttlefish are fixed
}
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 77296a4..3e4247b 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -29,6 +29,14 @@
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
namespace android {
// codecname set and passed in as a compile flag from Android.bp
@@ -726,6 +734,24 @@
}
}
+void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
+ mSignalledError = true;
+ work->result = error;
+ work->workletsProcessed = 1u;
+}
+
+bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
+ if (size > mTmpFrameBufferSize) {
+ mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+ if (mTmpFrameBuffer == nullptr) {
+ mTmpFrameBufferSize = 0;
+ return false;
+ }
+ mTmpFrameBufferSize = size;
+ }
+ return true;
+}
+
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work) {
if (!(work && pool)) return false;
@@ -772,6 +798,7 @@
getHDRStaticParams(buffer, work);
getHDR10PlusInfoData(buffer, work);
+#if LIBYUV_VERSION < 1779
if (buffer->bitdepth == 10 &&
!(buffer->image_format == libgav1::kImageFormatYuv420 ||
buffer->image_format == libgav1::kImageFormatMonochrome400)) {
@@ -781,6 +808,7 @@
work->result = C2_CORRUPTED;
return false;
}
+#endif
const bool isMonochrome =
buffer->image_format == libgav1::kImageFormatMonochrome400;
@@ -798,6 +826,7 @@
allowRGBA1010102 = true;
}
format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
(buffer->image_format != libgav1::kImageFormatYuv420)) {
ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
@@ -806,6 +835,7 @@
work->workletsProcessed = 1u;
return false;
}
+#endif
}
if (mHalPixelFormat != format) {
@@ -854,9 +884,6 @@
uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
- size_t srcYStride = buffer->stride[0];
- size_t srcUStride = buffer->stride[1];
- size_t srcVStride = buffer->stride[2];
C2PlanarLayout layout = wView.layout();
size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
@@ -867,26 +894,130 @@
const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+ size_t srcYStride = buffer->stride[0] / 2;
+ size_t srcUStride = buffer->stride[1] / 2;
+ size_t srcVStride = buffer->stride[2] / 2;
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410OrRGBA1010102(
- (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride / sizeof(uint32_t), mWidth, mHeight,
- std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+ bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, &libyuv::kYuvV2020Constants,
+ mWidth, mHeight);
+ processed = true;
+ } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+ libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, &libyuv::kYuvV2020Constants,
+ mWidth, mHeight);
+ processed = true;
+ }
+#endif // HAVE_LIBYUV_I410_I210_TO_AB30
+ if (!processed) {
+ if (isMonochrome) {
+ const size_t tmpSize = mWidth;
+ const bool needFill = tmpSize > mTmpFrameBufferSize;
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ srcU = srcV = mTmpFrameBuffer.get();
+ srcUStride = srcVStride = 0;
+ if (needFill) {
+ std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+ }
+ }
+ convertYUV420Planar16ToY410OrRGBA1010102(
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride,
+ dstYStride / sizeof(uint32_t), mWidth, mHeight,
+ std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+ }
} else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+ dstYStride /= 2;
+ dstUStride /= 2;
+ dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+ if (buffer->image_format == libgav1::kImageFormatYuv444 ||
+ buffer->image_format == libgav1::kImageFormatYuv422) {
+ // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
+ // libyuv::I210ToP010 when they are available.
+ // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
+ // guarantees.
+ const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ uint16_t *const tmpY = mTmpFrameBuffer.get();
+ uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+ uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ mWidth, mHeight);
+ } else {
+ libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ mWidth, mHeight);
+ }
+ libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+ (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight, isMonochrome);
+ }
+#else // LIBYUV_VERSION < 1779
convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
- dstUStride / 2, mWidth, mHeight, isMonochrome);
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ dstUStride, mWidth, mHeight, isMonochrome);
+#endif // LIBYUV_VERSION >= 1779
} else {
- convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2, dstYStride, dstUStride, mWidth,
- mHeight, isMonochrome);
+#if LIBYUV_VERSION >= 1779
+ if (buffer->image_format == libgav1::kImageFormatYuv444) {
+ // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
+ // it's available.
+ const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+ if (!allocTmpFrameBuffer(tmpSize)) {
+ ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+ setError(work, C2_NO_MEMORY);
+ return false;
+ }
+ uint16_t *const tmpY = mTmpFrameBuffer.get();
+ uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+ uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+ libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+ mWidth, mHeight);
+ libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+ libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+ dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ mWidth, mHeight, isMonochrome);
+ }
+#else // LIBYUV_VERSION < 1779
+ convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+ srcUStride, srcVStride, dstYStride, dstUStride,
+ mWidth, mHeight, isMonochrome);
+#endif // LIBYUV_VERSION >= 1779
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
+ size_t srcYStride = buffer->stride[0];
+ size_t srcUStride = buffer->stride[1];
+ size_t srcVStride = buffer->stride[2];
if (buffer->image_format == libgav1::kImageFormatYuv444) {
libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index f0e14d7..c3b27ea 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -19,6 +19,8 @@
#include <inttypes.h>
+#include <memory>
+
#include <media/stagefright/foundation/ColorUtils.h>
#include <SimpleC2Component.h>
@@ -60,6 +62,9 @@
uint32_t mHeight;
bool mSignalledOutputEos;
bool mSignalledError;
+ // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+ std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+ size_t mTmpFrameBufferSize = 0;
C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
@@ -97,6 +102,9 @@
void destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets mSignalledError to true and stores |error| in |work->result|.
+ void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
+ bool allocTmpFrameBuffer(size_t size);
bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
const std::unique_ptr<C2Work>& work);
c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index ecd5463..e4daf5c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -71,10 +71,11 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx",
- "libstagefright_surface_utils",
+ "libstagefright_surface_utils",
"libstagefright_xmlparser",
"libui",
"libutils",
+ "server_configurable_flags",
],
export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 1c86ba9..881c74e 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -34,6 +34,7 @@
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/drm/1.0/types.h>
+#include <android-base/parseint.h>
#include <android-base/properties.h>
#include <android-base/stringprintf.h>
#include <binder/MemoryBase.h>
@@ -52,6 +53,7 @@
#include <media/stagefright/SurfaceUtils.h>
#include <media/MediaCodecBuffer.h>
#include <mediadrm/ICrypto.h>
+#include <server_configurable_flags/get_flags.h>
#include <system/window.h>
#include "CCodecBufferChannel.h"
@@ -75,7 +77,6 @@
namespace {
constexpr size_t kSmoothnessFactor = 4;
-constexpr size_t kRenderingDepth = 3;
// This is for keeping IGBP's buffer dropping logic in legacy mode other
// than making it non-blocking. Do not change this value.
@@ -149,10 +150,11 @@
mFirstValidFrameIndex(0u),
mIsSurfaceToDisplay(false),
mHasPresentFenceTimes(false),
+ mRenderingDepth(0u),
mMetaMode(MODE_NONE),
mInputMetEos(false),
mSendEncryptedInfoBuffer(false) {
- mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+ mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor;
{
Mutexed<Input>::Locked input(mInput);
input->buffers.reset(new DummyInputBuffers(""));
@@ -167,11 +169,15 @@
Mutexed<Output>::Locked output(mOutput);
output->outputDelay = 0u;
output->numSlots = kSmoothnessFactor;
+ output->bounded = false;
}
{
Mutexed<BlockPools>::Locked pools(mBlockPools);
pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
}
+ std::string value = server_configurable_flags::GetServerConfigurableFlag(
+ "media_native", "ccodec_rendering_depth", "0");
+ android::base::ParseInt(value, &mRenderingDepth);
}
CCodecBufferChannel::~CCodecBufferChannel() {
@@ -727,7 +733,7 @@
Mutexed<Output>::Locked output(mOutput);
if (!output->buffers ||
output->buffers->hasPending() ||
- output->buffers->numActiveSlots() >= output->numSlots) {
+ (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
return;
}
}
@@ -1017,7 +1023,7 @@
int hasPresentFenceTimes = 0;
window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
mHasPresentFenceTimes = hasPresentFenceTimes == 1;
- if (mHasPresentFenceTimes) {
+ if (!mHasPresentFenceTimes) {
ALOGI("Using latch times for frame rendered signals - present fences not supported");
}
}
@@ -1386,7 +1392,7 @@
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
- reorderDepth.value + kRenderingDepth;
+ reorderDepth.value + mRenderingDepth;
outputSurface = output->surface ?
output->surface->getIGraphicBufferProducer() : nullptr;
if (outputSurface) {
@@ -1509,6 +1515,7 @@
Mutexed<Output>::Locked output(mOutput);
output->outputDelay = outputDelayValue;
output->numSlots = numOutputSlots;
+ output->bounded = bool(outputSurface);
if (graphic) {
if (outputSurface || !buffersBoundToCodec) {
output->buffers.reset(new GraphicOutputBuffers(mName));
@@ -2053,7 +2060,7 @@
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
maxDequeueCount = output->maxDequeueBuffers =
- numOutputSlots + reorderDepth + kRenderingDepth;
+ numOutputSlots + reorderDepth + mRenderingDepth;
if (output->surface) {
output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 0d25d6d..2d87aa9 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -321,6 +321,9 @@
std::unique_ptr<OutputBuffers> buffers;
size_t numSlots;
uint32_t outputDelay;
+ // true iff the underlying block pool is bounded --- for example,
+ // a BufferQueue-based block pool would be bounded by the BufferQueue.
+ bool bounded;
};
Mutexed<Output> mOutput;
Mutexed<std::list<std::unique_ptr<C2Work>>> mFlushedConfigs;
@@ -341,6 +344,7 @@
std::map<uint64_t, int> rotation;
};
Mutexed<OutputSurface> mOutputSurface;
+ int mRenderingDepth;
struct BlockPools {
C2Allocator::id_t inputAllocatorId;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..e18dd59 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -637,6 +637,10 @@
if (encoder) {
attrs |= MediaCodecInfo::kFlagIsEncoder;
}
+ if (codec.quirkSet.find("attribute::enforce-xml-capabilities") !=
+ codec.quirkSet.end()) {
+ attrs |= MediaCodecInfo::kFlagIsEnforceXmlCapabilities;
+ }
if (trait.owner == "software") {
attrs |= MediaCodecInfo::kFlagIsSoftwareOnly;
} else {
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 7c0ce57..9c4ccb8 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1306,7 +1306,7 @@
streamType = AUDIO_STREAM_DTMF;
}
attr = AudioSystem::streamTypeToAttributes(streamType);
- attr.flags = AUDIO_FLAG_LOW_LATENCY;
+ attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_LOW_LATENCY);
const size_t frameCount = mProcessSize;
status_t status = mpAudioTrack->set(
diff --git a/media/libaudiousecasevalidation/UsecaseValidator.cpp b/media/libaudiousecasevalidation/UsecaseValidator.cpp
index d62df3a..bf532de 100644
--- a/media/libaudiousecasevalidation/UsecaseValidator.cpp
+++ b/media/libaudiousecasevalidation/UsecaseValidator.cpp
@@ -142,6 +142,9 @@
bool areFlagsValid(audio_flags_mask_t flags) {
ALOGV("areFlagsValid flags: %#x", flags);
+ if ((flags & (AUDIO_FLAG_SCO|AUDIO_FLAG_AUDIBILITY_ENFORCED|AUDIO_FLAG_BEACON)) != 0) {
+ return false;
+ }
if ((flags & AUDIO_FLAG_LOW_LATENCY) != 0) {
return true;
}
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..3ebd13e 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -102,6 +102,111 @@
return OK;
}
+static int32_t convertToIntNoSign(const AString &str) {
+ char *end;
+ unsigned long u = strtoul(str.c_str(), &end, 10);
+ if (end == str.c_str() || *end != '\0') {
+ // malformed integer
+ return -1;
+ }
+ if (u > INT32_MAX) {
+ // The number is too big
+ return -1;
+ }
+ return static_cast<int32_t>(u);
+}
+
+static void parseSize(const AString &str, int32_t *width, int32_t *height) {
+ ssize_t ix = str.find("x");
+ if (ix == -1) {
+ ix = str.find("*");
+ if (ix == -1) {
+ return;
+ }
+ }
+ AString wStr(str, 0, ix);
+ AString hStr(str, ix + 1, str.size() - ix - 1);
+ *width = convertToIntNoSign(wStr);
+ *height = convertToIntNoSign(hStr);
+}
+
+static void parseRange(const AString &str, int32_t *min, int32_t *max) {
+ ssize_t ix = str.find("-");
+ if (ix == -1) {
+ return;
+ }
+ AString minStr(str, 0, ix);
+ AString maxStr(str, ix + 1, str.size() - ix - 1);
+ *min = convertToIntNoSign(minStr);
+ *max = convertToIntNoSign(maxStr);
+}
+
+static void parseSizeRange(const AString &str, int32_t *minWidth, int32_t *minHeight,
+ int32_t *maxWidth, int32_t *maxHeight) {
+ ssize_t ix = str.find("-");
+ if (ix == -1) {
+ return;
+ }
+ AString minSize(str, 0, ix);
+ AString maxSize(str, ix + 1, str.size() - ix - 1);
+ parseSize(minSize, minWidth, minHeight);
+ parseSize(maxSize, maxWidth, maxHeight);
+}
+
+
+bool MediaCodecInfo::Capabilities::isResolutionSupported(int32_t width, int32_t height) {
+ AString blockSizeStr;
+ AString blockCountStr;
+ int32_t blockWidth = -1;
+ int32_t blockHeight = -1;
+ int32_t maxBlocks = -1;
+ int32_t minBlocks = -1;
+
+ if (mDetails->findString("block-size", &blockSizeStr)) {
+ parseSize(blockSizeStr, &blockWidth, &blockHeight);
+ }
+ if (mDetails->findString("block-count-range", &blockCountStr)) {
+ parseRange(blockCountStr, &minBlocks, &maxBlocks);
+ }
+ if (maxBlocks != -1 && blockWidth != -1 && blockHeight != -1) {
+ if (maxBlocks < ((width + blockWidth - 1) / blockWidth) *
+ ((height + blockHeight - 1) / blockHeight)) {
+ return false;
+ }
+ }
+
+ AString sizeRangeStr;
+ int32_t maxWidth = -1;
+ int32_t maxHeight = -1;
+ int32_t minWidth = -1;
+ int32_t minHeight = -1;
+
+ if (mDetails->findString("size-range", &sizeRangeStr)) {
+ parseSizeRange(sizeRangeStr, &minWidth, &minHeight, &maxWidth, &maxHeight);
+ }
+
+ if (maxWidth != -1 && maxHeight != -1) {
+        // The format is not supported if width or height falls outside the
+        // min-max limits, UNLESS the codec can swap width and height; in that
+        // case the format is unsupported only if width is outside the min-max
+        // height range or height is outside the min-max width range.
+ if (width < minWidth || height < minHeight ||
+ width > maxWidth || height > maxHeight) {
+ int32_t swappable = 0;
+ if (!mDetails->findInt32("feature-can-swap-width-height", &swappable) ||
+ swappable == 0) {
+ return false;
+ }
+ if (width < minHeight || height < minWidth ||
+ width > maxHeight || height > maxWidth) {
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+
void MediaCodecInfo::CapabilitiesWriter::addDetail(
const char* key, const char* value) {
mCap->mDetails->setString(key, value);
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..855bc28 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -59,6 +59,7 @@
kFlagIsVendor = 1 << 1,
kFlagIsSoftwareOnly = 1 << 2,
kFlagIsHardwareAccelerated = 1 << 3,
+ kFlagIsEnforceXmlCapabilities = 1 << 4,
};
struct Capabilities : public RefBase {
@@ -96,6 +97,8 @@
*/
const sp<AMessage> getDetails() const;
+ bool isResolutionSupported(int32_t width, int32_t height);
+
protected:
Vector<ProfileLevel> mProfileLevels;
SortedVector<ProfileLevel> mProfileLevelsSorted;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index f1534c9..a26fcbe 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -314,6 +314,7 @@
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
+ "server_configurable_flags",
],
static_libs: [
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3427f8f..178458c 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,17 +19,18 @@
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
-#include <set>
-#include <random>
-#include <stdlib.h>
-#include <inttypes.h>
-#include <stdlib.h>
#include <dlfcn.h>
+#include <inttypes.h>
+#include <random>
+#include <set>
+#include <stdlib.h>
+#include <string>
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
+#include <android/api-level.h>
#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -81,6 +82,7 @@
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
+#include <server_configurable_flags/get_flags.h>
#include <utils/Singleton.h>
namespace android {
@@ -90,6 +92,8 @@
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
using aidl::android::media::ClientInfoParcel;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
// key for media statistics
static const char *kCodecKeyName = "codec";
@@ -155,6 +159,7 @@
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
+static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
@@ -211,6 +216,7 @@
static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
+// Freeze
static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
@@ -225,6 +231,7 @@
"android.media.mediacodec.freeze-distance-ms-histogram";
static const char *kCodecFreezeDistanceMsHistogramBuckets =
"android.media.mediacodec.freeze-distance-ms-histogram-buckets";
+// Judder
static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
@@ -233,6 +240,32 @@
static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
static const char *kCodecJudderScoreHistogramBuckets =
"android.media.mediacodec.judder-score-histogram-buckets";
+// Freeze event
+static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
+static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
+static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
+static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
+static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
+static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
+static const char *kFreezeEventDetailsDurationMs =
+ "android.media.mediacodec.freeze.details-duration-ms";
+static const char *kFreezeEventDetailsDistanceMs =
+ "android.media.mediacodec.freeze.details-distance-ms";
+// Judder event
+static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
+static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
+static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
+static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
+static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
+static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
+static const char *kJudderEventDetailsActualDurationUs =
+ "android.media.mediacodec.judder.details-actual-duration-us";
+static const char *kJudderEventDetailsContentDurationUs =
+ "android.media.mediacodec.judder.details-content-duration-us";
+static const char *kJudderEventDetailsDistanceMs =
+ "android.media.mediacodec.judder.details-distance-ms";
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
@@ -992,6 +1025,9 @@
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
mIsSurfaceToDisplay(false),
+ mVideoRenderQualityTracker(
+ VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+ server_configurable_flags::GetServerConfigurableFlag)),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -1168,6 +1204,12 @@
mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
h.emitBuckets());
}
+ if (m.freezeEventCount != 0) {
+ mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
+ }
+ if (m.judderEventCount != 0) {
+ mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
+ }
}
if (mLatencyHist.getCount() != 0 ) {
@@ -1406,6 +1448,53 @@
}
}
+static std::string emitVector(std::vector<int32_t> vector) {
+ std::ostringstream sstr;
+ for (size_t i = 0; i < vector.size(); ++i) {
+ if (i != 0) {
+ sstr << ',';
+ }
+ sstr << vector[i];
+ }
+ return sstr.str();
+}
+
+static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
+ if (e.valid) {
+ mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
+ mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
+ mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
+ mediametrics_setInt64(handle, kFreezeEventCount, e.count);
+ mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
+ mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
+ mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
+ emitVector(e.details.durationMs));
+ mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
+ emitVector(e.details.distanceMs));
+ mediametrics_selfRecord(handle);
+ mediametrics_delete(handle);
+ }
+}
+
+static void reportToMediaMetricsIfValid(const JudderEvent &e) {
+ if (e.valid) {
+ mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
+ mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
+ mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
+ mediametrics_setInt64(handle, kJudderEventCount, e.count);
+ mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
+ mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
+ mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
+ emitVector(e.details.actualRenderDurationUs));
+ mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
+ emitVector(e.details.contentRenderDurationUs));
+ mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
+ emitVector(e.details.distanceMs));
+ mediametrics_selfRecord(handle);
+ mediametrics_delete(handle);
+ }
+}
+
void MediaCodec::flushMediametrics() {
ALOGD("flushMediametrics");
@@ -1424,6 +1513,10 @@
}
// we no longer have anything pending upload
mMetricsToUpload = false;
+
+ // Freeze and judder events are reported separately
+ reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
+ reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
}
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1537,7 +1630,12 @@
// Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
// rendered frame.
if (!mTunneled || mediaTimeUs != INT64_MAX) {
- mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs);
+ FreezeEvent freezeEvent;
+ JudderEvent judderEvent;
+ mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
+ &judderEvent);
+ reportToMediaMetricsIfValid(freezeEvent);
+ reportToMediaMetricsIfValid(judderEvent);
}
}
}
@@ -1950,6 +2048,40 @@
return configure(format, nativeWindow, crypto, NULL, flags);
}
+bool MediaCodec::isResolutionSupported(const sp<AMessage>& format) {
+ int32_t width = -1;
+ int32_t height = -1;
+ int32_t maxWidth = -1;
+ int32_t maxHeight = -1;
+ format->findInt32("width", &width);
+ format->findInt32("height", &height);
+ format->findInt32("max-width", &maxWidth);
+ format->findInt32("max-height", &maxHeight);
+ AString mediaType;
+ if (!format->findString("mime", &mediaType)) {
+ ALOGI("Can not check mediaFormat: No MIME set.");
+ return true;
+ }
+ sp<MediaCodecInfo::Capabilities> caps = mCodecInfo->getCapabilitiesFor(mediaType.c_str());
+ if (caps == NULL) {
+ ALOGI("Can not get Capabilities for MIME %s.", mediaType.c_str());
+ return true;
+ }
+ if (width != -1 && height != -1) {
+ if (!caps->isResolutionSupported(width, height)) {
+ ALOGD("Frame resolution (%dx%d) is beyond codec capabilities", width, height);
+ return false;
+ }
+ }
+ if (maxWidth != -1 && maxHeight != -1) {
+ if (!caps->isResolutionSupported(maxWidth, maxHeight)) {
+ ALOGD("Max frame resolution (%dx%d) is beyond codec capabilities", maxWidth, maxHeight);
+ return false;
+ }
+ }
+ return true;
+}
+
status_t MediaCodec::configure(
const sp<AMessage> &format,
const sp<Surface> &surface,
@@ -2037,7 +2169,24 @@
mediametrics_delete(nextMetricsHandle);
return BAD_VALUE;
}
-
+ // For applications built with targetSdkVersion of Android U or later (or if MediaCodec's
+ // caller is not an app) we enforce codec resolution capabilities if such enforcement is
+ // required by 'enforce-xml-capabilities' attribute
+ if (android_get_application_target_sdk_version() >= __ANDROID_API_U__) {
+ if (mCodecInfo != nullptr &&
+ (mCodecInfo->getAttributes() &
+ MediaCodecInfo::kFlagIsEnforceXmlCapabilities)) {
+ if (!isResolutionSupported(format)) {
+ mErrorLog.log(LOG_TAG,
+ base::StringPrintf("The input resolution of %dx%d is not "
+ "supported for this codec; please query MediaCodecList "
+ "for the supported formats including the resolution. See "
+ "CodecCapabilities#isFormatSupported() and "
+ "VideoCapabilities#isSizeSupported()", mWidth, mHeight));
+ return BAD_VALUE;
+ }
+ }
+ }
} else {
if (nextMetricsHandle != 0) {
int32_t channelCount;
@@ -3811,6 +3960,14 @@
if (interestingFormat->findInt32("level", &level)) {
mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
}
+ sp<AMessage> uncompressedFormat =
+ (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
+ int32_t componentColorFormat = -1;
+ if (uncompressedFormat->findInt32("android._color-format",
+ &componentColorFormat)) {
+ mediametrics_setInt32(mMetricsHandle,
+ kCodecComponentColorFormat, componentColorFormat);
+ }
updateHdrMetrics(true /* isConfig */);
int32_t codecMaxInputSize = -1;
if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index df25ead..0996b87 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -20,15 +20,92 @@
#include <media/stagefright/VideoRenderQualityTracker.h>
#include <assert.h>
+#include <charconv>
#include <cmath>
+#include <stdio.h>
#include <sys/time.h>
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
+
namespace android {
+using android::base::ParseBoolResult;
+
static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
+ GetServerConfigurableFlagFn;
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, bool *value) {
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
+ *value ? "true" : "false");
+ switch (android::base::ParseBool(valueStr)) {
+ case ParseBoolResult::kTrue: *value = true; break;
+ case ParseBoolResult::kFalse: *value = false; break;
+ case ParseBoolResult::kError:
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ break;
+ }
+}
+
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, int32_t *value) {
+ char defaultStr[11];
+ sprintf(defaultStr, "%d", int(*value));
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
+ if (!android::base::ParseInt(valueStr.c_str(), value) || valueStr.size() == 0) {
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ return;
+ }
+}
+
+template<typename T>
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+ char const *flagNameSuffix, std::vector<T> *value) {
+ std::stringstream sstr;
+ for (int i = 0; i < value->size(); ++i) {
+ if (i != 0) {
+ sstr << ",";
+ }
+ sstr << (*value)[i];
+ }
+ std::string flagName("render_metrics_");
+ flagName.append(flagNameSuffix);
+ std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
+ if (valueStr.size() == 0) {
+ return;
+ }
+ // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
+ std::vector<T> newValues;
+ const char *p = valueStr.c_str();
+ const char *last = p + valueStr.size();
+ while (p != last) {
+ if (*p == ',') {
+ p++;
+ }
+ T value = -1;
+ auto [ptr, error] = std::from_chars(p, last, value);
+ if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+ ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+ valueStr.c_str());
+ return;
+ }
+ p = ptr;
+ newValues.push_back(value);
+ }
+ *value = std::move(newValues);
+}
+
VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
clear();
}
@@ -42,11 +119,40 @@
contentFrameRate = FRAME_RATE_UNDETERMINED;
desiredFrameRate = FRAME_RATE_UNDETERMINED;
actualFrameRate = FRAME_RATE_UNDETERMINED;
+ freezeEventCount = 0;
freezeDurationMsHistogram.clear();
freezeDistanceMsHistogram.clear();
+ judderEventCount = 0;
judderScoreHistogram.clear();
}
+VideoRenderQualityTracker::Configuration
+ VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+ GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
+ VideoRenderQualityTracker::Configuration c;
+#define getFlag(FIELDNAME, FLAGNAME) \
+ getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
+ getFlag(enabled, "enabled");
+ getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
+ getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
+ getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
+ getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
+ getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
+ getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
+ getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
+ getFlag(freezeEventMax, "freeze_event_max");
+ getFlag(freezeEventDetailsMax, "freeze_event_details_max");
+ getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
+ getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
+ getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
+ getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
+ getFlag(judderEventMax, "judder_event_max");
+ getFlag(judderEventDetailsMax, "judder_event_details_max");
+ getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+#undef getFlag
+ return c;
+}
+
VideoRenderQualityTracker::Configuration::Configuration() {
enabled = true;
@@ -62,18 +168,24 @@
// Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
// because of frame drops for live content, or because the user is seeking.
- contentTimeAdvancedForLiveContentToleranceUs = 200 * 1000;
+ liveContentFrameDropToleranceUs = 200 * 1000;
// Freeze configuration
freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
freezeDurationMsHistogramToScore = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
60000};
+ freezeEventMax = 0; // enabled only when debugging
+ freezeEventDetailsMax = 20;
+ freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less
// Judder configuration
judderErrorToleranceUs = 2000;
judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
judderScoreHistogramToScore = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
+ judderEventMax = 0; // enabled only when debugging
+ judderEventDetailsMax = 20;
+ judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
}
VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
@@ -139,7 +251,9 @@
mLastContentTimeUs = contentTimeUs;
}
-void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs) {
+void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+ FreezeEvent *freezeEventOut,
+ JudderEvent *judderEventOut) {
if (!mConfiguration.enabled) {
return;
}
@@ -183,10 +297,23 @@
nextExpectedFrame.desiredRenderTimeUs);
}
processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
- nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs);
+ nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
+ freezeEventOut, judderEventOut);
mLastRenderTimeUs = actualRenderTimeUs;
}
// Hands the in-progress freeze event to the caller and invalidates the internal copy so the
// same event is not reported twice. std::move leaves mFreezeEvent in an unspecified state, so
// 'valid' must be reset explicitly afterwards.
VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
    FreezeEvent event = std::move(mFreezeEvent);
    mFreezeEvent.valid = false;
    return event;
}
+
// Hands the in-progress judder event to the caller and invalidates the internal copy so the
// same event is not reported twice. std::move leaves mJudderEvent in an unspecified state, so
// 'valid' must be reset explicitly afterwards.
VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
    JudderEvent event = std::move(mJudderEvent);
    mJudderEvent.valid = false;
    return event;
}
+
const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
if (!mConfiguration.enabled) {
return mMetrics;
@@ -232,6 +359,10 @@
mLastContentTimeUs = -1;
mLastRenderTimeUs = -1;
mLastFreezeEndTimeUs = -1;
+ mLastJudderEndTimeUs = -1;
+ mWasPreviousFrameDropped = false;
+ mFreezeEvent.valid = false;
+ mJudderEvent.valid = false;
// Don't worry about tracking frame rendering times from now up until playback catches up to the
// discontinuity. While stuttering or freezing could be found in the next few frames, the impact
@@ -276,7 +407,7 @@
int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
bool skippedForwardDueToLiveContentFrameDrops =
abs(contentFrameDurationUs - desiredFrameDurationUs) <
- mConfiguration.contentTimeAdvancedForLiveContentToleranceUs;
+ mConfiguration.liveContentFrameDropToleranceUs;
if (!skippedForwardDueToLiveContentFrameDrops) {
ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
@@ -298,6 +429,7 @@
updateFrameDurations(mDesiredFrameDurationUs, -1);
updateFrameDurations(mActualFrameDurationUs, -1);
updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+ mWasPreviousFrameDropped = false;
}
void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
@@ -308,11 +440,14 @@
updateFrameDurations(mActualFrameDurationUs, -1);
updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+ mWasPreviousFrameDropped = true;
}
void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
int64_t desiredRenderTimeUs,
- int64_t actualRenderTimeUs) {
+ int64_t actualRenderTimeUs,
+ FreezeEvent *freezeEventOut,
+ JudderEvent *judderEventOut) {
// Capture the timestamp at which the first frame was rendered
if (mMetrics.firstRenderTimeUs == 0) {
mMetrics.firstRenderTimeUs = actualRenderTimeUs;
@@ -334,29 +469,90 @@
updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
// If the previous frame was dropped, there was a freeze if we've already rendered a frame
- if (mActualFrameDurationUs[1] == -1 && mLastRenderTimeUs != -1) {
- processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mMetrics);
+ if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
+ processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
+ mMetrics, mConfiguration);
mLastFreezeEndTimeUs = actualRenderTimeUs;
}
+ maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
+ mConfiguration, freezeEventOut);
// Judder is computed on the prior video frame, not the current video frame
int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
mContentFrameDurationUs,
mConfiguration);
+ int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
+ mActualFrameDurationUs[1];
if (judderScore != 0) {
- mMetrics.judderScoreHistogram.insert(judderScore);
+ processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
+ mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
+ mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
}
+ maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
+ mConfiguration, judderEventOut);
+
+ mWasPreviousFrameDropped = false;
}
// Records one freeze occurrence (duration since the last rendered frame) into the overall
// metrics histograms, and — when freeze-event tracking is enabled (freezeEventMax > 0) —
// folds the occurrence into the freeze event currently being accumulated in 'e'.
void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
                                              VideoRenderQualityMetrics &m,
                                              const Configuration &c) {
    int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
    m.freezeDurationMsHistogram.insert(durationMs);
    int32_t distanceMs = -1;
    if (lastFreezeEndTimeUs != -1) {
        // The distance to the last freeze is measured from the end of the last freeze to the
        // start of this freeze.
        distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
        m.freezeDistanceMsHistogram.insert(distanceMs);
    }
    if (c.freezeEventMax > 0) {
        if (e.valid == false) {
            // First occurrence: open a new event and reset all accumulators.
            m.freezeEventCount++;
            e.valid = true;
            e.initialTimeUs = lastRenderTimeUs;
            e.durationMs = 0;
            e.sumDurationMs = 0;
            e.sumDistanceMs = 0;
            e.count = 0;
            e.details.durationMs.clear();
            e.details.distanceMs.clear();
        // The first occurrence in the event should not have the distance recorded as part of the
        // event, because it belongs in a vacuum between two events. However we still want the
        // distance recorded in the details to calculate times in all details in all events.
        } else if (distanceMs != -1) {
            e.durationMs += distanceMs;
            e.sumDistanceMs += distanceMs;
        }
        e.durationMs += durationMs;
        e.count++;
        e.sumDurationMs += durationMs;
        if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
            e.details.durationMs.push_back(durationMs);
            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
        }
    }
}
+
+void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
+ int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+ const VideoRenderQualityMetrics & m,
+ const Configuration &c,
+ FreezeEvent *freezeEventOut) {
+ if (lastFreezeEndTimeUs == -1 || !e.valid) {
+ return;
+ }
+ // Future freeze occurrences are still pulled into the current freeze event if under tolerance
+ int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
+ if (distanceMs < c.freezeEventDistanceToleranceMs) {
+ return;
+ }
+ if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
+ *freezeEventOut = std::move(e);
+ }
+ // start recording a new freeze event after pushing the current one back to the caller
+ e.valid = false;
}
int64_t VideoRenderQualityTracker::computePreviousJudderScore(
@@ -401,6 +597,67 @@
return abs(errorUs) / 1000; // error in millis to keep numbers small
}
// Records one judder occurrence (score for the previously-rendered frame) into the overall
// metrics histogram, and — when judder-event tracking is enabled (judderEventMax > 0) — folds
// the occurrence into the judder event currently being accumulated in 'e'.
// NOTE(review): the declaration in VideoRenderQualityTracker.h lists contentDurationUs before
// actualDurationUs — the parameter names are swapped relative to this definition. The call site
// passes (actual, content) matching this definition; the header declaration should be fixed.
void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
                                              int64_t lastJudderEndTime,
                                              const FrameDurationUs &actualDurationUs,
                                              const FrameDurationUs &contentDurationUs,
                                              JudderEvent &e, VideoRenderQualityMetrics &m,
                                              const Configuration &c) {
    int32_t distanceMs = -1;
    if (lastJudderEndTime != -1) {
        distanceMs = int32_t((judderTimeUs - lastJudderEndTime) / 1000);
    }
    m.judderScoreHistogram.insert(judderScore);
    if (c.judderEventMax > 0) {
        if (!e.valid) {
            // First occurrence: open a new event and reset all accumulators.
            m.judderEventCount++;
            e.valid = true;
            e.initialTimeUs = judderTimeUs;
            e.durationMs = 0;
            e.sumScore = 0;
            e.sumDistanceMs = 0;
            e.count = 0;
            e.details.contentRenderDurationUs.clear();
            e.details.actualRenderDurationUs.clear();
            e.details.distanceMs.clear();
        // The first occurrence in the event should not have the distance recorded as part of the
        // event, because it belongs in a vacuum between two events. However we still want the
        // distance recorded in the details to calculate the times using all details in all events.
        } else if (distanceMs != -1) {
            e.durationMs += distanceMs;
            e.sumDistanceMs += distanceMs;
        }
        e.durationMs += actualDurationUs[1] / 1000;
        e.count++;
        e.sumScore += judderScore;
        if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
            e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
            e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
        }
    }
}
+
+void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
+ int64_t lastJudderEndTimeUs, JudderEvent &e,
+ const VideoRenderQualityMetrics &m,
+ const Configuration &c,
+ JudderEvent *judderEventOut) {
+ if (lastJudderEndTimeUs == -1 || !e.valid) {
+ return;
+ }
+ // Future judder occurrences are still pulled into the current judder event if under tolerance
+ int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
+ if (distanceMs < c.judderEventDistanceToleranceMs) {
+ return;
+ }
+ if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
+ *judderEventOut = std::move(e);
+ }
+ // start recording a new judder event after pushing the current one back to the caller
+ e.valid = false;
+}
+
void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
const Configuration &c) {
m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 52d7d3d..144ea53 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -462,6 +462,7 @@
constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
void updateTunnelPeek(const sp<AMessage> &msg);
void processRenderedFrames(const sp<AMessage> &msg);
+ bool isResolutionSupported(const sp<AMessage> &format);
inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index 8bfead9..2b3dbcb 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -66,6 +66,8 @@
int32_t freezeScore;
// The computed percentage of total playback duration that was frozen.
float freezeRate;
+ // The number of freeze events.
+ int32_t freezeEventCount;
// A histogram of the durations between each freeze.
MediaHistogram<int32_t> freezeDistanceMsHistogram;
@@ -76,6 +78,8 @@
int32_t judderScore;
// The computed percentage of total frames that had judder.
float judderRate;
+ // The number of judder events.
+ int32_t judderEventCount;
};
///////////////////////////////////////////////////////
@@ -101,6 +105,15 @@
// Configurable elements of the metrics algorithms
class Configuration {
public:
+ // system/server_configurable_flags/libflags/include/get_flags.h:GetServerConfigurableFlag
+ typedef std::string (*GetServerConfigurableFlagFn)(
+ const std::string& experiment_category_name,
+ const std::string& experiment_flag_name,
+ const std::string& default_value);
+
+ static Configuration getFromServerConfigurableFlags(
+ GetServerConfigurableFlagFn getServerConfigurableFlagFn);
+
Configuration();
// Whether or not frame render quality is tracked.
@@ -124,7 +137,7 @@
// skip forward in content time is due to frame drops. If the app-desired frame duration is
// short, but the content frame duration is large, it is assumed the app is intentionally
// seeking forward.
- int32_t contentTimeAdvancedForLiveContentToleranceUs;
+ int32_t liveContentFrameDropToleranceUs;
// Freeze configuration
//
@@ -135,6 +148,13 @@
std::vector<int64_t> freezeDurationMsHistogramToScore;
// The values used to distribute distances between freezes across a histogram.
std::vector<int32_t> freezeDistanceMsHistogramBuckets;
+ // The maximum number of freeze events to send back to the caller.
+ int32_t freezeEventMax;
+ // The maximum number of detail entries tracked per freeze event.
+ int32_t freezeEventDetailsMax;
+ // The maximum distance in time between two freeze occurrences such that both will be
+ // lumped into the same freeze event.
+ int32_t freezeEventDistanceToleranceMs;
// Judder configuration
//
@@ -144,7 +164,68 @@
std::vector<int32_t> judderScoreHistogramBuckets;
// The values used to compare against judder score histogram counts when determining an
// overall score.
- std::vector<int32_t> judderScoreHistogramToScore;
+ std::vector<int64_t> judderScoreHistogramToScore;
+ // The maximum number of judder events to send back to the caller.
+ int32_t judderEventMax;
+ // The maximum number of detail entries tracked per judder event.
+ int32_t judderEventDetailsMax;
+ // The maximum distance in time between two judder occurrences such that both will be
+ // lumped into the same judder event.
+ int32_t judderEventDistanceToleranceMs;
+ };
+
    struct FreezeEvent {
        // Details are captured for each freeze up to a limited number. The arrays are guaranteed
        // to have the same size.
        struct Details {
            // The duration of the freeze. (normalized stray '///' to the file's '//' style)
            std::vector<int32_t> durationMs;
            // The distance between the beginning of this freeze and the end of the previous
            // freeze.
            std::vector<int32_t> distanceMs;
        };
        // Whether or not the data in this structure is valid.
        bool valid = false;
        // The time at which the first freeze for this event was detected.
        int64_t initialTimeUs;
        // The total duration from the beginning of the first freeze to the end of the last freeze
        // in this event.
        int32_t durationMs;
        // The number of freezes in this event.
        int64_t count;
        // The sum of all durations of all freezes in this event.
        int64_t sumDurationMs;
        // The sum of all distances between each freeze in this event.
        int64_t sumDistanceMs;
        // Detailed information for the first N freezes in this event.
        Details details;
    };
+
+ struct JudderEvent {
+ // Details are captured for each frame judder up to a limited number. The arrays are
+ // guaranteed to have the same size.
+ struct Details {
+ // The actual render duration of the frame for this judder occurrence.
+ std::vector<int32_t> actualRenderDurationUs;
+ // The content render duration of the frame for this judder occurrence.
+ std::vector<int32_t> contentRenderDurationUs;
+ // The distance from this judder occurrence and the previous judder occurrence.
+ std::vector<int32_t> distanceMs;
+ };
+ // Whether or not the data in this structure is valid.
+ bool valid = false;
+ // The time at which the first judder occurrence for this event was detected.
+ int64_t initialTimeUs;
+ // The total duration from the first judder occurrence to the last judder occurrence in this
+ // event.
+ int32_t durationMs;
+ // The number of judder occurrences in this event.
+ int64_t count;
+ // The sum of all judder scores in this event.
+ int64_t sumScore;
+ // The sum of all distances between each judder occurrence in this event.
+ int64_t sumDistanceMs;
+ // Detailed information for the first N judder occurrences in this event.
+ Details details;
};
VideoRenderQualityTracker();
@@ -164,7 +245,16 @@
void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);
// Called when the system has detected that the frame has actually been rendered to the display.
- void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs);
+ // Returns any freeze events or judder events that were detected.
+ void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+ FreezeEvent *freezeEventOut = nullptr,
+ JudderEvent *judderEventOut = nullptr);
+
+ // Gets and resets data for the current freeze event.
+ FreezeEvent getAndResetFreezeEvent();
+
+ // Gets and resets data for the current judder event.
+ JudderEvent getAndResetJudderEvent();
// Retrieve the metrics.
const VideoRenderQualityMetrics &getMetrics();
@@ -233,13 +323,31 @@
// Process a frame freeze.
static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
- int64_t lastFreezeEndTimeUs, VideoRenderQualityMetrics &m);
+ int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+ VideoRenderQualityMetrics &m, const Configuration &c);
+
+ // Retrieve a freeze event if an event just finished.
+ static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
+ FreezeEvent &e, const VideoRenderQualityMetrics & m,
+ const Configuration &c, FreezeEvent *freezeEventOut);
// Compute a judder score for the previously-rendered frame.
static int64_t computePreviousJudderScore(const FrameDurationUs &actualRenderDurationUs,
const FrameDurationUs &contentRenderDurationUs,
const Configuration &c);
+ // Process a frame judder.
+ static void processJudder(int32_t judderScore, int64_t judderTimeUs,
+ int64_t lastJudderEndTimeUs,
+ const FrameDurationUs &contentDurationUs,
+ const FrameDurationUs &actualDurationUs, JudderEvent &e,
+ VideoRenderQualityMetrics &m, const Configuration &c);
+
+ // Retrieve a judder event if an event just finished.
+ static void maybeCaptureJudderEvent(int64_t actualRenderTimeUs, int64_t lastJudderEndTimeUs,
+ JudderEvent &e, const VideoRenderQualityMetrics & m,
+ const Configuration &c, JudderEvent *judderEventOut);
+
// Check to see if a discontinuity has occurred by examining the content time and the
// app-desired render time. If so, reset some internal state.
bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
@@ -252,7 +360,8 @@
// Update the metrics because a rendered frame was detected.
void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
- int64_t actualRenderTimeUs);
+ int64_t actualRenderTimeUs,
+ FreezeEvent *freezeEventOut, JudderEvent *judderEventOut);
// Configurable elements of the metrics algorithms.
const Configuration mConfiguration;
@@ -269,9 +378,21 @@
// The most recent timestamp of the first frame rendered after the freeze.
int64_t mLastFreezeEndTimeUs;
+ // The most recent timestamp of frame judder.
+ int64_t mLastJudderEndTimeUs;
+
// The render duration of the playback.
int64_t mRenderDurationMs;
+ // True if the previous frame was dropped.
+ bool mWasPreviousFrameDropped;
+
+ // The freeze event that's currently being tracked.
+ FreezeEvent mFreezeEvent;
+
+ // The judder event that's currently being tracked.
+ JudderEvent mJudderEvent;
+
// Frames skipped at the end of playback shouldn't really be considered skipped, therefore keep
// a list of the frames, and process them as skipped frames the next time a frame is rendered.
std::list<int64_t> mPendingSkippedFrameContentTimeUsList;
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index 7f89605..581292e 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -60,10 +60,10 @@
name: "VideoRenderQualityTracker_test",
srcs: ["VideoRenderQualityTracker_test.cpp"],
- // TODO(b/234833109): Figure out why shared_libs linkage causes stack corruption
- static_libs: [
- "libstagefright",
+ shared_libs: [
+ "libbase",
"liblog",
+ "libstagefright",
],
cflags: [
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
index 9f14663..3ff7482 100644
--- a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -26,6 +26,8 @@
using Metrics = VideoRenderQualityMetrics;
using Configuration = VideoRenderQualityTracker::Configuration;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
@@ -48,7 +50,8 @@
void render(std::initializer_list<T> renderDurationMsList) {
for (auto renderDurationMs : renderDurationMsList) {
mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
- mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs);
+ mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+ &mJudderEvent);
mMediaTimeUs += mContentFrameDurationUs;
mClockTimeNs += int64_t(renderDurationMs * 1000 * 1000);
}
@@ -58,7 +61,8 @@
int64_t durationUs = durationMs < 0 ? mContentFrameDurationUs : durationMs * 1000;
for (int i = 0; i < numFrames; ++i) {
mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
- mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs);
+ mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+ &mJudderEvent);
mMediaTimeUs += mContentFrameDurationUs;
mClockTimeNs += durationUs * 1000;
}
@@ -84,11 +88,25 @@
return mVideoRenderQualityTracker.getMetrics();
}
+ FreezeEvent getAndClearFreezeEvent() {
+ FreezeEvent e = std::move(mFreezeEvent);
+ mFreezeEvent.valid = false;
+ return e;
+ }
+
+ JudderEvent getAndClearJudderEvent() {
+ JudderEvent e = std::move(mJudderEvent);
+ mJudderEvent.valid = false;
+ return e;
+ }
+
private:
VideoRenderQualityTracker mVideoRenderQualityTracker;
int64_t mContentFrameDurationUs;
int64_t mMediaTimeUs;
int64_t mClockTimeNs;
+ VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
+ VideoRenderQualityTracker::JudderEvent mJudderEvent;
};
class VideoRenderQualityTrackerTest : public ::testing::Test {
@@ -96,6 +114,247 @@
VideoRenderQualityTrackerTest() {}
};
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withDefaults) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn =
+ [](const std::string &, const std::string &, const std::string &defaultStr) -> std::string {
+ return defaultStr;
+ };
+
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &, const std::string &) -> std::string {
+ return "";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &, const std::string &) -> std::string {
+ return "abc";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+ if (flag == "render_metrics_enabled") {
+ return "fals";
+ } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+ return "fals";
+ } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+ return "100a";
+ } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+ return "10b0";
+ } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+ return "c100";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+ return "1,5300,3b400,123";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+ return "2,5300*400,132";
+ } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+ return "3,12345678901234,5,7";
+ } else if (flag == "render_metrics_freeze_event_max") {
+ return "12345678901234";
+ } else if (flag == "render_metrics_freeze_event_details_max") {
+ return "12345.11321";
+ } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+ return "*!-";
+ } else if (flag == "render_metrics_judder_error_tolerance_us") {
+ return "10.5";
+ } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+ return "abc";
+ } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+ return "123,";
+ } else if (flag == "render_metrics_judder_event_max") {
+ return ",1234";
+ } else if (flag == "render_metrics_judder_event_details_max") {
+ return "10*10";
+ } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+ return "140-a";
+ }
+ return "";
+ }
+ };
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+ Configuration d; // default configuration
+ EXPECT_EQ(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
+ Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+ [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+ if (flag == "render_metrics_enabled") {
+ return "false";
+ } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+ return "false";
+ } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+ return "2000";
+ } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+ return "3000";
+ } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+ return "4000";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+ return "100,200,300,400";
+ } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+ return "1234567890120,1234567890121,1234567890122";
+ } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+ return "500,600,700,800,900";
+ } else if (flag == "render_metrics_freeze_event_max") {
+ return "5000";
+ } else if (flag == "render_metrics_freeze_event_details_max") {
+ return "6000";
+ } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+ return "7000";
+ } else if (flag == "render_metrics_judder_error_tolerance_us") {
+ return "8000";
+ } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+ return "1,2,3,4,5";
+ } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+ return "-1,-2,-3,-4,-5";
+ } else if (flag == "render_metrics_judder_event_max") {
+ return "9000";
+ } else if (flag == "render_metrics_judder_event_details_max") {
+ return "10000";
+ } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+ return "11000";
+ }
+ return "";
+ }
+ };
+
+ Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    // The default configuration here is used to verify we're not configuring the values to the
+ // default - if we are accidentally configuring to the default then we're not necessarily
+ // testing the parsing.
+ Configuration d;
+ EXPECT_EQ(c.enabled, false);
+ EXPECT_NE(c.enabled, d.enabled);
+ EXPECT_EQ(c.areSkippedFramesDropped, false);
+ EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+ EXPECT_EQ(c.maxExpectedContentFrameDurationUs, 2000);
+ EXPECT_NE(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+ EXPECT_EQ(c.frameRateDetectionToleranceUs, 3000);
+ EXPECT_NE(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+ EXPECT_EQ(c.liveContentFrameDropToleranceUs, 4000);
+ EXPECT_NE(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+ {
+ std::vector<int32_t> expected({100,200,300,400});
+ EXPECT_EQ(c.freezeDurationMsHistogramBuckets, expected);
+ EXPECT_NE(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+ }
+ {
+ std::vector<int64_t> expected({1234567890120LL,1234567890121LL,1234567890122LL});
+ EXPECT_EQ(c.freezeDurationMsHistogramToScore, expected);
+ EXPECT_NE(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+ }
+ {
+ std::vector<int32_t> expected({500,600,700,800,900});
+ EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, expected);
+ EXPECT_NE(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+ }
+ EXPECT_EQ(c.freezeEventMax, 5000);
+ EXPECT_NE(c.freezeEventMax, d.freezeEventMax);
+ EXPECT_EQ(c.freezeEventDetailsMax, 6000);
+ EXPECT_NE(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+ EXPECT_EQ(c.freezeEventDistanceToleranceMs, 7000);
+ EXPECT_NE(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+ EXPECT_EQ(c.judderErrorToleranceUs, 8000);
+ EXPECT_NE(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+ {
+ std::vector<int32_t> expected({1,2,3,4,5});
+ EXPECT_EQ(c.judderScoreHistogramBuckets, expected);
+ EXPECT_NE(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+ }
+ {
+ std::vector<int64_t> expected({-1,-2,-3,-4,-5});
+ EXPECT_EQ(c.judderScoreHistogramToScore, expected);
+ EXPECT_NE(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+ }
+ EXPECT_EQ(c.judderEventMax, 9000);
+ EXPECT_NE(c.judderEventMax, d.judderEventMax);
+ EXPECT_EQ(c.judderEventDetailsMax, 10000);
+ EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
+ EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
+ EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
Configuration c;
Helper h(16.66, c);
@@ -232,6 +491,18 @@
EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
}
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
+ Configuration c;
+ Helper h(20, c);
+ h.render(3);
+ EXPECT_EQ(h.getMetrics().freezeRate, 0);
+ h.drop(3);
+ h.render(3);
+ // +1 because the first frame before drops is considered frozen
+ // and then -1 because the last frame has an unknown render duration
+ EXPECT_EQ(h.getMetrics().freezeRate, 4.0 / 8.0);
+}
+
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
Configuration c;
// +17 because freeze durations include the render time of the previous frame
@@ -499,4 +770,146 @@
}
}
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
+ Configuration c;
+ c.freezeEventMax = 5;
+ c.freezeEventDetailsMax = 4;
+ c.freezeEventDistanceToleranceMs = 1000;
+ Helper h(20, c);
+ h.render(10);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(3);
+ h.render(1000 / 20); // +1 because it's unclear if the current frame is frozen
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(1);
+ h.render(10);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(6);
+ h.render(12);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+ h.drop(10);
+ h.render(1000 / 20 + 1); // +1 because it's unclear if the current frame is frozen
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 1);
+ FreezeEvent e = h.getAndClearFreezeEvent();
+ EXPECT_EQ(e.valid, true); // freeze event
+ // -1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.initialTimeUs, 9 * 20 * 1000);
+ // only count the last frame of the first group of rendered frames
+ EXPECT_EQ(e.durationMs, (1 + 3 + 1000 / 20 + 1 + 10 + 6 + 12 + 10) * 20);
+ EXPECT_EQ(e.count, 4);
+ // number of dropped frames
+ // +1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.sumDurationMs, (4 + 2 + 7 + 11) * 20);
+ // number of rendered frames between dropped frames
+ // -1 because the last rendered frame is considered frozen
+ EXPECT_EQ(e.sumDistanceMs, ((1000 / 20) - 1 + 9 + 11) * 20);
+ // +1 for each since the last rendered frame is considered frozen
+ ASSERT_EQ(e.details.durationMs.size(), 4);
+ EXPECT_EQ(e.details.durationMs[0], 4 * 20);
+ EXPECT_EQ(e.details.durationMs[1], 2 * 20);
+ EXPECT_EQ(e.details.durationMs[2], 7 * 20);
+ EXPECT_EQ(e.details.durationMs[3], 11 * 20);
+ // -1 for each since the last rendered frame is considered frozen
+ ASSERT_EQ(e.details.distanceMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs[0], -1);
+ EXPECT_EQ(e.details.distanceMs[1], 1000 - 20);
+ EXPECT_EQ(e.details.distanceMs[2], 9 * 20);
+ EXPECT_EQ(e.details.distanceMs[3], 11 * 20);
+ int64_t previousEventEndTimeUs = e.initialTimeUs + e.durationMs * 1000;
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(4);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 2);
+ e = h.getAndClearFreezeEvent();
+ EXPECT_EQ(e.valid, true);
+ // 1000ms tolerance means 1000ms from the end of the last event to the beginning of this event
+ EXPECT_EQ(e.initialTimeUs, previousEventEndTimeUs + 1000 * 1000);
+ EXPECT_EQ(e.count, 5);
+ // 5 freezes captured in the freeze event, but only 4 details are recorded
+ EXPECT_EQ(e.details.durationMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs.size(), 4);
+ EXPECT_EQ(e.details.distanceMs[0], 1000); // same as the tolerance
+    // The duration across the entire series of freezes is captured, with only 4 details captured
+ // +1 because the first rendered frame is considered frozen (not the 1st dropped frame)
+ EXPECT_EQ(e.durationMs, (1 + 1 + 4 + 1 + 4 + 1 + 4 + 1 + 4 + 1) * 20);
+    // The duration of all 5 freeze events is captured, with only 4 details captured
+ EXPECT_EQ(e.sumDurationMs, (2 + 2 + 2 + 2 + 2) * 20);
+    // The distance of all 5 freeze events is captured, with only 4 details captured
+ EXPECT_EQ(e.sumDistanceMs, (3 + 3 + 3 + 3) * 20);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 3);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 4);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 5);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+ h.drop(1);
+ h.render(1000 / 20 + 1);
+ // The 6th event isn't captured because it exceeds the configured limit
+ EXPECT_EQ(h.getMetrics().freezeEventCount, 6);
+ EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
+ Configuration c;
+ c.judderEventMax = 4;
+ c.judderEventDetailsMax = 3;
+ c.judderEventDistanceToleranceMs = 100;
+ Helper h(20, c);
+ h.render({19, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({15, 19, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({28, 20, 19});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ h.render({13, 20, 20, 20, 20});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+ // Start with judder for the next event at the end of the sequence, because judder is scored
+ // one frame behind, and for combining judder occurrences into events, it's not clear yet if
+ // the current frame has judder or not.
+ h.render({15, 20, 20, 20, 20, 20, 15});
+ JudderEvent e = h.getAndClearJudderEvent();
+ EXPECT_EQ(e.valid, true);
+ EXPECT_EQ(e.initialTimeUs, (19 + 20 + 19) * 1000);
+ EXPECT_EQ(e.durationMs, 15 + 19 + 20 + 19 /**/ + 28 + 20 + 19 /**/ + 13 + 20 * 4 /**/ + 15);
+ EXPECT_EQ(e.count, 4);
+ EXPECT_EQ(e.sumScore, (20 - 15) + (28 - 20) + (20 - 13) + (20 - 15));
+ EXPECT_EQ(e.sumDistanceMs, 19 + 20 + 19 /**/ + 20 + 19 /**/ + 20 * 4);
+ ASSERT_EQ(e.details.actualRenderDurationUs.size(), 3); // 3 details per configured maximum
+ EXPECT_EQ(e.details.actualRenderDurationUs[0], 15 * 1000);
+ EXPECT_EQ(e.details.actualRenderDurationUs[1], 28 * 1000);
+ EXPECT_EQ(e.details.actualRenderDurationUs[2], 13 * 1000);
+ ASSERT_EQ(e.details.contentRenderDurationUs.size(), 3);
+ EXPECT_EQ(e.details.contentRenderDurationUs[0], 20 * 1000);
+ EXPECT_EQ(e.details.contentRenderDurationUs[1], 20 * 1000);
+ EXPECT_EQ(e.details.contentRenderDurationUs[2], 20 * 1000);
+ ASSERT_EQ(e.details.distanceMs.size(), 3);
+ EXPECT_EQ(e.details.distanceMs[0], -1);
+ EXPECT_EQ(e.details.distanceMs[1], 19 + 20 + 19);
+ EXPECT_EQ(e.details.distanceMs[2], 20 + 19);
+ h.render({20, 20, 20, 20, 20, 15});
+ e = h.getAndClearJudderEvent();
+ EXPECT_EQ(e.valid, true);
+ ASSERT_EQ(e.details.distanceMs.size(), 1);
+ EXPECT_EQ(e.details.distanceMs[0], 100); // same as the tolerance
+ h.render({20, 20, 20, 20, 20, 15});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+ h.render({20, 20, 20, 20, 20, 15});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+ h.render({20, 20, 20, 20, 20, 20});
+ EXPECT_EQ(h.getAndClearJudderEvent().valid, false); // max number of judder events exceeded
+}
+
} // android
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 814a327..76270d3 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -829,7 +829,7 @@
/**
* Query the dataspace of the input {@link AImage}.
*
- * Available since API level 33.
+ * Available since API level 34.
*
* @param image the {@link AImage} of interest.
* @param dataSpace the dataspace of the image will be filled here if the method call succeeds.
@@ -843,7 +843,7 @@
* image has been deleted.</li></ul>
*/
media_status_t AImage_getDataSpace(const AImage* image,
- /*out*/int32_t* dataSpace) __INTRODUCED_IN(33);
+ /*out*/int32_t* dataSpace) __INTRODUCED_IN(34);
__END_DECLS
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 992955b..b6dcaae 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -395,7 +395,7 @@
* the combination of {@code hardwareBufferFormat} and {@code dataSpace} for the
* format of the Image that the reader will produce.</p>
*
- * Available since API level 33.
+ * Available since API level 34.
*
* @param width The default width in pixels of the Images that this reader will produce.
* @param height The default height in pixels of the Images that this reader will produce.
@@ -422,7 +422,7 @@
*/
media_status_t AImageReader_newWithDataSpace(int32_t width, int32_t height, uint64_t usage,
int32_t maxImages, uint32_t hardwareBufferFormat, int32_t dataSpace,
- /*out*/ AImageReader** reader) __INTRODUCED_IN(33);
+ /*out*/ AImageReader** reader) __INTRODUCED_IN(34);
/**
* Acquire the next {@link AImage} from the image reader's queue asynchronously.
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4dd81ab..4f045fd 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,13 +13,13 @@
AImageReader_getWindow; # introduced=24
AImageReader_new; # introduced=24
AImageReader_newWithUsage; # introduced=26
- AImageReader_newWithDataSpace; # introduced=Tiramisu
+ AImageReader_newWithDataSpace; # introduced=UpsideDownCake
AImageReader_setBufferRemovedListener; # introduced=26
AImageReader_setImageListener; # introduced=24
AImage_delete; # introduced=24
AImage_deleteAsync; # introduced=26
AImage_getCropRect; # introduced=24
- AImage_getDataSpace; # introduced=Tiramisu
+ AImage_getDataSpace; # introduced=UpsideDownCake
AImage_getFormat; # introduced=24
AImage_getHardwareBuffer; # introduced=26
AImage_getHeight; # introduced=24
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 293117f..84b70c6 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4110,8 +4110,9 @@
// signal actual start of output stream when the render position reported by the kernel
// starts moving.
- if (!mStandby && !mHalStarted && mKernelPositionOnStandby !=
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]) {
+ if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+ && (mKernelPositionOnStandby
+ != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
mHalStarted = true;
mWaitHalStartCV.broadcast();
}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index e652546..694aff3 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,7 +120,7 @@
camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
@@ -153,7 +153,7 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
- ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ colorSpace,
useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
@@ -196,7 +196,7 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
- ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ colorSpace,
useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index f1fc815..6e10f30 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -145,7 +145,6 @@
res = TClientBase::startCameraOps();
if (res != OK) {
TClientBase::finishCameraOps();
- mDevice.clear();
return res;
}
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index adb2217..af1372b 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,6 +524,8 @@
"audiotrack",
// other media
"codec",
+ "freeze",
+ "judder",
"extractor",
"mediadrm",
"mediaparser",
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index ad4cfce..ea76bcd 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -648,6 +648,12 @@
}
AStatsEvent_writeInt32(event, resolutionChangeCount);
+ int32_t componentColorFormat = -1;
+ if (item->getInt32("android.media.mediacodec.component-color-format", &componentColorFormat)) {
+ metrics_proto.set_component_color_format(componentColorFormat);
+ }
+ AStatsEvent_writeInt32(event, componentColorFormat);
+
int64_t firstRenderTimeUs = -1;
item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
int64_t framesReleased = -1;