Video frame scheduler using public APIs

Split the frame-rate PLL and VSYNC-alignment logic out of
VideoFrameScheduler into a new VideoFrameSchedulerBase, and add
VideoFrameScheduler2, a subclass that derives VSYNC timing from the
public NDK AChoreographer/ALooper APIs (frame callbacks sampled on a
dedicated looper thread) and reads the display refresh rate and vsync
offsets via JNI, instead of querying SurfaceFlinger through the private
ISurfaceComposer interface. NuPlayer2's renderer now instantiates
VideoFrameScheduler2, while the legacy NuPlayer renderer keeps the
original VideoFrameScheduler; the corresponding build target links
libandroid and libmedia2_jni_core for the new dependencies.

Bug: 112555500
Test: MediaPlayer2Test
Change-Id: Ie4b21964c761b4350b50b23271b28cf9dcd933ad
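
For readers unfamiliar with the public-API path, here is a minimal sketch
(not the committed code; the class and variable names are hypothetical) of
how VSYNC timestamps can be sampled with the NDK Choreographer and Looper.
This is essentially what VideoFrameScheduler2's ChoreographerThread and
frameCallback in the diff below do; locking, the VsyncTracker period/phase
fit, and error handling are omitted here.

    #include <android/choreographer.h>
    #include <android/looper.h>
    #include <atomic>
    #include <utils/Errors.h>
    #include <utils/Thread.h>

    namespace android {

    // Most recent VSYNC timestamp, in nanoseconds (file scope for brevity).
    static std::atomic<long> gLastVsyncNs(0);

    // Matches AChoreographer_frameCallback: frame time in nanoseconds plus user data.
    static void onFrame(long frameTimeNanos, void* data) {
        gLastVsyncNs = frameTimeNanos;
        // Re-arm so a timestamp arrives for every subsequent display frame.
        AChoreographer_postFrameCallback(AChoreographer_getInstance(), onFrame, data);
    }

    struct VsyncSamplerThread : public Thread {
        VsyncSamplerThread() : Thread(true /* canCallJava */) {}

        status_t readyToRun() override {
            // AChoreographer_getInstance() only works on a thread that owns an ALooper.
            ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
            if (AChoreographer_getInstance() == NULL) {
                return NO_INIT;
            }
            AChoreographer_postFrameCallback(AChoreographer_getInstance(), onFrame, nullptr);
            return OK;
        }

        bool threadLoop() override {
            // Block until the next frame callback is dispatched; loop until requestExit().
            ALooper_pollOnce(-1, nullptr, nullptr, nullptr);
            return true;
        }
    };

    }  // namespace android
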
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
index 9d9e179..e3c9b4b 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
-#include <media/stagefright/VideoFrameScheduler.h>
+#include <media/stagefright/VideoFrameScheduler2.h>
#include <media/MediaCodecBuffer.h>
#include <inttypes.h>
@@ -1436,7 +1436,7 @@
if (mHasVideo) {
if (mVideoScheduler == NULL) {
- mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler = new VideoFrameScheduler2();
mVideoScheduler->init();
}
}
@@ -1779,7 +1779,7 @@
void NuPlayer2::Renderer::onSetVideoFrameRate(float fps) {
if (mVideoScheduler == NULL) {
- mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler = new VideoFrameScheduler2();
}
mVideoScheduler->init(fps);
}
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
index 305af68..484d9b7 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
@@ -28,7 +28,7 @@
class JWakeLock;
struct MediaClock;
class MediaCodecBuffer;
-struct VideoFrameScheduler;
+struct VideoFrameSchedulerBase;
struct NuPlayer2::Renderer : public AHandler {
enum Flags {
@@ -156,7 +156,7 @@
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
- sp<VideoFrameScheduler> mVideoScheduler;
+ sp<VideoFrameSchedulerBase> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index a047975..a521f62 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -28,7 +28,7 @@
class AWakeLock;
struct MediaClock;
class MediaCodecBuffer;
-struct VideoFrameScheduler;
+struct VideoFrameSchedulerBase;
struct NuPlayer::Renderer : public AHandler {
enum Flags {
@@ -156,7 +156,7 @@
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
- sp<VideoFrameScheduler> mVideoScheduler;
+ sp<VideoFrameSchedulerBase> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 9aea88a..ae9d0c0 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -133,6 +133,7 @@
"SurfaceUtils.cpp",
"Utils.cpp",
"ThrottledSource.cpp",
+ "VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
],
@@ -237,7 +238,8 @@
"MediaClock.cpp",
"NdkUtils.cpp",
"Utils.cpp",
- "VideoFrameScheduler.cpp",
+ "VideoFrameSchedulerBase.cpp",
+ "VideoFrameScheduler2.cpp",
"http/ClearMediaHTTP.cpp",
],
@@ -247,10 +249,12 @@
"libnetd_client",
"libutils",
"libstagefright_foundation",
+ "libandroid",
],
static_libs: [
"libmedia_player2_util",
+ "libmedia2_jni_core",
],
export_include_dirs: [
diff --git a/media/libstagefright/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index 9020fc1..4e5b5e2 100644
--- a/media/libstagefright/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014 The Android Open Source Project
+ * Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,8 +19,7 @@
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>
-
-#include <sys/time.h>
+#include <utils/String16.h>
#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
@@ -32,321 +31,14 @@
namespace android {
-static const nsecs_t kNanosIn1s = 1000000000;
-
-template<class T>
-static int compare(const T *lhs, const T *rhs) {
- if (*lhs < *rhs) {
- return -1;
- } else if (*lhs > *rhs) {
- return 1;
- } else {
- return 0;
- }
-}
-
-/* ======================================================================= */
-/* PLL */
-/* ======================================================================= */
-
-static const size_t kMinSamplesToStartPrime = 3;
-static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
-static const size_t kMinSamplesToEstimatePeriod = 3;
-static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
-
-static const size_t kPrecision = 12;
-static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
-static const int64_t kMultiplesThresholdDiv = 4; // 25%
-static const int64_t kReFitThresholdDiv = 100; // 1%
-static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec
-static const nsecs_t kMinPeriod = kNanosIn1s / 120; // 120Hz
-static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec
-
-VideoFrameScheduler::PLL::PLL()
- : mPeriod(-1),
- mPhase(0),
- mPrimed(false),
- mSamplesUsedForPriming(0),
- mLastTime(-1),
- mNumSamples(0) {
-}
-
-void VideoFrameScheduler::PLL::reset(float fps) {
- //test();
-
- mSamplesUsedForPriming = 0;
- mLastTime = -1;
-
- // set up or reset video PLL
- if (fps <= 0.f) {
- mPeriod = -1;
- mPrimed = false;
- } else {
- ALOGV("reset at %.1f fps", fps);
- mPeriod = (nsecs_t)(1e9 / fps + 0.5);
- mPrimed = true;
- }
-
- restart();
-}
-
-// reset PLL but keep previous period estimate
-void VideoFrameScheduler::PLL::restart() {
- mNumSamples = 0;
- mPhase = -1;
-}
-
-#if 0
-
-void VideoFrameScheduler::PLL::test() {
- nsecs_t period = kNanosIn1s / 60;
- mTimes[0] = 0;
- mTimes[1] = period;
- mTimes[2] = period * 3;
- mTimes[3] = period * 4;
- mTimes[4] = period * 7;
- mTimes[5] = period * 8;
- mTimes[6] = period * 10;
- mTimes[7] = period * 12;
- mNumSamples = 8;
- int64_t a, b, err;
- fit(0, period * 12 / 7, 8, &a, &b, &err);
- // a = 0.8(5)+
- // b = -0.14097(2)+
- // err = 0.2750578(703)+
- ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
- (long long)a, (a / (float)(1 << kPrecision)),
- (long long)b, (b / (float)(1 << kPrecision)),
- (long long)err, (err / (float)(1 << (kPrecision * 2))));
-}
-
-#endif
-
-bool VideoFrameScheduler::PLL::fit(
- nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
- int64_t *a, int64_t *b, int64_t *err) {
- if (numSamplesToUse > mNumSamples) {
- numSamplesToUse = mNumSamples;
- }
-
- if ((period >> kPrecision) == 0 ) {
- ALOGW("Period is 0, or after including precision is 0 - would cause div0, returning");
- return false;
- }
-
- int64_t sumX = 0;
- int64_t sumXX = 0;
- int64_t sumXY = 0;
- int64_t sumYY = 0;
- int64_t sumY = 0;
-
- int64_t x = 0; // x usually is in [0..numSamplesToUse)
- nsecs_t lastTime;
- for (size_t i = 0; i < numSamplesToUse; i++) {
- size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
- nsecs_t time = mTimes[ix];
- if (i > 0) {
- x += divRound(time - lastTime, period);
- }
- // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
- // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
- // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
- // while we are not refitting.
- int64_t y = divRound(time - phase, period >> kPrecision);
- sumX += x;
- sumY += y;
- sumXX += x * x;
- sumXY += x * y;
- sumYY += y * y;
- lastTime = time;
- }
-
- int64_t div = (int64_t)numSamplesToUse * sumXX - sumX * sumX;
- if (div == 0) {
- return false;
- }
-
- int64_t a_nom = (int64_t)numSamplesToUse * sumXY - sumX * sumY;
- int64_t b_nom = sumXX * sumY - sumX * sumXY;
- *a = divRound(a_nom, div);
- *b = divRound(b_nom, div);
- // don't use a and b directly as the rounding error is significant
- *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
- ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
- numSamplesToUse,
- (long long)*a, (*a / (float)(1 << kPrecision)),
- (long long)*b, (*b / (float)(1 << kPrecision)),
- (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
- return true;
-}
-
-void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
- if (numSamplesToUse > mNumSamples) {
- numSamplesToUse = mNumSamples;
- }
- CHECK(numSamplesToUse >= 3); // must have at least 3 samples
-
- // estimate video framerate from deltas between timestamps, and
- // 2nd order deltas
- Vector<nsecs_t> deltas;
- nsecs_t lastTime, firstTime;
- for (size_t i = 0; i < numSamplesToUse; ++i) {
- size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
- nsecs_t time = mTimes[index];
- if (i > 0) {
- if (time - lastTime > kMinPeriod) {
- //ALOGV("delta: %lld", (long long)(time - lastTime));
- deltas.push(time - lastTime);
- }
- } else {
- firstTime = time;
- }
- lastTime = time;
- }
- deltas.sort(compare<nsecs_t>);
- size_t numDeltas = deltas.size();
- if (numDeltas > 1) {
- nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
- nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
- for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
- if (deltas[i] > deltaMaxLimit) {
- deltas.resize(i);
- numDeltas = i;
- break;
- }
- }
- for (size_t i = 1; i < numDeltas; ++i) {
- nsecs_t delta2nd = deltas[i] - deltas[i - 1];
- if (delta2nd >= deltaMinLimit) {
- //ALOGV("delta2: %lld", (long long)(delta2nd));
- deltas.push(delta2nd);
- }
- }
- }
-
- // use the one that yields the best match
- int64_t bestScore;
- for (size_t i = 0; i < deltas.size(); ++i) {
- nsecs_t delta = deltas[i];
- int64_t score = 0;
-#if 1
- // simplest score: number of deltas that are near multiples
- size_t matches = 0;
- for (size_t j = 0; j < deltas.size(); ++j) {
- nsecs_t err = periodicError(deltas[j], delta);
- if (err < delta / kMultiplesThresholdDiv) {
- ++matches;
- }
- }
- score = matches;
-#if 0
- // could be weighed by the (1 - normalized error)
- if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
- int64_t a, b, err;
- fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
- err = (1 << (2 * kPrecision)) - err;
- score *= max(0, err);
- }
-#endif
-#else
- // or use the error as a negative score
- if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
- int64_t a, b, err;
- fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
- score = -delta * err;
- }
-#endif
- if (i == 0 || score > bestScore) {
- bestScore = score;
- mPeriod = delta;
- mPhase = firstTime;
- }
- }
- ALOGV("priming[%zu] phase:%lld period:%lld",
- numSamplesToUse, (long long)mPhase, (long long)mPeriod);
-}
-
-nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
- if (mLastTime >= 0
- // if time goes backward, or we skipped rendering
- && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
- restart();
- }
-
- mLastTime = time;
- mTimes[mNumSamples % kHistorySize] = time;
- ++mNumSamples;
-
- bool doFit = time > mRefitAt;
- if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
- prime(kMinSamplesToStopPrime);
- ++mSamplesUsedForPriming;
- doFit = true;
- }
- if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
- if (mPhase < 0) {
- // initialize phase to the current render time
- mPhase = time;
- doFit = true;
- } else if (!doFit) {
- int64_t err = periodicError(time - mPhase, mPeriod);
- doFit = err > mPeriod / kReFitThresholdDiv;
- }
-
- if (doFit) {
- int64_t a, b, err;
- if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
- // samples are not suitable for fitting. this means they are
- // also not suitable for priming.
- ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
- return mPeriod;
- }
-
- mRefitAt = time + kRefitRefreshPeriod;
-
- mPhase += (mPeriod * b) >> kPrecision;
- mPeriod = (mPeriod * a) >> kPrecision;
- ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);
-
- if (err < kErrorThreshold) {
- if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
- mPrimed = true;
- }
- } else {
- mPrimed = false;
- mSamplesUsedForPriming = 0;
- }
- }
- }
- return mPeriod;
-}
-
-nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
- return mPrimed ? mPeriod : 0;
-}
-
-/* ======================================================================= */
-/* Frame Scheduler */
-/* ======================================================================= */
-
-static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz
-static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec
-
-VideoFrameScheduler::VideoFrameScheduler()
- : mVsyncTime(0),
- mVsyncPeriod(0),
- mVsyncRefreshAt(0),
- mLastVsyncTime(-1),
- mTimeCorrection(0) {
+VideoFrameScheduler::VideoFrameScheduler() : VideoFrameSchedulerBase() {
}
void VideoFrameScheduler::updateVsync() {
mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
- mVsyncPeriod = 0;
mVsyncTime = 0;
+ mVsyncPeriod = 0;
- // TODO: schedule frames for the destination surface
- // For now, surface flinger only schedules frames on the primary display
if (mComposer == NULL) {
String16 name("SurfaceFlinger");
sp<IServiceManager> sm = defaultServiceManager();
@@ -368,136 +60,6 @@
}
}
-void VideoFrameScheduler::init(float videoFps) {
- updateVsync();
-
- mLastVsyncTime = -1;
- mTimeCorrection = 0;
-
- mPll.reset(videoFps);
-}
-
-void VideoFrameScheduler::restart() {
- mLastVsyncTime = -1;
- mTimeCorrection = 0;
-
- mPll.restart();
-}
-
-nsecs_t VideoFrameScheduler::getVsyncPeriod() {
- if (mVsyncPeriod > 0) {
- return mVsyncPeriod;
- }
- return kDefaultVsyncPeriod;
-}
-
-float VideoFrameScheduler::getFrameRate() {
- nsecs_t videoPeriod = mPll.getPeriod();
- if (videoPeriod > 0) {
- return 1e9 / videoPeriod;
- }
- return 0.f;
-}
-
-nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
- nsecs_t origRenderTime = renderTime;
-
- nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
- if (now >= mVsyncRefreshAt) {
- updateVsync();
- }
-
- // without VSYNC info, there is nothing to do
- if (mVsyncPeriod == 0) {
- ALOGV("no vsync: render=%lld", (long long)renderTime);
- return renderTime;
- }
-
- // ensure vsync time is well before (corrected) render time
- if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
- mVsyncTime -=
- ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
- }
-
- // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
- // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
- renderTime -= mVsyncPeriod / 2;
-
- const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
- if (videoPeriod > 0) {
- // Smooth out rendering
- size_t N = 12;
- nsecs_t fiveSixthDev =
- abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
- / (mVsyncPeriod / 100);
- // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
- if (fiveSixthDev < 12) { /* 12% / 6 = 2% */
- N = 20;
- }
-
- nsecs_t offset = 0;
- nsecs_t edgeRemainder = 0;
- for (size_t i = 1; i <= N; i++) {
- offset +=
- (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
- edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
- }
- mTimeCorrection += mVsyncPeriod / 2 - offset / (nsecs_t)N;
- renderTime += mTimeCorrection;
- nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
- edgeRemainder = abs(edgeRemainder / (nsecs_t)N - mVsyncPeriod / 2);
- if (edgeRemainder <= mVsyncPeriod / 3) {
- correctionLimit /= 2;
- }
-
- // estimate how many VSYNCs a frame will spend on the display
- nsecs_t nextVsyncTime =
- renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
- if (mLastVsyncTime >= 0) {
- size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
- size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
- bool vsyncsPerFrameAreNearlyConstant =
- periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;
-
- if (mTimeCorrection > correctionLimit &&
- (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
- // remove a VSYNC
- mTimeCorrection -= mVsyncPeriod / 2;
- renderTime -= mVsyncPeriod / 2;
- nextVsyncTime -= mVsyncPeriod;
- if (vsyncsForLastFrame > 0)
- --vsyncsForLastFrame;
- } else if (mTimeCorrection < -correctionLimit &&
- (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
- // add a VSYNC
- mTimeCorrection += mVsyncPeriod / 2;
- renderTime += mVsyncPeriod / 2;
- nextVsyncTime += mVsyncPeriod;
- if (vsyncsForLastFrame < ULONG_MAX)
- ++vsyncsForLastFrame;
- } else if (mTimeCorrection < -correctionLimit * 2
- || mTimeCorrection > correctionLimit * 2) {
- ALOGW("correction beyond limit: %lld vs %lld (vsyncs for last frame: %zu, min: %zu)"
- " restarting. render=%lld",
- (long long)mTimeCorrection, (long long)correctionLimit,
- vsyncsForLastFrame, minVsyncsPerFrame, (long long)origRenderTime);
- restart();
- return origRenderTime;
- }
-
- ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
- }
- mLastVsyncTime = nextVsyncTime;
- }
-
- // align rendertime to the center between VSYNC edges
- renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
- renderTime += mVsyncPeriod / 2;
- ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
- ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
- return renderTime;
-}
-
void VideoFrameScheduler::release() {
mComposer.clear();
}
@@ -507,4 +69,3 @@
}
} // namespace android
-
diff --git a/media/libstagefright/VideoFrameScheduler2.cpp b/media/libstagefright/VideoFrameScheduler2.cpp
new file mode 100644
index 0000000..e02ae7d
--- /dev/null
+++ b/media/libstagefright/VideoFrameScheduler2.cpp
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFrameScheduler2"
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/Trace.h>
+
+#include <algorithm>
+#include <jni.h>
+#include <math.h>
+
+#include <android/choreographer.h>
+#include <android/looper.h>
+#include <media/stagefright/VideoFrameScheduler2.h>
+#include <mediaplayer2/JavaVMHelper.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+static void getVsyncOffset(long* appVsyncOffsetPtr, long* sfVsyncOffsetPtr);
+
+/* ======================================================================= */
+/* VsyncTracker */
+/* ======================================================================= */
+
+class VsyncTracker : public RefBase{
+public:
+ VsyncTracker();
+ ~VsyncTracker() {}
+ long getVsyncPeriod();
+ long getVsyncTime(long periodOffset);
+ void addSample(long timestamp);
+
+private:
+ static const int kMaxSamples = 32;
+ static const int kMinSamplesForUpdate = 6;
+ int mNumSamples;
+ int mFirstSample;
+ long mReferenceTime;
+ long mPhase;
+ long mPeriod;
+ long mTimestampSamples[kMaxSamples];
+ Mutex mLock;
+
+ void updateModelLocked();
+};
+
+VsyncTracker::VsyncTracker()
+ : mNumSamples(0),
+ mFirstSample(0),
+ mReferenceTime(0),
+ mPhase(0),
+ mPeriod(0) {
+ for (int i = 0; i < kMaxSamples; i++) {
+ mTimestampSamples[i] = 0;
+ }
+}
+
+long VsyncTracker::getVsyncPeriod() {
+ Mutex::Autolock dataLock(mLock);
+ return mPeriod;
+}
+
+long VsyncTracker::getVsyncTime(long periodOffset) {
+ Mutex::Autolock dataLock(mLock);
+ const long now = systemTime();
+ long phase = mReferenceTime + mPhase;
+ return (((now - phase) / mPeriod) + periodOffset + 1) * mPeriod + phase;
+}
+
+void VsyncTracker::addSample(long timestamp) {
+ Mutex::Autolock dataLock(mLock);
+ if (mNumSamples == 0) {
+ mPhase = 0;
+ mReferenceTime = timestamp;
+ }
+ int idx = (mFirstSample + mNumSamples) % kMaxSamples;
+ mTimestampSamples[idx] = timestamp;
+ if (mNumSamples < kMaxSamples) {
+ mNumSamples++;
+ } else {
+ mFirstSample = (mFirstSample + 1) % kMaxSamples;
+ }
+ updateModelLocked();
+}
+
+void VsyncTracker::updateModelLocked() {
+ if (mNumSamples < kMinSamplesForUpdate) {
+ return;
+ }
+ long durationSum = 0;
+ long minDuration = LONG_MAX;
+ long maxDuration = 0;
+
+ for (int i = 1; i < mNumSamples; i++) {
+ int idx = (mFirstSample + i) % kMaxSamples;
+ int prev = (idx + kMaxSamples - 1) % kMaxSamples;
+ long duration = mTimestampSamples[idx] - mTimestampSamples[prev];
+ durationSum += duration;
+ minDuration = min(minDuration, duration);
+ maxDuration = max(maxDuration, duration);
+ }
+
+ durationSum -= (minDuration + maxDuration);
+ mPeriod = durationSum / (mNumSamples - 3);
+
+ double sampleAvgX = 0.0;
+ double sampleAvgY = 0.0;
+ double scale = 2.0 * M_PI / (double) mPeriod;
+
+ for (int i = 1; i < mNumSamples; i++) {
+ int idx = (mFirstSample + i) % kMaxSamples;
+ long sample = mTimestampSamples[idx] - mReferenceTime;
+ double samplePhase = (double) (sample % mPeriod) * scale;
+ sampleAvgX += cos(samplePhase);
+ sampleAvgY += sin(samplePhase);
+ }
+
+ sampleAvgX /= (double) mNumSamples - 1.0;
+ sampleAvgY /= (double) mNumSamples - 1.0;
+ mPhase = (long) (atan2(sampleAvgY, sampleAvgX) / scale);
+}
+
+static void frameCallback(long frameTimeNanos, void* data) {
+ if (data == NULL) {
+ return;
+ }
+ sp<VsyncTracker> vsyncTracker(static_cast<VsyncTracker*>(data));
+ vsyncTracker->addSample(frameTimeNanos);
+ AChoreographer_postFrameCallback(AChoreographer_getInstance(),
+ frameCallback, static_cast<void*>(vsyncTracker.get()));
+}
+
+/* ======================================================================= */
+/* JNI */
+/* ======================================================================= */
+
+static void getVsyncOffset(long* appVsyncOffsetPtr, long* sfVsyncOffsetPtr) {
+ static const long kOneMillisecInNanosec = 1000000;
+ static const long kOneSecInNanosec = kOneMillisecInNanosec * 1000;
+
+ JNIEnv *env = JavaVMHelper::getJNIEnv();
+ jclass jDisplayManagerGlobalCls = env->FindClass(
+ "android/hardware/display/DisplayManagerGlobal");
+ jclass jDisplayCls = env->FindClass("android/view/Display");
+
+ jmethodID jGetInstance = env->GetStaticMethodID(jDisplayManagerGlobalCls,
+ "getInstance", "()Landroid/hardware/display/DisplayManagerGlobal;");
+ jobject javaDisplayManagerGlobalObj = env->CallStaticObjectMethod(
+ jDisplayManagerGlobalCls, jGetInstance);
+
+ jfieldID jDEFAULT_DISPLAY = env->GetStaticFieldID(jDisplayCls, "DEFAULT_DISPLAY", "I");
+ jint DEFAULT_DISPLAY = env->GetStaticIntField(jDisplayCls, jDEFAULT_DISPLAY);
+
+ jmethodID jgetRealDisplay = env->GetMethodID(jDisplayManagerGlobalCls,
+ "getRealDisplay", "(I)Landroid/view/Display;");
+ jobject javaDisplayObj = env->CallObjectMethod(
+ javaDisplayManagerGlobalObj, jgetRealDisplay, DEFAULT_DISPLAY);
+
+ jmethodID jGetRefreshRate = env->GetMethodID(jDisplayCls, "getRefreshRate", "()F");
+ jfloat javaRefreshRate = env->CallFloatMethod(javaDisplayObj, jGetRefreshRate);
+ long vsyncPeriod = (long) (kOneSecInNanosec / (float) javaRefreshRate);
+
+ jmethodID jGetAppVsyncOffsetNanos = env->GetMethodID(
+ jDisplayCls, "getAppVsyncOffsetNanos", "()J");
+ jlong javaAppVsyncOffset = env->CallLongMethod(javaDisplayObj, jGetAppVsyncOffsetNanos);
+ *appVsyncOffsetPtr = (long) javaAppVsyncOffset;
+
+ jmethodID jGetPresentationDeadlineNanos = env->GetMethodID(
+ jDisplayCls, "getPresentationDeadlineNanos", "()J");
+ jlong javaPresentationDeadline = env->CallLongMethod(
+ javaDisplayObj, jGetPresentationDeadlineNanos);
+
+ *sfVsyncOffsetPtr = vsyncPeriod - ((long) javaPresentationDeadline - kOneMillisecInNanosec);
+}
+
+/* ======================================================================= */
+/* Choreographer Thread */
+/* ======================================================================= */
+
+struct ChoreographerThread : public Thread {
+ ChoreographerThread(bool canCallJava);
+ status_t init(void* data);
+ virtual status_t readyToRun() override;
+ virtual bool threadLoop() override;
+
+protected:
+ virtual ~ChoreographerThread() {}
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(ChoreographerThread);
+ void* mData;
+};
+
+ChoreographerThread::ChoreographerThread(bool canCallJava) : Thread(canCallJava) {
+}
+
+status_t ChoreographerThread::init(void* data) {
+ if (data == NULL) {
+ return NO_INIT;
+ }
+ mData = data;
+ return OK;
+}
+
+status_t ChoreographerThread::readyToRun() {
+ ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
+ if (AChoreographer_getInstance() == NULL) {
+ return NO_INIT;
+ }
+ AChoreographer_postFrameCallback(AChoreographer_getInstance(), frameCallback, mData);
+ return OK;
+}
+
+bool ChoreographerThread::threadLoop() {
+ ALooper_pollOnce(-1, nullptr, nullptr, nullptr);
+ return true;
+}
+
+/* ======================================================================= */
+/* Frame Scheduler */
+/* ======================================================================= */
+
+VideoFrameScheduler2::VideoFrameScheduler2() : VideoFrameSchedulerBase() {
+
+ getVsyncOffset(&mAppVsyncOffset, &mSfVsyncOffset);
+
+ Mutex::Autolock threadLock(mLock);
+ mChoreographerThread = new ChoreographerThread(true);
+
+ mVsyncTracker = new VsyncTracker();
+ if (mChoreographerThread->init(static_cast<void*>(mVsyncTracker.get())) != OK) {
+ mChoreographerThread.clear();
+ }
+ if (mChoreographerThread != NULL && mChoreographerThread->run("Choreographer") != OK) {
+ mChoreographerThread.clear();
+ }
+}
+
+void VideoFrameScheduler2::updateVsync() {
+ mVsyncTime = 0;
+ mVsyncPeriod = 0;
+
+ if (mVsyncTracker != NULL) {
+ mVsyncPeriod = mVsyncTracker->getVsyncPeriod();
+ mVsyncTime = mVsyncTracker->getVsyncTime(mSfVsyncOffset - mAppVsyncOffset);
+ }
+ mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
+}
+
+void VideoFrameScheduler2::release() {
+ // Do not change order
+ {
+ Mutex::Autolock threadLock(mLock);
+ mChoreographerThread->requestExitAndWait();
+ mChoreographerThread.clear();
+ }
+
+ mVsyncTracker.clear();
+}
+
+VideoFrameScheduler2::~VideoFrameScheduler2() {
+ release();
+}
+
+} // namespace android
diff --git a/media/libstagefright/VideoFrameSchedulerBase.cpp b/media/libstagefright/VideoFrameSchedulerBase.cpp
new file mode 100644
index 0000000..77107ff
--- /dev/null
+++ b/media/libstagefright/VideoFrameSchedulerBase.cpp
@@ -0,0 +1,465 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFrameSchedulerBase"
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
+#include <utils/Vector.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/VideoFrameSchedulerBase.h>
+
+namespace android {
+
+template<class T>
+static int compare(const T *lhs, const T *rhs) {
+ if (*lhs < *rhs) {
+ return -1;
+ } else if (*lhs > *rhs) {
+ return 1;
+ } else {
+ return 0;
+ }
+}
+
+/* ======================================================================= */
+/* PLL */
+/* ======================================================================= */
+
+static const size_t kMinSamplesToStartPrime = 3;
+static const size_t kMinSamplesToStopPrime = VideoFrameSchedulerBase::kHistorySize;
+static const size_t kMinSamplesToEstimatePeriod = 3;
+static const size_t kMaxSamplesToEstimatePeriod = VideoFrameSchedulerBase::kHistorySize;
+
+static const size_t kPrecision = 12;
+static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kMultiplesThresholdDiv = 4; // 25%
+static const int64_t kReFitThresholdDiv = 100; // 1%
+static const nsecs_t kMaxAllowedFrameSkip = VideoFrameSchedulerBase::kNanosIn1s; // 1 sec
+static const nsecs_t kMinPeriod = VideoFrameSchedulerBase::kNanosIn1s / 120; // 120Hz
+static const nsecs_t kRefitRefreshPeriod = 10 * VideoFrameSchedulerBase::kNanosIn1s; // 10 sec
+
+VideoFrameSchedulerBase::PLL::PLL()
+ : mPeriod(-1),
+ mPhase(0),
+ mPrimed(false),
+ mSamplesUsedForPriming(0),
+ mLastTime(-1),
+ mNumSamples(0) {
+}
+
+void VideoFrameSchedulerBase::PLL::reset(float fps) {
+ //test();
+
+ mSamplesUsedForPriming = 0;
+ mLastTime = -1;
+
+ // set up or reset video PLL
+ if (fps <= 0.f) {
+ mPeriod = -1;
+ mPrimed = false;
+ } else {
+ ALOGV("reset at %.1f fps", fps);
+ mPeriod = (nsecs_t)(1e9 / fps + 0.5);
+ mPrimed = true;
+ }
+
+ restart();
+}
+
+// reset PLL but keep previous period estimate
+void VideoFrameSchedulerBase::PLL::restart() {
+ mNumSamples = 0;
+ mPhase = -1;
+}
+
+#if 0
+
+void VideoFrameSchedulerBase::PLL::test() {
+ nsecs_t period = VideoFrameSchedulerBase::kNanosIn1s / 60;
+ mTimes[0] = 0;
+ mTimes[1] = period;
+ mTimes[2] = period * 3;
+ mTimes[3] = period * 4;
+ mTimes[4] = period * 7;
+ mTimes[5] = period * 8;
+ mTimes[6] = period * 10;
+ mTimes[7] = period * 12;
+ mNumSamples = 8;
+ int64_t a, b, err;
+ fit(0, period * 12 / 7, 8, &a, &b, &err);
+ // a = 0.8(5)+
+ // b = -0.14097(2)+
+ // err = 0.2750578(703)+
+ ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ (long long)a, (a / (float)(1 << kPrecision)),
+ (long long)b, (b / (float)(1 << kPrecision)),
+ (long long)err, (err / (float)(1 << (kPrecision * 2))));
+}
+
+#endif
+
+bool VideoFrameSchedulerBase::PLL::fit(
+ nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
+ int64_t *a, int64_t *b, int64_t *err) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+
+ if ((period >> kPrecision) == 0 ) {
+ ALOGW("Period is 0, or after including precision is 0 - would cause div0, returning");
+ return false;
+ }
+
+ int64_t sumX = 0;
+ int64_t sumXX = 0;
+ int64_t sumXY = 0;
+ int64_t sumYY = 0;
+ int64_t sumY = 0;
+
+ int64_t x = 0; // x usually is in [0..numSamplesToUse)
+ nsecs_t lastTime;
+ for (size_t i = 0; i < numSamplesToUse; i++) {
+ size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[ix];
+ if (i > 0) {
+ x += divRound(time - lastTime, period);
+ }
+ // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
+ // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
+ // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
+ // while we are not refitting.
+ int64_t y = divRound(time - phase, period >> kPrecision);
+ sumX += x;
+ sumY += y;
+ sumXX += x * x;
+ sumXY += x * y;
+ sumYY += y * y;
+ lastTime = time;
+ }
+
+ int64_t div = (int64_t)numSamplesToUse * sumXX - sumX * sumX;
+ if (div == 0) {
+ return false;
+ }
+
+ int64_t a_nom = (int64_t)numSamplesToUse * sumXY - sumX * sumY;
+ int64_t b_nom = sumXX * sumY - sumX * sumXY;
+ *a = divRound(a_nom, div);
+ *b = divRound(b_nom, div);
+ // don't use a and b directly as the rounding error is significant
+ *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
+ ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ numSamplesToUse,
+ (long long)*a, (*a / (float)(1 << kPrecision)),
+ (long long)*b, (*b / (float)(1 << kPrecision)),
+ (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
+ return true;
+}
+
+void VideoFrameSchedulerBase::PLL::prime(size_t numSamplesToUse) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+ CHECK(numSamplesToUse >= 3); // must have at least 3 samples
+
+ // estimate video framerate from deltas between timestamps, and
+ // 2nd order deltas
+ Vector<nsecs_t> deltas;
+ nsecs_t lastTime, firstTime;
+ for (size_t i = 0; i < numSamplesToUse; ++i) {
+ size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[index];
+ if (i > 0) {
+ if (time - lastTime > kMinPeriod) {
+ //ALOGV("delta: %lld", (long long)(time - lastTime));
+ deltas.push(time - lastTime);
+ }
+ } else {
+ firstTime = time;
+ }
+ lastTime = time;
+ }
+ deltas.sort(compare<nsecs_t>);
+ size_t numDeltas = deltas.size();
+ if (numDeltas > 1) {
+ nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
+ nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
+ for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
+ if (deltas[i] > deltaMaxLimit) {
+ deltas.resize(i);
+ numDeltas = i;
+ break;
+ }
+ }
+ for (size_t i = 1; i < numDeltas; ++i) {
+ nsecs_t delta2nd = deltas[i] - deltas[i - 1];
+ if (delta2nd >= deltaMinLimit) {
+ //ALOGV("delta2: %lld", (long long)(delta2nd));
+ deltas.push(delta2nd);
+ }
+ }
+ }
+
+ // use the one that yields the best match
+ int64_t bestScore;
+ for (size_t i = 0; i < deltas.size(); ++i) {
+ nsecs_t delta = deltas[i];
+ int64_t score = 0;
+#if 1
+ // simplest score: number of deltas that are near multiples
+ size_t matches = 0;
+ for (size_t j = 0; j < deltas.size(); ++j) {
+ nsecs_t err = periodicError(deltas[j], delta);
+ if (err < delta / kMultiplesThresholdDiv) {
+ ++matches;
+ }
+ }
+ score = matches;
+#if 0
+ // could be weighed by the (1 - normalized error)
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ err = (1 << (2 * kPrecision)) - err;
+ score *= max(0, err);
+ }
+#endif
+#else
+ // or use the error as a negative score
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ score = -delta * err;
+ }
+#endif
+ if (i == 0 || score > bestScore) {
+ bestScore = score;
+ mPeriod = delta;
+ mPhase = firstTime;
+ }
+ }
+ ALOGV("priming[%zu] phase:%lld period:%lld",
+ numSamplesToUse, (long long)mPhase, (long long)mPeriod);
+}
+
+nsecs_t VideoFrameSchedulerBase::PLL::addSample(nsecs_t time) {
+ if (mLastTime >= 0
+ // if time goes backward, or we skipped rendering
+ && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
+ restart();
+ }
+
+ mLastTime = time;
+ mTimes[mNumSamples % kHistorySize] = time;
+ ++mNumSamples;
+
+ bool doFit = time > mRefitAt;
+ if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
+ prime(kMinSamplesToStopPrime);
+ ++mSamplesUsedForPriming;
+ doFit = true;
+ }
+ if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
+ if (mPhase < 0) {
+ // initialize phase to the current render time
+ mPhase = time;
+ doFit = true;
+ } else if (!doFit) {
+ int64_t err = periodicError(time - mPhase, mPeriod);
+ doFit = err > mPeriod / kReFitThresholdDiv;
+ }
+
+ if (doFit) {
+ int64_t a, b, err;
+ if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
+ // samples are not suitable for fitting. this means they are
+ // also not suitable for priming.
+ ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
+ return mPeriod;
+ }
+
+ mRefitAt = time + kRefitRefreshPeriod;
+
+ mPhase += (mPeriod * b) >> kPrecision;
+ mPeriod = (mPeriod * a) >> kPrecision;
+ ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);
+
+ if (err < kErrorThreshold) {
+ if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
+ mPrimed = true;
+ }
+ } else {
+ mPrimed = false;
+ mSamplesUsedForPriming = 0;
+ }
+ }
+ }
+ return mPeriod;
+}
+
+nsecs_t VideoFrameSchedulerBase::PLL::getPeriod() const {
+ return mPrimed ? mPeriod : 0;
+}
+
+/* ======================================================================= */
+/* Frame Scheduler */
+/* ======================================================================= */
+
+VideoFrameSchedulerBase::VideoFrameSchedulerBase()
+ : mVsyncTime(0),
+ mVsyncPeriod(0),
+ mVsyncRefreshAt(0),
+ mLastVsyncTime(-1),
+ mTimeCorrection(0) {
+}
+
+void VideoFrameSchedulerBase::init(float videoFps) {
+ updateVsync();
+
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.reset(videoFps);
+}
+
+void VideoFrameSchedulerBase::restart() {
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.restart();
+}
+
+nsecs_t VideoFrameSchedulerBase::getVsyncPeriod() {
+ if (mVsyncPeriod > 0) {
+ return mVsyncPeriod;
+ }
+ return kDefaultVsyncPeriod;
+}
+
+float VideoFrameSchedulerBase::getFrameRate() {
+ nsecs_t videoPeriod = mPll.getPeriod();
+ if (videoPeriod > 0) {
+ return 1e9 / videoPeriod;
+ }
+ return 0.f;
+}
+
+nsecs_t VideoFrameSchedulerBase::schedule(nsecs_t renderTime) {
+ nsecs_t origRenderTime = renderTime;
+
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+ if (now >= mVsyncRefreshAt) {
+ updateVsync();
+ }
+
+ // without VSYNC info, there is nothing to do
+ if (mVsyncPeriod == 0) {
+ ALOGV("no vsync: render=%lld", (long long)renderTime);
+ return renderTime;
+ }
+
+ // ensure vsync time is well before (corrected) render time
+ if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
+ mVsyncTime -=
+ ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
+ }
+
+ // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
+ // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
+ renderTime -= mVsyncPeriod / 2;
+
+ const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
+ if (videoPeriod > 0) {
+ // Smooth out rendering
+ size_t N = 12;
+ nsecs_t fiveSixthDev =
+ abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
+ / (mVsyncPeriod / 100);
+ // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
+ if (fiveSixthDev < 12) { /* 12% / 6 = 2% */
+ N = 20;
+ }
+
+ nsecs_t offset = 0;
+ nsecs_t edgeRemainder = 0;
+ for (size_t i = 1; i <= N; i++) {
+ offset +=
+ (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
+ edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
+ }
+ mTimeCorrection += mVsyncPeriod / 2 - offset / (nsecs_t)N;
+ renderTime += mTimeCorrection;
+ nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
+ edgeRemainder = abs(edgeRemainder / (nsecs_t)N - mVsyncPeriod / 2);
+ if (edgeRemainder <= mVsyncPeriod / 3) {
+ correctionLimit /= 2;
+ }
+
+ // estimate how many VSYNCs a frame will spend on the display
+ nsecs_t nextVsyncTime =
+ renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
+ if (mLastVsyncTime >= 0) {
+ size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
+ size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
+ bool vsyncsPerFrameAreNearlyConstant =
+ periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;
+
+ if (mTimeCorrection > correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
+ // remove a VSYNC
+ mTimeCorrection -= mVsyncPeriod / 2;
+ renderTime -= mVsyncPeriod / 2;
+ nextVsyncTime -= mVsyncPeriod;
+ if (vsyncsForLastFrame > 0)
+ --vsyncsForLastFrame;
+ } else if (mTimeCorrection < -correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
+ // add a VSYNC
+ mTimeCorrection += mVsyncPeriod / 2;
+ renderTime += mVsyncPeriod / 2;
+ nextVsyncTime += mVsyncPeriod;
+ if (vsyncsForLastFrame < ULONG_MAX)
+ ++vsyncsForLastFrame;
+ } else if (mTimeCorrection < -correctionLimit * 2
+ || mTimeCorrection > correctionLimit * 2) {
+ ALOGW("correction beyond limit: %lld vs %lld (vsyncs for last frame: %zu, min: %zu)"
+ " restarting. render=%lld",
+ (long long)mTimeCorrection, (long long)correctionLimit,
+ vsyncsForLastFrame, minVsyncsPerFrame, (long long)origRenderTime);
+ restart();
+ return origRenderTime;
+ }
+
+ ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
+ }
+ mLastVsyncTime = nextVsyncTime;
+ }
+
+ // align rendertime to the center between VSYNC edges
+ renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
+ renderTime += mVsyncPeriod / 2;
+ ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
+ ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
+ return renderTime;
+}
+
+VideoFrameSchedulerBase::~VideoFrameSchedulerBase() {}
+
+} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
index 9d97dfd..fcfcbec 100644
--- a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2014, The Android Open Source Project
+ * Copyright 2018, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,87 +17,24 @@
#ifndef VIDEO_FRAME_SCHEDULER_H_
#define VIDEO_FRAME_SCHEDULER_H_
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/VideoFrameSchedulerBase.h>
namespace android {
class ISurfaceComposer;
-struct VideoFrameScheduler : public RefBase {
+struct VideoFrameScheduler : public VideoFrameSchedulerBase {
VideoFrameScheduler();
-
- // (re)initialize scheduler
- void init(float videoFps = -1);
- // use in case of video render-time discontinuity, e.g. seek
- void restart();
- // get adjusted nanotime for a video frame render at renderTime
- nsecs_t schedule(nsecs_t renderTime);
-
- // returns the vsync period for the main display
- nsecs_t getVsyncPeriod();
-
- // returns the current frames-per-second, or 0.f if not primed
- float getFrameRate();
-
- void release();
-
- static const size_t kHistorySize = 8;
+ void release() override;
protected:
virtual ~VideoFrameScheduler();
private:
- struct PLL {
- PLL();
-
- // reset PLL to new PLL
- void reset(float fps = -1);
- // keep current estimate, but restart phase
- void restart();
- // returns period or 0 if not yet primed
- nsecs_t addSample(nsecs_t time);
- nsecs_t getPeriod() const;
-
- private:
- nsecs_t mPeriod;
- nsecs_t mPhase;
-
- bool mPrimed; // have an estimate for the period
- size_t mSamplesUsedForPriming;
-
- nsecs_t mLastTime; // last input time
- nsecs_t mRefitAt; // next input time to fit at
-
- size_t mNumSamples; // can go past kHistorySize
- nsecs_t mTimes[kHistorySize];
-
- void test();
- // returns whether fit was successful
- bool fit(nsecs_t phase, nsecs_t period, size_t numSamples,
- int64_t *a, int64_t *b, int64_t *err);
- void prime(size_t numSamples);
- };
-
- void updateVsync();
-
- nsecs_t mVsyncTime; // vsync timing from display
- nsecs_t mVsyncPeriod;
- nsecs_t mVsyncRefreshAt; // next time to refresh timing info
-
- nsecs_t mLastVsyncTime; // estimated vsync time for last frame
- nsecs_t mTimeCorrection; // running adjustment
-
- PLL mPll; // PLL for video frame rate based on render time
-
+ void updateVsync() override;
sp<ISurfaceComposer> mComposer;
-
- DISALLOW_EVIL_CONSTRUCTORS(VideoFrameScheduler);
};
} // namespace android
#endif // VIDEO_FRAME_SCHEDULER_H_
-
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h
new file mode 100644
index 0000000..be911cc
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler2.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FRAME_SCHEDULER_2_H_
+#define VIDEO_FRAME_SCHEDULER_2_H_
+
+#include <media/stagefright/VideoFrameSchedulerBase.h>
+
+namespace android {
+
+class VsyncTracker;
+struct ChoreographerThread;
+
+struct VideoFrameScheduler2 : public VideoFrameSchedulerBase {
+ VideoFrameScheduler2();
+ void release() override;
+
+protected:
+ virtual ~VideoFrameScheduler2();
+
+private:
+ void updateVsync() override;
+
+ long mAppVsyncOffset;
+ long mSfVsyncOffset;
+ sp<VsyncTracker> mVsyncTracker;
+ sp<ChoreographerThread> mChoreographerThread;
+ Mutex mLock;
+};
+
+} // namespace android
+
+#endif // VIDEO_FRAME_SCHEDULER_2_H_
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h b/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h
new file mode 100644
index 0000000..ff5f716
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoFrameSchedulerBase.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FRAME_SCHEDULER_BASE_H_
+#define VIDEO_FRAME_SCHEDULER_BASE_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct VideoFrameSchedulerBase : public RefBase {
+ VideoFrameSchedulerBase();
+
+ // (re)initialize scheduler
+ void init(float videoFps = -1);
+ // use in case of video render-time discontinuity, e.g. seek
+ void restart();
+ // get adjusted nanotime for a video frame render at renderTime
+ nsecs_t schedule(nsecs_t renderTime);
+
+ // returns the vsync period for the main display
+ nsecs_t getVsyncPeriod();
+
+ // returns the current frames-per-second, or 0.f if not primed
+ float getFrameRate();
+
+ virtual void release() = 0;
+
+ static const size_t kHistorySize = 8;
+ static const nsecs_t kNanosIn1s = 1000000000;
+ static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz
+ static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec
+
+protected:
+ virtual ~VideoFrameSchedulerBase();
+
+ nsecs_t mVsyncTime; // vsync timing from display
+ nsecs_t mVsyncPeriod;
+ nsecs_t mVsyncRefreshAt; // next time to refresh timing info
+
+private:
+ struct PLL {
+ PLL();
+
+ // reset PLL to new PLL
+ void reset(float fps = -1);
+ // keep current estimate, but restart phase
+ void restart();
+ // returns period or 0 if not yet primed
+ nsecs_t addSample(nsecs_t time);
+ nsecs_t getPeriod() const;
+
+ private:
+ nsecs_t mPeriod;
+ nsecs_t mPhase;
+
+ bool mPrimed; // have an estimate for the period
+ size_t mSamplesUsedForPriming;
+
+ nsecs_t mLastTime; // last input time
+ nsecs_t mRefitAt; // next input time to fit at
+
+ size_t mNumSamples; // can go past kHistorySize
+ nsecs_t mTimes[kHistorySize];
+
+ void test();
+ // returns whether fit was successful
+ bool fit(nsecs_t phase, nsecs_t period, size_t numSamples,
+ int64_t *a, int64_t *b, int64_t *err);
+ void prime(size_t numSamples);
+ };
+
+ virtual void updateVsync() = 0;
+
+ nsecs_t mLastVsyncTime; // estimated vsync time for last frame
+ nsecs_t mTimeCorrection; // running adjustment
+ PLL mPll; // PLL for video frame rate based on render time
+
+ DISALLOW_EVIL_CONSTRUCTORS(VideoFrameSchedulerBase);
+};
+
+} // namespace android
+
+#endif // VIDEO_FRAME_SCHEDULER_BASE_H_
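
For context, a brief usage sketch of the base-class API defined above,
mirroring how NuPlayer2::Renderer drives the scheduler; the function and
variable names here are illustrative and are not part of the change.

    #include <media/stagefright/VideoFrameScheduler2.h>
    #include <utils/Timers.h>

    // Illustrative only: schedule one video frame through the new scheduler.
    static android::sp<android::VideoFrameSchedulerBase> sScheduler;

    nsecs_t scheduleOneFrame(nsecs_t desiredRenderTimeNs, float videoFps) {
        if (sScheduler == NULL) {
            sScheduler = new android::VideoFrameScheduler2();
            sScheduler->init(videoFps);  // or init() with no fps, letting the PLL prime itself
        }
        // Returns the render time snapped to the middle of a VSYNC interval.
        return sScheduler->schedule(desiredRenderTimeNs);
    }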