/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "VideoFrameScheduler"
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>

#include <sys/time.h>

#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayStatInfo.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/VideoFrameScheduler.h>

namespace android {

static const nsecs_t kNanosIn1s = 1000000000;

template<class T>
static int compare(const T *lhs, const T *rhs) {
    if (*lhs < *rhs) {
        return -1;
    } else if (*lhs > *rhs) {
        return 1;
    } else {
        return 0;
    }
}

/* ======================================================================= */
/*                                   PLL                                   */
/* ======================================================================= */

static const size_t kMinSamplesToStartPrime = 3;
static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
static const size_t kMinSamplesToEstimatePeriod = 3;
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;

static const size_t kPrecision = 12;
static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4;            // 25%
static const int64_t kReFitThresholdDiv = 100;              // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec
static const nsecs_t kMinPeriod = kNanosIn1s / 120;         // 120Hz
static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec

VideoFrameScheduler::PLL::PLL()
    : mPeriod(-1),
      mPhase(0),
      mPrimed(false),
      mSamplesUsedForPriming(0),
      mLastTime(-1),
      mNumSamples(0) {
}

void VideoFrameScheduler::PLL::reset(float fps) {
    //test();

    mSamplesUsedForPriming = 0;
    mLastTime = -1;

    // set up or reset video PLL
    if (fps <= 0.f) {
        mPeriod = -1;
        mPrimed = false;
    } else {
        ALOGV("reset at %.1f fps", fps);
        mPeriod = (nsecs_t)(1e9 / fps + 0.5);
        mPrimed = true;
    }

    restart();
}

// reset PLL but keep previous period estimate
void VideoFrameScheduler::PLL::restart() {
    mNumSamples = 0;
    mPhase = -1;
}

#if 0

void VideoFrameScheduler::PLL::test() {
    nsecs_t period = kNanosIn1s / 60;
    mTimes[0] = 0;
    mTimes[1] = period;
    mTimes[2] = period * 3;
    mTimes[3] = period * 4;
    mTimes[4] = period * 7;
    mTimes[5] = period * 8;
    mTimes[6] = period * 10;
    mTimes[7] = period * 12;
    mNumSamples = 8;
    int64_t a, b, err;
    fit(0, period * 12 / 7, 8, &a, &b, &err);
    // a = 0.8(5)+
    // b = -0.14097(2)+
    // err = 0.2750578(703)+
    ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            (long long)a, (a / (float)(1 << kPrecision)),
            (long long)b, (b / (float)(1 << kPrecision)),
            (long long)err, (err / (float)(1 << (kPrecision * 2))));
}

#endif

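// Least-squares fit of the recorded sample times to a line y = a*x + b, where
// x counts elapsed periods between samples and y is the sample time relative
// to 'phase', measured in units of 'period'.  a (the ratio of the actual to
// the assumed period) and b (the phase offset in periods) are returned in
// fixed point with kPrecision fractional bits; err is the residual sum of
// squares scaled by 1 << (kPrecision * 2).  Returns false if the samples
// cannot be fit (period too small, or no spread in x).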
bool VideoFrameScheduler::PLL::fit(
        nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
        int64_t *a, int64_t *b, int64_t *err) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }

    if ((period >> kPrecision) == 0) {
        ALOGW("Period is 0, or becomes 0 after the precision shift - would cause div0, returning");
        return false;
    }

    int64_t sumX = 0;
    int64_t sumXX = 0;
    int64_t sumXY = 0;
    int64_t sumYY = 0;
    int64_t sumY = 0;

    int64_t x = 0; // x usually is in [0..numSamplesToUse)
    nsecs_t lastTime;
    for (size_t i = 0; i < numSamplesToUse; i++) {
        size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[ix];
        if (i > 0) {
            x += divRound(time - lastTime, period);
        }
        // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
        //   ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
        //   priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
        //   while we are not refitting.
        int64_t y = divRound(time - phase, period >> kPrecision);
        sumX += x;
        sumY += y;
        sumXX += x * x;
        sumXY += x * y;
        sumYY += y * y;
        lastTime = time;
    }

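    // Standard closed-form least-squares solution for y = a*x + b:
    //   div = n*sum(x^2) - sum(x)^2
    //   a   = (n*sum(x*y) - sum(x)*sum(y)) / div
    //   b   = (sum(x^2)*sum(y) - sum(x)*sum(x*y)) / div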
    int64_t div   = (int64_t)numSamplesToUse * sumXX - sumX * sumX;
    if (div == 0) {
        return false;
    }

    int64_t a_nom = (int64_t)numSamplesToUse * sumXY - sumX * sumY;
    int64_t b_nom = sumXX * sumY            - sumX * sumXY;
    *a = divRound(a_nom, div);
    *b = divRound(b_nom, div);
    // don't use a and b directly as the rounding error is significant
    *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
    ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            numSamplesToUse,
            (long long)*a,   (*a / (float)(1 << kPrecision)),
            (long long)*b,   (*b / (float)(1 << kPrecision)),
            (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
    return true;
}

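// Derive an initial period and phase estimate from the most recent
// numSamplesToUse timestamps.  Candidate periods are the first-order deltas
// between consecutive timestamps (with large outliers trimmed) plus the
// differences between consecutive sorted deltas; the candidate that the most
// deltas are near-multiples of wins, and the first sample time becomes the
// phase.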
void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }
    CHECK(numSamplesToUse >= 3);  // must have at least 3 samples

    // estimate video framerate from deltas between timestamps, and
    // 2nd order deltas
    Vector<nsecs_t> deltas;
    nsecs_t lastTime, firstTime;
    for (size_t i = 0; i < numSamplesToUse; ++i) {
        size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[index];
        if (i > 0) {
            if (time - lastTime > kMinPeriod) {
                //ALOGV("delta: %lld", (long long)(time - lastTime));
                deltas.push(time - lastTime);
            }
        } else {
            firstTime = time;
        }
        lastTime = time;
    }
    deltas.sort(compare<nsecs_t>);
    size_t numDeltas = deltas.size();
    if (numDeltas > 1) {
        nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
        nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
        for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
            if (deltas[i] > deltaMaxLimit) {
                deltas.resize(i);
                numDeltas = i;
                break;
            }
        }
        for (size_t i = 1; i < numDeltas; ++i) {
            nsecs_t delta2nd = deltas[i] - deltas[i - 1];
            if (delta2nd >= deltaMinLimit) {
                //ALOGV("delta2: %lld", (long long)(delta2nd));
                deltas.push(delta2nd);
            }
        }
    }

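    // deltas now holds the filtered first-order deltas followed by the
    // qualifying second-order deltas; each entry is a candidate period.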
    // use the one that yields the best match
    int64_t bestScore;
    for (size_t i = 0; i < deltas.size(); ++i) {
        nsecs_t delta = deltas[i];
        int64_t score = 0;
#if 1
        // simplest score: number of deltas that are near multiples
        size_t matches = 0;
        for (size_t j = 0; j < deltas.size(); ++j) {
            nsecs_t err = periodicError(deltas[j], delta);
            if (err < delta / kMultiplesThresholdDiv) {
                ++matches;
            }
        }
        score = matches;
#if 0
        // could be weighted by the (1 - normalized error)
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            err = (1 << (2 * kPrecision)) - err;
            score *= max(0, err);
        }
#endif
#else
        // or use the error as a negative score
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            score = -delta * err;
        }
#endif
        if (i == 0 || score > bestScore) {
            bestScore = score;
            mPeriod = delta;
            mPhase = firstTime;
        }
    }
    ALOGV("priming[%zu] phase:%lld period:%lld",
            numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}

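// Feed one render timestamp into the PLL.  Restarts the sample history on a
// backward jump or a gap larger than kMaxAllowedFrameSkip, (re)primes the
// period estimate until enough consistent samples have been seen, and refits
// phase and period when the phase error or elapsed time warrants it.  Returns
// the current period estimate (-1 until one exists).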
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
    if (mLastTime >= 0
            // if time goes backward, or we skipped rendering
            && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
        restart();
    }

    mLastTime = time;
    mTimes[mNumSamples % kHistorySize] = time;
    ++mNumSamples;

    bool doFit = time > mRefitAt;
    if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
        prime(kMinSamplesToStopPrime);
        ++mSamplesUsedForPriming;
        doFit = true;
    }
    if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
        if (mPhase < 0) {
            // initialize phase to the current render time
            mPhase = time;
            doFit = true;
        } else if (!doFit) {
            int64_t err = periodicError(time - mPhase, mPeriod);
            doFit = err > mPeriod / kReFitThresholdDiv;
        }

        if (doFit) {
            int64_t a, b, err;
            if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
                // samples are not suitable for fitting.  this means they are
                // also not suitable for priming.
                ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
                return mPeriod;
            }

            mRefitAt = time + kRefitRefreshPeriod;

            mPhase += (mPeriod * b) >> kPrecision;
            mPeriod = (mPeriod * a) >> kPrecision;
            ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);

            if (err < kErrorThreshold) {
                if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
                    mPrimed = true;
                }
            } else {
                mPrimed = false;
                mSamplesUsedForPriming = 0;
            }
        }
    }
    return mPeriod;
}

nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
    return mPrimed ? mPeriod : 0;
}

/* ======================================================================= */
/*                             Frame Scheduler                             */
/* ======================================================================= */

static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60;  // 60Hz
static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s;       // 1 sec

VideoFrameScheduler::VideoFrameScheduler()
    : mVsyncTime(0),
      mVsyncPeriod(0),
      mVsyncRefreshAt(0),
      mLastVsyncTime(-1),
      mTimeCorrection(0) {
}

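// Query SurfaceFlinger for the latest VSYNC time and period of the primary
// display, and schedule the next refresh of this info kVsyncRefreshPeriod
// from now.  On failure both mVsyncTime and mVsyncPeriod remain 0.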
void VideoFrameScheduler::updateVsync() {
    mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
    mVsyncPeriod = 0;
    mVsyncTime = 0;

    // TODO: schedule frames for the destination surface
    // For now, surface flinger only schedules frames on the primary display
    if (mComposer == NULL) {
        String16 name("SurfaceFlinger");
        sp<IServiceManager> sm = defaultServiceManager();
        mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
    }
    if (mComposer != NULL) {
        DisplayStatInfo stats;
        status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
        if (res == OK) {
            ALOGV("vsync time:%lld period:%lld",
                    (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
            mVsyncTime = stats.vsyncTime;
            mVsyncPeriod = stats.vsyncPeriod;
        } else {
            ALOGW("getDisplayStats returned %d", res);
        }
    } else {
        ALOGW("could not get surface composer service");
    }
}

void VideoFrameScheduler::init(float videoFps) {
    updateVsync();

    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.reset(videoFps);
}

void VideoFrameScheduler::restart() {
    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.restart();
}

nsecs_t VideoFrameScheduler::getVsyncPeriod() {
    if (mVsyncPeriod > 0) {
        return mVsyncPeriod;
    }
    return kDefaultVsyncPeriod;
}

float VideoFrameScheduler::getFrameRate() {
    nsecs_t videoPeriod = mPll.getPeriod();
    if (videoPeriod > 0) {
        return 1e9 / videoPeriod;
    }
    return 0.f;
}

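// Map a desired render time onto the display's VSYNC grid.  The returned time
// is aimed at the middle of a VSYNC interval, so that small jitter does not
// flip a frame across a VSYNC edge, and the accumulated mTimeCorrection
// nudges the schedule so frames keep a steady VSYNC cadence.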
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
    nsecs_t origRenderTime = renderTime;

    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    if (now >= mVsyncRefreshAt) {
        updateVsync();
    }

    // without VSYNC info, there is nothing to do
    if (mVsyncPeriod == 0) {
        ALOGV("no vsync: render=%lld", (long long)renderTime);
        return renderTime;
    }

    // ensure vsync time is well before (corrected) render time
    if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
        mVsyncTime -=
            ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
    }

    // Video presentation takes place at the VSYNC _after_ renderTime.  Adjust renderTime
    // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
    renderTime -= mVsyncPeriod / 2;

    const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
    if (videoPeriod > 0) {
        // Smooth out rendering
        size_t N = 12;
        nsecs_t fiveSixthDev =
            abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
                    / (mVsyncPeriod / 100);
        // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
        if (fiveSixthDev < 12) {  /* 12% / 6 = 2% */
            N = 20;
        }

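        // offset averages, over the next N frames, how far each corrected
        // render time lands past a VSYNC edge; the correction below re-centers
        // that average at half a VSYNC period.  edgeRemainder tracks where raw
        // multiples of the video period fall within the VSYNC interval.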
        nsecs_t offset = 0;
        nsecs_t edgeRemainder = 0;
        for (size_t i = 1; i <= N; i++) {
            offset +=
                (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
            edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
        }
        mTimeCorrection += mVsyncPeriod / 2 - offset / (nsecs_t)N;
        renderTime += mTimeCorrection;
        nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
        edgeRemainder = abs(edgeRemainder / (nsecs_t)N - mVsyncPeriod / 2);
        if (edgeRemainder <= mVsyncPeriod / 3) {
            correctionLimit /= 2;
        }

        // estimate how many VSYNCs a frame will spend on the display
        nsecs_t nextVsyncTime =
            renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
        if (mLastVsyncTime >= 0) {
            size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
            size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
            bool vsyncsPerFrameAreNearlyConstant =
                periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;

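            // If the accumulated correction has drifted past the limit, shift
            // the schedule by half a VSYNC period so the frame moves to the
            // adjacent VSYNC instead of letting the correction keep growing.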
            if (mTimeCorrection > correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
                // remove a VSYNC
                mTimeCorrection -= mVsyncPeriod / 2;
                renderTime -= mVsyncPeriod / 2;
                nextVsyncTime -= mVsyncPeriod;
                if (vsyncsForLastFrame > 0)
                    --vsyncsForLastFrame;
            } else if (mTimeCorrection < -correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
                // add a VSYNC
                mTimeCorrection += mVsyncPeriod / 2;
                renderTime += mVsyncPeriod / 2;
                nextVsyncTime += mVsyncPeriod;
                if (vsyncsForLastFrame < ULONG_MAX)
                    ++vsyncsForLastFrame;
            }
            ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
        }
        mLastVsyncTime = nextVsyncTime;
    }

    // align renderTime to the center between VSYNC edges
    renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
    renderTime += mVsyncPeriod / 2;
    ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
    ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
    return renderTime;
}

void VideoFrameScheduler::release() {
    mComposer.clear();
}

VideoFrameScheduler::~VideoFrameScheduler() {
    release();
}

} // namespace android