/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

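// Position updates are throttled: notifyPosition() sends at most one
// update per kMinPositionUpdateDelayUs (100ms).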
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
}

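// Hands a decoded buffer to the renderer. The buffer is consumed
// asynchronously; notifyConsumed is posted once the buffer has been
// rendered, written to the audio sink, or discarded.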
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

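// Signals end-of-stream (or a terminal error) on the audio or video queue.
// finalResult must never be OK.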
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

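// Marks the stream as flushing, so buffers arriving in the meantime are
// returned immediately, then asks the renderer thread to flush its queue.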
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

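// Called once both queues have been flushed: drops the A/V clock anchor
// and, if both streams are present, re-enables queue syncing so the first
// audio and video buffers after the discontinuity start out aligned.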
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

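// Schedules a (possibly delayed) kWhatDrainAudioQueue message. The message
// carries the current queue generation so that messages posted before a
// flush or pause are ignored once they arrive.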
void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

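// Writes as much queued audio as the sink can currently accept and
// (re-)establishes the media-time/real-time anchor used to pace video.
// Returns true if there is still audio data left to drain.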
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        LOGI("audio sink underrun");
    } else {
        LOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

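            // Estimate when the first sample of this buffer will actually
            // be heard: frames still pending in the sink plus (roughly)
            // half the sink latency, converted to microseconds.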
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

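// Schedules rendering of the video frame at the head of the queue, delayed
// until its media time (mapped through the anchor) comes due. Without
// audio, the first video frame establishes the anchor itself.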
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

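// Releases the frame at the head of the video queue by posting its
// notifyConsumed message with "render" set. Frames arriving more than 40ms
// past their presentation time are logged as late; the lateness is also
// reported with position updates.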
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;

    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        LOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        LOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", true);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

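// Accepts a buffer from the decoder. While mSyncQueues is set (after a
// time discontinuity with both audio and video present), draining is held
// back until both queues have data; leading audio that starts more than
// 100ms before the first video frame is dropped so playback begins in sync.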
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    LOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}

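// Ends the initial queue-sync phase and kicks off draining of whatever is
// already queued on either side.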
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

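// An EOS marker is queued as an entry with a NULL buffer; the drain
// functions recognize it and report the final result upstream.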
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

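// Returns every queued buffer to the decoder, clears the flushing flag and
// bumps the queue generation so pending drain messages become no-ops.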
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

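// While a flush is in progress, buffers still in flight from the decoder
// are returned immediately (by posting notifyConsumed) instead of being
// queued. Returns true if the message was consumed this way.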
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

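// A new audio sink is in place; written-frame accounting restarts at zero.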
void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
}

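// Reports the current playback position (derived from the anchor) and the
// current video lateness, rate-limited by kMinPositionUpdateDelayUs.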
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

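// Pausing bumps both queue generations so that drain messages already
// posted are discarded when they arrive; resuming restarts the audio sink
// and re-posts drains for whatever is still queued.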
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    CHECK(mPaused);

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android