/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
}

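// Hands a decoded buffer (audio or video) to the renderer. This only posts a
// message; the buffer is actually enqueued on the renderer's looper thread in
// onQueueBuffer(). notifyConsumed is posted back once the buffer has been
// rendered or dropped, returning it to the decoder.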
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

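// Queues an end-of-stream marker on the audio or video queue. finalResult
// carries the reason for the EOS and must not be OK.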
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

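// Initiates a flush of one stream. The flushing flag is raised synchronously
// under mFlushLock so that buffers queued from now on are dropped (see
// dropBufferWhileFlushing()); the queue itself is emptied later on the
// looper thread in onFlush().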
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

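// Called once both queues have been emptied across a time discontinuity
// (e.g. a seek). Resets the media-time/real-time anchor and, if both streams
// are present, re-enables initial audio/video queue syncing.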
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

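// Schedules a kWhatDrainAudioQueue message unless one is already pending,
// we're still syncing the queues, we're paused, or there's nothing queued.
// The generation number lets stale drain messages be ignored after a flush
// or pause.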
void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

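// Writes as much queued audio as the sink can currently accept. While doing
// so it (re)establishes the anchor that maps media time to real time, based
// on the sink's playback position and latency. Returns true if queued data
// remains, so the caller can schedule another drain.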
bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

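// Schedules rendering of the video frame at the head of the queue, delayed
// until its presentation time according to the current anchor. If there is
// no anchor yet (and no audio to provide one), the first frame establishes
// the anchor and is shown immediately.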
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

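// Renders or drops the frame at the head of the video queue. A frame that is
// more than 40ms behind its computed presentation time is considered too
// late and is dropped ("render" set to 0 in the consumed notification).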
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;

    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

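// Enqueues a buffer on the looper thread. While the queues are still being
// synced at the start of playback, leading audio that precedes the first
// video frame by more than 0.1 secs is dropped so both streams start roughly
// aligned.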
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}

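// Ends the initial queue-sync phase (if active) and kicks off draining of
// whatever both queues already hold.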
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

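// Returns true if the given stream is currently being flushed, in which case
// the queued buffer is immediately returned to the decoder instead of being
// enqueued.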
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

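// Called (via signalAudioSinkChanged()) after the player has changed the
// audio sink; resync our written-frame counter with what the new sink
// reports.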
void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

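// Reports the current playback position (derived from the real-time anchor)
// and video lateness to the observer, throttled to at most one update per
// kMinPositionUpdateDelayUs.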
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

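// Bumps both queue generations so any pending drain messages become stale,
// and pauses the audio sink. Queued buffers are kept and resume playing in
// onResume().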
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android