      1 /*
      2  * Copyright (C) 2010 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 //#define LOG_NDEBUG 0
     18 #define LOG_TAG "NuPlayerRenderer"
     19 #include <utils/Log.h>
     20 
     21 #include "NuPlayerRenderer.h"
     22 #include <algorithm>
     23 #include <cutils/properties.h>
     24 #include <media/stagefright/foundation/ABuffer.h>
     25 #include <media/stagefright/foundation/ADebug.h>
     26 #include <media/stagefright/foundation/AMessage.h>
     27 #include <media/stagefright/foundation/AUtils.h>
     28 #include <media/stagefright/foundation/AWakeLock.h>
     29 #include <media/stagefright/MediaClock.h>
     30 #include <media/stagefright/MediaErrors.h>
     31 #include <media/stagefright/MetaData.h>
     32 #include <media/stagefright/Utils.h>
     33 #include <media/stagefright/VideoFrameScheduler.h>
     34 
     35 #include <inttypes.h>
     36 
     37 namespace android {
     38 
     39 /*
     40  * Example of common configuration settings in shell script form
     41 
     42    #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
     43    adb shell setprop audio.offload.disable 1
     44 
     45    #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
     46    adb shell setprop audio.offload.video 1
     47 
     48    #Use audio callbacks for PCM data
     49    adb shell setprop media.stagefright.audio.cbk 1
     50 
     51    #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
     52    adb shell setprop media.stagefright.audio.deep 1
     53 
      54    #Set size of buffers for PCM audio sink in msec (example: 1000 msec)
     55    adb shell setprop media.stagefright.audio.sink 1000
     56 
     57  * These configurations take effect for the next track played (not the current track).
     58  */
     59 
     60 static inline bool getUseAudioCallbackSetting() {
     61     return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
     62 }
     63 
     64 static inline int32_t getAudioSinkPcmMsSetting() {
     65     return property_get_int32(
     66             "media.stagefright.audio.sink", 500 /* default_value */);
     67 }
     68 
     69 // Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
     70 // is closed to allow the audio DSP to power down.
     71 static const int64_t kOffloadPauseMaxUs = 10000000ll;
     72 
     73 // Maximum allowed delay from AudioSink, 1.5 seconds.
     74 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000ll;
     75 
     76 static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
     77 
     78 // static
     79 const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
     80         AUDIO_CHANNEL_NONE,
     81         AUDIO_OUTPUT_FLAG_NONE,
     82         AUDIO_FORMAT_INVALID,
     83         0, // mNumChannels
     84         0 // mSampleRate
     85 };
     86 
     87 // static
     88 const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
     89 
     90 NuPlayer::Renderer::Renderer(
     91         const sp<MediaPlayerBase::AudioSink> &sink,
     92         const sp<AMessage> &notify,
     93         uint32_t flags)
     94     : mAudioSink(sink),
     95       mUseVirtualAudioSink(false),
     96       mNotify(notify),
     97       mFlags(flags),
     98       mNumFramesWritten(0),
     99       mDrainAudioQueuePending(false),
    100       mDrainVideoQueuePending(false),
    101       mAudioQueueGeneration(0),
    102       mVideoQueueGeneration(0),
    103       mAudioDrainGeneration(0),
    104       mVideoDrainGeneration(0),
    105       mAudioEOSGeneration(0),
    106       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
    107       mAudioFirstAnchorTimeMediaUs(-1),
    108       mAnchorTimeMediaUs(-1),
    109       mAnchorNumFramesWritten(-1),
    110       mVideoLateByUs(0ll),
    111       mHasAudio(false),
    112       mHasVideo(false),
    113       mNotifyCompleteAudio(false),
    114       mNotifyCompleteVideo(false),
    115       mSyncQueues(false),
    116       mPaused(false),
    117       mPauseDrainAudioAllowedUs(0),
    118       mVideoSampleReceived(false),
    119       mVideoRenderingStarted(false),
    120       mVideoRenderingStartGeneration(0),
    121       mAudioRenderingStartGeneration(0),
    122       mRenderingDataDelivered(false),
    123       mNextAudioClockUpdateTimeUs(-1),
    124       mLastAudioMediaTimeUs(-1),
    125       mAudioOffloadPauseTimeoutGeneration(0),
    126       mAudioTornDown(false),
    127       mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
    128       mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
    129       mTotalBuffersQueued(0),
    130       mLastAudioBufferDrained(0),
    131       mUseAudioCallback(false),
    132       mWakeLock(new AWakeLock()) {
    133     mMediaClock = new MediaClock;
    134     mPlaybackRate = mPlaybackSettings.mSpeed;
    135     mMediaClock->setPlaybackRate(mPlaybackRate);
    136 }
    137 
    138 NuPlayer::Renderer::~Renderer() {
    139     if (offloadingAudio()) {
    140         mAudioSink->stop();
    141         mAudioSink->flush();
    142         mAudioSink->close();
    143     }
    144 
     145     // Try to avoid a race condition in case the callback is still active.
    146     Mutex::Autolock autoLock(mLock);
    147     mUseAudioCallback = false;
    148     flushQueue(&mAudioQueue);
    149     flushQueue(&mVideoQueue);
    150     mWakeLock.clear();
    151     mMediaClock.clear();
    152     mVideoScheduler.clear();
    153     mNotify.clear();
    154     mAudioSink.clear();
    155 }
    156 
    157 void NuPlayer::Renderer::queueBuffer(
    158         bool audio,
    159         const sp<ABuffer> &buffer,
    160         const sp<AMessage> &notifyConsumed) {
    161     sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
    162     msg->setInt32("queueGeneration", getQueueGeneration(audio));
    163     msg->setInt32("audio", static_cast<int32_t>(audio));
    164     msg->setBuffer("buffer", buffer);
    165     msg->setMessage("notifyConsumed", notifyConsumed);
    166     msg->post();
    167 }
    168 
    169 void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    170     CHECK_NE(finalResult, (status_t)OK);
    171 
    172     sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
    173     msg->setInt32("queueGeneration", getQueueGeneration(audio));
    174     msg->setInt32("audio", static_cast<int32_t>(audio));
    175     msg->setInt32("finalResult", finalResult);
    176     msg->post();
    177 }
    178 
    179 status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
    180     sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
    181     writeToAMessage(msg, rate);
    182     sp<AMessage> response;
    183     status_t err = msg->postAndAwaitResponse(&response);
    184     if (err == OK && response != NULL) {
    185         CHECK(response->findInt32("err", &err));
    186     }
    187     return err;
    188 }
    189 
    190 status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
    191     if (rate.mSpeed == 0.f) {
    192         onPause();
     193         // Don't call the audio sink's setPlaybackRate when pausing, as the pitch does
     194         // not have to correspond to any non-zero speed (e.g. the old speed). Keep the
     195         // settings nonetheless, using the old speed, in case the audio sink changes.
    196         AudioPlaybackRate newRate = rate;
    197         newRate.mSpeed = mPlaybackSettings.mSpeed;
    198         mPlaybackSettings = newRate;
    199         return OK;
    200     }
    201 
    202     if (mAudioSink != NULL && mAudioSink->ready()) {
    203         status_t err = mAudioSink->setPlaybackRate(rate);
    204         if (err != OK) {
    205             return err;
    206         }
    207     }
    208     mPlaybackSettings = rate;
    209     mPlaybackRate = rate.mSpeed;
    210     mMediaClock->setPlaybackRate(mPlaybackRate);
    211     return OK;
    212 }
    213 
    214 status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    215     sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
    216     sp<AMessage> response;
    217     status_t err = msg->postAndAwaitResponse(&response);
    218     if (err == OK && response != NULL) {
    219         CHECK(response->findInt32("err", &err));
    220         if (err == OK) {
    221             readFromAMessage(response, rate);
    222         }
    223     }
    224     return err;
    225 }
    226 
    227 status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    228     if (mAudioSink != NULL && mAudioSink->ready()) {
    229         status_t err = mAudioSink->getPlaybackRate(rate);
    230         if (err == OK) {
    231             if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
    232                 ALOGW("correcting mismatch in internal/external playback rate");
    233             }
     234             // Get the playback settings used by the audio sink, as they may be
     235             // slightly off because the audio sink does not take small changes.
    236             mPlaybackSettings = *rate;
    237             if (mPaused) {
    238                 rate->mSpeed = 0.f;
    239             }
    240         }
    241         return err;
    242     }
    243     *rate = mPlaybackSettings;
    244     return OK;
    245 }
    246 
    247 status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
    248     sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
    249     writeToAMessage(msg, sync, videoFpsHint);
    250     sp<AMessage> response;
    251     status_t err = msg->postAndAwaitResponse(&response);
    252     if (err == OK && response != NULL) {
    253         CHECK(response->findInt32("err", &err));
    254     }
    255     return err;
    256 }
    257 
    258 status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
    259     if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
    260         return BAD_VALUE;
    261     }
    262     // TODO: support sync sources
    263     return INVALID_OPERATION;
    264 }
    265 
    266 status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
    267     sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
    268     sp<AMessage> response;
    269     status_t err = msg->postAndAwaitResponse(&response);
    270     if (err == OK && response != NULL) {
    271         CHECK(response->findInt32("err", &err));
    272         if (err == OK) {
    273             readFromAMessage(response, sync, videoFps);
    274         }
    275     }
    276     return err;
    277 }
    278 
    279 status_t NuPlayer::Renderer::onGetSyncSettings(
    280         AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
    281     *sync = mSyncSettings;
    282     *videoFps = -1.f;
    283     return OK;
    284 }
    285 
    286 void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
    287     {
    288         Mutex::Autolock autoLock(mLock);
    289         if (audio) {
    290             mNotifyCompleteAudio |= notifyComplete;
    291             clearAudioFirstAnchorTime_l();
    292             ++mAudioQueueGeneration;
    293             ++mAudioDrainGeneration;
    294         } else {
    295             mNotifyCompleteVideo |= notifyComplete;
    296             ++mVideoQueueGeneration;
    297             ++mVideoDrainGeneration;
    298         }
    299 
    300         clearAnchorTime_l();
    301         mVideoLateByUs = 0;
    302         mSyncQueues = false;
    303     }
    304 
    305     sp<AMessage> msg = new AMessage(kWhatFlush, this);
    306     msg->setInt32("audio", static_cast<int32_t>(audio));
    307     msg->post();
    308 }
    309 
    310 void NuPlayer::Renderer::signalTimeDiscontinuity() {
    311 }
    312 
    313 void NuPlayer::Renderer::signalDisableOffloadAudio() {
    314     (new AMessage(kWhatDisableOffloadAudio, this))->post();
    315 }
    316 
    317 void NuPlayer::Renderer::signalEnableOffloadAudio() {
    318     (new AMessage(kWhatEnableOffloadAudio, this))->post();
    319 }
    320 
    321 void NuPlayer::Renderer::pause() {
    322     (new AMessage(kWhatPause, this))->post();
    323 }
    324 
    325 void NuPlayer::Renderer::resume() {
    326     (new AMessage(kWhatResume, this))->post();
    327 }
    328 
    329 void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    330     sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
    331     msg->setFloat("frame-rate", fps);
    332     msg->post();
    333 }
    334 
     335 // Called on any thread without mLock acquired.
    336 status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    337     status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
    338     if (result == OK) {
    339         return result;
    340     }
    341 
    342     // MediaClock has not started yet. Try to start it if possible.
    343     {
    344         Mutex::Autolock autoLock(mLock);
    345         if (mAudioFirstAnchorTimeMediaUs == -1) {
    346             return result;
    347         }
    348 
    349         AudioTimestamp ts;
    350         status_t res = mAudioSink->getTimestamp(ts);
    351         if (res != OK) {
    352             return result;
    353         }
    354 
    355         // AudioSink has rendered some frames.
    356         int64_t nowUs = ALooper::GetNowUs();
    357         int64_t nowMediaUs = mAudioSink->getPlayedOutDurationUs(nowUs)
    358                 + mAudioFirstAnchorTimeMediaUs;
    359         mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
    360     }
    361 
    362     return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
    363 }
    364 
    365 void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
    366     mAudioFirstAnchorTimeMediaUs = -1;
    367     mMediaClock->setStartingTimeMedia(-1);
    368 }
    369 
    370 void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
    371     if (mAudioFirstAnchorTimeMediaUs == -1) {
    372         mAudioFirstAnchorTimeMediaUs = mediaUs;
    373         mMediaClock->setStartingTimeMedia(mediaUs);
    374     }
    375 }
    376 
    377 void NuPlayer::Renderer::clearAnchorTime_l() {
    378     mMediaClock->clearAnchor();
    379     mAnchorTimeMediaUs = -1;
    380     mAnchorNumFramesWritten = -1;
    381 }
    382 
    383 void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    384     Mutex::Autolock autoLock(mLock);
    385     mVideoLateByUs = lateUs;
    386 }
    387 
    388 int64_t NuPlayer::Renderer::getVideoLateByUs() {
    389     Mutex::Autolock autoLock(mLock);
    390     return mVideoLateByUs;
    391 }
    392 
    393 status_t NuPlayer::Renderer::openAudioSink(
    394         const sp<AMessage> &format,
    395         bool offloadOnly,
    396         bool hasVideo,
    397         uint32_t flags,
    398         bool *isOffloaded) {
    399     sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
    400     msg->setMessage("format", format);
    401     msg->setInt32("offload-only", offloadOnly);
    402     msg->setInt32("has-video", hasVideo);
    403     msg->setInt32("flags", flags);
    404 
    405     sp<AMessage> response;
    406     msg->postAndAwaitResponse(&response);
    407 
    408     int32_t err;
    409     if (!response->findInt32("err", &err)) {
    410         err = INVALID_OPERATION;
    411     } else if (err == OK && isOffloaded != NULL) {
    412         int32_t offload;
    413         CHECK(response->findInt32("offload", &offload));
    414         *isOffloaded = (offload != 0);
    415     }
    416     return err;
    417 }
    418 
    419 void NuPlayer::Renderer::closeAudioSink() {
    420     sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
    421 
    422     sp<AMessage> response;
    423     msg->postAndAwaitResponse(&response);
    424 }
    425 
    426 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    427     switch (msg->what()) {
    428         case kWhatOpenAudioSink:
    429         {
    430             sp<AMessage> format;
    431             CHECK(msg->findMessage("format", &format));
    432 
    433             int32_t offloadOnly;
    434             CHECK(msg->findInt32("offload-only", &offloadOnly));
    435 
    436             int32_t hasVideo;
    437             CHECK(msg->findInt32("has-video", &hasVideo));
    438 
    439             uint32_t flags;
    440             CHECK(msg->findInt32("flags", (int32_t *)&flags));
    441 
    442             status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
    443 
    444             sp<AMessage> response = new AMessage;
    445             response->setInt32("err", err);
    446             response->setInt32("offload", offloadingAudio());
    447 
    448             sp<AReplyToken> replyID;
    449             CHECK(msg->senderAwaitsResponse(&replyID));
    450             response->postReply(replyID);
    451 
    452             break;
    453         }
    454 
    455         case kWhatCloseAudioSink:
    456         {
    457             sp<AReplyToken> replyID;
    458             CHECK(msg->senderAwaitsResponse(&replyID));
    459 
    460             onCloseAudioSink();
    461 
    462             sp<AMessage> response = new AMessage;
    463             response->postReply(replyID);
    464             break;
    465         }
    466 
    467         case kWhatStopAudioSink:
    468         {
    469             mAudioSink->stop();
    470             break;
    471         }
    472 
    473         case kWhatDrainAudioQueue:
    474         {
    475             mDrainAudioQueuePending = false;
    476 
    477             int32_t generation;
    478             CHECK(msg->findInt32("drainGeneration", &generation));
    479             if (generation != getDrainGeneration(true /* audio */)) {
    480                 break;
    481             }
    482 
    483             if (onDrainAudioQueue()) {
    484                 uint32_t numFramesPlayed;
    485                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
    486                          (status_t)OK);
    487 
    488                 uint32_t numFramesPendingPlayout =
    489                     mNumFramesWritten - numFramesPlayed;
    490 
    491                 // This is how long the audio sink will have data to
    492                 // play back.
    493                 int64_t delayUs =
    494                     mAudioSink->msecsPerFrame()
    495                         * numFramesPendingPlayout * 1000ll;
    496                 if (mPlaybackRate > 1.0f) {
    497                     delayUs /= mPlaybackRate;
    498                 }
    499 
    500                 // Let's give it more data after about half that time
    501                 // has elapsed.
    502                 delayUs /= 2;
    503                 // check the buffer size to estimate maximum delay permitted.
    504                 const int64_t maxDrainDelayUs = std::max(
    505                         mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
    506                 ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
    507                         (long long)delayUs, (long long)maxDrainDelayUs);
    508                 Mutex::Autolock autoLock(mLock);
    509                 postDrainAudioQueue_l(delayUs);
    510             }
    511             break;
    512         }
    513 
    514         case kWhatDrainVideoQueue:
    515         {
    516             int32_t generation;
    517             CHECK(msg->findInt32("drainGeneration", &generation));
    518             if (generation != getDrainGeneration(false /* audio */)) {
    519                 break;
    520             }
    521 
    522             mDrainVideoQueuePending = false;
    523 
    524             onDrainVideoQueue();
    525 
    526             postDrainVideoQueue();
    527             break;
    528         }
    529 
    530         case kWhatPostDrainVideoQueue:
    531         {
    532             int32_t generation;
    533             CHECK(msg->findInt32("drainGeneration", &generation));
    534             if (generation != getDrainGeneration(false /* audio */)) {
    535                 break;
    536             }
    537 
    538             mDrainVideoQueuePending = false;
    539             postDrainVideoQueue();
    540             break;
    541         }
    542 
    543         case kWhatQueueBuffer:
    544         {
    545             onQueueBuffer(msg);
    546             break;
    547         }
    548 
    549         case kWhatQueueEOS:
    550         {
    551             onQueueEOS(msg);
    552             break;
    553         }
    554 
    555         case kWhatEOS:
    556         {
    557             int32_t generation;
    558             CHECK(msg->findInt32("audioEOSGeneration", &generation));
    559             if (generation != mAudioEOSGeneration) {
    560                 break;
    561             }
    562             status_t finalResult;
    563             CHECK(msg->findInt32("finalResult", &finalResult));
    564             notifyEOS(true /* audio */, finalResult);
    565             break;
    566         }
    567 
    568         case kWhatConfigPlayback:
    569         {
    570             sp<AReplyToken> replyID;
    571             CHECK(msg->senderAwaitsResponse(&replyID));
    572             AudioPlaybackRate rate;
    573             readFromAMessage(msg, &rate);
    574             status_t err = onConfigPlayback(rate);
    575             sp<AMessage> response = new AMessage;
    576             response->setInt32("err", err);
    577             response->postReply(replyID);
    578             break;
    579         }
    580 
    581         case kWhatGetPlaybackSettings:
    582         {
    583             sp<AReplyToken> replyID;
    584             CHECK(msg->senderAwaitsResponse(&replyID));
    585             AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
    586             status_t err = onGetPlaybackSettings(&rate);
    587             sp<AMessage> response = new AMessage;
    588             if (err == OK) {
    589                 writeToAMessage(response, rate);
    590             }
    591             response->setInt32("err", err);
    592             response->postReply(replyID);
    593             break;
    594         }
    595 
    596         case kWhatConfigSync:
    597         {
    598             sp<AReplyToken> replyID;
    599             CHECK(msg->senderAwaitsResponse(&replyID));
    600             AVSyncSettings sync;
    601             float videoFpsHint;
    602             readFromAMessage(msg, &sync, &videoFpsHint);
    603             status_t err = onConfigSync(sync, videoFpsHint);
    604             sp<AMessage> response = new AMessage;
    605             response->setInt32("err", err);
    606             response->postReply(replyID);
    607             break;
    608         }
    609 
    610         case kWhatGetSyncSettings:
    611         {
    612             sp<AReplyToken> replyID;
    613             CHECK(msg->senderAwaitsResponse(&replyID));
    614 
    615             ALOGV("kWhatGetSyncSettings");
    616             AVSyncSettings sync;
    617             float videoFps = -1.f;
    618             status_t err = onGetSyncSettings(&sync, &videoFps);
    619             sp<AMessage> response = new AMessage;
    620             if (err == OK) {
    621                 writeToAMessage(response, sync, videoFps);
    622             }
    623             response->setInt32("err", err);
    624             response->postReply(replyID);
    625             break;
    626         }
    627 
    628         case kWhatFlush:
    629         {
    630             onFlush(msg);
    631             break;
    632         }
    633 
    634         case kWhatDisableOffloadAudio:
    635         {
    636             onDisableOffloadAudio();
    637             break;
    638         }
    639 
    640         case kWhatEnableOffloadAudio:
    641         {
    642             onEnableOffloadAudio();
    643             break;
    644         }
    645 
    646         case kWhatPause:
    647         {
    648             onPause();
    649             break;
    650         }
    651 
    652         case kWhatResume:
    653         {
    654             onResume();
    655             break;
    656         }
    657 
    658         case kWhatSetVideoFrameRate:
    659         {
    660             float fps;
    661             CHECK(msg->findFloat("frame-rate", &fps));
    662             onSetVideoFrameRate(fps);
    663             break;
    664         }
    665 
    666         case kWhatAudioTearDown:
    667         {
    668             int32_t reason;
    669             CHECK(msg->findInt32("reason", &reason));
    670 
    671             onAudioTearDown((AudioTearDownReason)reason);
    672             break;
    673         }
    674 
    675         case kWhatAudioOffloadPauseTimeout:
    676         {
    677             int32_t generation;
    678             CHECK(msg->findInt32("drainGeneration", &generation));
    679             if (generation != mAudioOffloadPauseTimeoutGeneration) {
    680                 break;
    681             }
    682             ALOGV("Audio Offload tear down due to pause timeout.");
    683             onAudioTearDown(kDueToTimeout);
    684             mWakeLock->release();
    685             break;
    686         }
    687 
    688         default:
    689             TRESPASS();
    690             break;
    691     }
    692 }
    693 
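         // Schedules a kWhatDrainAudioQueue message tagged with the current audio drain
         // generation so that messages posted before a flush are ignored by the handler.
         // Does nothing when the AudioSink pulls data itself through the audio callback.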
    694 void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    695     if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
    696         return;
    697     }
    698 
    699     if (mAudioQueue.empty()) {
    700         return;
    701     }
    702 
    703     // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
    704     if (mPaused) {
    705         const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
    706         if (diffUs > delayUs) {
    707             delayUs = diffUs;
    708         }
    709     }
    710 
    711     mDrainAudioQueuePending = true;
    712     sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
    713     msg->setInt32("drainGeneration", mAudioDrainGeneration);
    714     msg->post(delayUs);
    715 }
    716 
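         // The two functions below implement the "media rendering started" notification:
         // prepareForMediaRenderingStart_l() snapshots the current drain generations, and
         // notifyIfMediaRenderingStarted_l() posts kWhatMediaRenderingStart once data has
         // been delivered for both streams while not paused.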
    717 void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
    718     mAudioRenderingStartGeneration = mAudioDrainGeneration;
    719     mVideoRenderingStartGeneration = mVideoDrainGeneration;
    720     mRenderingDataDelivered = false;
    721 }
    722 
    723 void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
    724     if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
    725         mAudioRenderingStartGeneration == mAudioDrainGeneration) {
    726         mRenderingDataDelivered = true;
    727         if (mPaused) {
    728             return;
    729         }
    730         mVideoRenderingStartGeneration = -1;
    731         mAudioRenderingStartGeneration = -1;
    732 
    733         sp<AMessage> notify = mNotify->dup();
    734         notify->setInt32("what", kWhatMediaRenderingStart);
    735         notify->post();
    736     }
    737 }
    738 
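         // Entry point for the AudioSink callback thread (used when mUseAudioCallback is set):
         // fills the sink's buffer with queued PCM and forwards stream-end and tear-down events.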
    739 // static
    740 size_t NuPlayer::Renderer::AudioSinkCallback(
    741         MediaPlayerBase::AudioSink * /* audioSink */,
    742         void *buffer,
    743         size_t size,
    744         void *cookie,
    745         MediaPlayerBase::AudioSink::cb_event_t event) {
    746     NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
    747 
    748     switch (event) {
    749         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
    750         {
    751             return me->fillAudioBuffer(buffer, size);
    752             break;
    753         }
    754 
    755         case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
    756         {
    757             ALOGV("AudioSink::CB_EVENT_STREAM_END");
    758             me->notifyEOSCallback();
    759             break;
    760         }
    761 
    762         case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
    763         {
    764             ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
    765             me->notifyAudioTearDown(kDueToError);
    766             break;
    767         }
    768     }
    769 
    770     return 0;
    771 }
    772 
    773 void NuPlayer::Renderer::notifyEOSCallback() {
    774     Mutex::Autolock autoLock(mLock);
    775 
    776     if (!mUseAudioCallback) {
    777         return;
    778     }
    779 
    780     notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
    781 }
    782 
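         // Called on the AudioSink callback thread. Copies as much queued PCM as fits into the
         // supplied buffer, anchors the media clock from the sink's played-out duration, and,
         // for non-offloaded tracks, posts EOS once the final queue entry has been consumed.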
    783 size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    784     Mutex::Autolock autoLock(mLock);
    785 
    786     if (!mUseAudioCallback) {
    787         return 0;
    788     }
    789 
    790     bool hasEOS = false;
    791 
    792     size_t sizeCopied = 0;
    793     bool firstEntry = true;
    794     QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
    795     while (sizeCopied < size && !mAudioQueue.empty()) {
    796         entry = &*mAudioQueue.begin();
    797 
    798         if (entry->mBuffer == NULL) { // EOS
    799             hasEOS = true;
    800             mAudioQueue.erase(mAudioQueue.begin());
    801             break;
    802         }
    803 
    804         if (firstEntry && entry->mOffset == 0) {
    805             firstEntry = false;
    806             int64_t mediaTimeUs;
    807             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
    808             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
    809             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
    810         }
    811 
    812         size_t copy = entry->mBuffer->size() - entry->mOffset;
    813         size_t sizeRemaining = size - sizeCopied;
    814         if (copy > sizeRemaining) {
    815             copy = sizeRemaining;
    816         }
    817 
    818         memcpy((char *)buffer + sizeCopied,
    819                entry->mBuffer->data() + entry->mOffset,
    820                copy);
    821 
    822         entry->mOffset += copy;
    823         if (entry->mOffset == entry->mBuffer->size()) {
    824             entry->mNotifyConsumed->post();
    825             mAudioQueue.erase(mAudioQueue.begin());
    826             entry = NULL;
    827         }
    828         sizeCopied += copy;
    829 
    830         notifyIfMediaRenderingStarted_l();
    831     }
    832 
    833     if (mAudioFirstAnchorTimeMediaUs >= 0) {
    834         int64_t nowUs = ALooper::GetNowUs();
    835         int64_t nowMediaUs =
    836             mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
    837         // we don't know how much data we are queueing for offloaded tracks.
    838         mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
    839     }
    840 
     841     // For non-offloaded audio, we need to compute the frames written because
     842     // there is no EVENT_STREAM_END notification. The frames written give
     843     // an estimate of the pending played-out duration.
    844     if (!offloadingAudio()) {
    845         mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
    846     }
    847 
    848     if (hasEOS) {
    849         (new AMessage(kWhatStopAudioSink, this))->post();
    850         // As there is currently no EVENT_STREAM_END callback notification for
    851         // non-offloaded audio tracks, we need to post the EOS ourselves.
    852         if (!offloadingAudio()) {
    853             int64_t postEOSDelayUs = 0;
    854             if (mAudioSink->needsTrailingPadding()) {
    855                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
    856             }
    857             ALOGV("fillAudioBuffer: notifyEOS "
    858                     "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
    859                     mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
    860             notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
    861         }
    862     }
    863     return sizeCopied;
    864 }
    865 
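         // Used when the AudioSink cannot accept data (e.g. getPosition fails): replies to every
         // queue entry up to and including the last EOS, delivering the EOS notifications so
         // NuPlayerDecoder is not left waiting, and drops those entries from the audio queue.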
    866 void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
    867     List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
    868     bool foundEOS = false;
    869     while (it != mAudioQueue.end()) {
    870         int32_t eos;
    871         QueueEntry *entry = &*it++;
    872         if (entry->mBuffer == NULL
    873                 || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
    874             itEOS = it;
    875             foundEOS = true;
    876         }
    877     }
    878 
    879     if (foundEOS) {
    880         // post all replies before EOS and drop the samples
    881         for (it = mAudioQueue.begin(); it != itEOS; it++) {
    882             if (it->mBuffer == NULL) {
    883                 // delay doesn't matter as we don't even have an AudioTrack
    884                 notifyEOS(true /* audio */, it->mFinalResult);
    885             } else {
    886                 it->mNotifyConsumed->post();
    887             }
    888         }
    889         mAudioQueue.erase(mAudioQueue.begin(), itEOS);
    890     }
    891 }
    892 
    893 bool NuPlayer::Renderer::onDrainAudioQueue() {
    894     // do not drain audio during teardown as queued buffers may be invalid.
    895     if (mAudioTornDown) {
    896         return false;
    897     }
    898     // TODO: This call to getPosition checks if AudioTrack has been created
    899     // in AudioSink before draining audio. If AudioTrack doesn't exist, then
    900     // CHECKs on getPosition will fail.
     901     // We still need to figure out why AudioTrack is not created when
     902     // this function is called. One possible reason could be leftover
     903     // audio. Another place to check is whether the decoder has received
     904     // INFO_FORMAT_CHANGED as its first buffer, since AudioSink is opened
     905     // there, and possible interactions with a flush immediately after
     906     // start. Investigate the error message
    907     // "vorbis_dsp_synthesis returned -135", along with RTSP.
    908     uint32_t numFramesPlayed;
    909     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
    910         // When getPosition fails, renderer will not reschedule the draining
    911         // unless new samples are queued.
     912         // If we have pending EOS entries (or "eos" markers for discontinuities), we need
     913         // to post them now as NuPlayerDecoder might be waiting for them.
    914         drainAudioQueueUntilLastEOS();
    915 
    916         ALOGW("onDrainAudioQueue(): audio sink is not ready");
    917         return false;
    918     }
    919 
    920 #if 0
    921     ssize_t numFramesAvailableToWrite =
    922         mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
    923 
    924     if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
    925         ALOGI("audio sink underrun");
    926     } else {
    927         ALOGV("audio queue has %d frames left to play",
    928              mAudioSink->frameCount() - numFramesAvailableToWrite);
    929     }
    930 #endif
    931 
    932     uint32_t prevFramesWritten = mNumFramesWritten;
    933     while (!mAudioQueue.empty()) {
    934         QueueEntry *entry = &*mAudioQueue.begin();
    935 
    936         mLastAudioBufferDrained = entry->mBufferOrdinal;
    937 
    938         if (entry->mBuffer == NULL) {
    939             // EOS
    940             int64_t postEOSDelayUs = 0;
    941             if (mAudioSink->needsTrailingPadding()) {
    942                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
    943             }
    944             notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
    945             mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
    946 
    947             mAudioQueue.erase(mAudioQueue.begin());
    948             entry = NULL;
    949             if (mAudioSink->needsTrailingPadding()) {
    950                 // If we're not in gapless playback (i.e. through setNextPlayer), we
    951                 // need to stop the track here, because that will play out the last
    952                 // little bit at the end of the file. Otherwise short files won't play.
    953                 mAudioSink->stop();
    954                 mNumFramesWritten = 0;
    955             }
    956             return false;
    957         }
    958 
     959         // Ignore a 0-sized buffer, which could be an EOS marker with no data.
    960         if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
    961             int64_t mediaTimeUs;
    962             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
    963             ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
    964                     mediaTimeUs / 1E6);
    965             onNewAudioMediaTime(mediaTimeUs);
    966         }
    967 
    968         size_t copy = entry->mBuffer->size() - entry->mOffset;
    969 
    970         ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
    971                                             copy, false /* blocking */);
    972         if (written < 0) {
    973             // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
    974             if (written == WOULD_BLOCK) {
    975                 ALOGV("AudioSink write would block when writing %zu bytes", copy);
    976             } else {
    977                 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
    978                 // This can only happen when AudioSink was opened with doNotReconnect flag set to
    979                 // true, in which case the NuPlayer will handle the reconnect.
    980                 notifyAudioTearDown(kDueToError);
    981             }
    982             break;
    983         }
    984 
    985         entry->mOffset += written;
    986         size_t remainder = entry->mBuffer->size() - entry->mOffset;
    987         if ((ssize_t)remainder < mAudioSink->frameSize()) {
    988             if (remainder > 0) {
    989                 ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
    990                         remainder);
    991                 entry->mOffset += remainder;
    992                 copy -= remainder;
    993             }
    994 
    995             entry->mNotifyConsumed->post();
    996             mAudioQueue.erase(mAudioQueue.begin());
    997 
    998             entry = NULL;
    999         }
   1000 
   1001         size_t copiedFrames = written / mAudioSink->frameSize();
   1002         mNumFramesWritten += copiedFrames;
   1003 
   1004         {
   1005             Mutex::Autolock autoLock(mLock);
   1006             int64_t maxTimeMedia;
   1007             maxTimeMedia =
   1008                 mAnchorTimeMediaUs +
   1009                         (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
   1010                                 * 1000LL * mAudioSink->msecsPerFrame());
   1011             mMediaClock->updateMaxTimeMedia(maxTimeMedia);
   1012 
   1013             notifyIfMediaRenderingStarted_l();
   1014         }
   1015 
   1016         if (written != (ssize_t)copy) {
   1017             // A short count was received from AudioSink::write()
   1018             //
   1019             // AudioSink write is called in non-blocking mode.
   1020             // It may return with a short count when:
   1021             //
   1022             // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
   1023             //    discarded.
   1024             // 2) The data to be copied exceeds the available buffer in AudioSink.
   1025             // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
   1026             // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
   1027 
   1028             // (Case 1)
   1029             // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
   1030             // needs to fail, as we should not carry over fractional frames between calls.
   1031             CHECK_EQ(copy % mAudioSink->frameSize(), 0);
   1032 
   1033             // (Case 2, 3, 4)
   1034             // Return early to the caller.
   1035             // Beware of calling immediately again as this may busy-loop if you are not careful.
   1036             ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
   1037             break;
   1038         }
   1039     }
   1040 
   1041     // calculate whether we need to reschedule another write.
   1042     bool reschedule = !mAudioQueue.empty()
   1043             && (!mPaused
   1044                 || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
   1045     //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
   1046     //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
   1047     return reschedule;
   1048 }
   1049 
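         // Converts a frame count into a duration in microseconds at the sample rate of the
         // currently opened (offloaded or PCM) audio sink.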
   1050 int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
   1051     int32_t sampleRate = offloadingAudio() ?
   1052             mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
   1053     if (sampleRate == 0) {
   1054         ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
   1055         return 0;
   1056     }
   1057     // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
   1058     return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
   1059 }
   1060 
   1061 // Calculate duration of pending samples if played at normal rate (i.e., 1.0).
   1062 int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
   1063     int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
   1064     if (mUseVirtualAudioSink) {
   1065         int64_t nowUs = ALooper::GetNowUs();
   1066         int64_t mediaUs;
   1067         if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
   1068             return 0ll;
   1069         } else {
   1070             return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
   1071         }
   1072     }
   1073     return writtenAudioDurationUs - mAudioSink->getPlayedOutDurationUs(nowUs);
   1074 }
   1075 
   1076 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
   1077     int64_t realUs;
   1078     if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
    1079         // If we failed to get the current position, e.g. because the audio clock is
    1080         // not ready, just play out the video immediately without delay.
   1081         return nowUs;
   1082     }
   1083     return realUs;
   1084 }
   1085 
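         // Anchors the MediaClock to the audio timeline: from the media time of the buffer about
         // to be written, subtract the pending (not yet played out) duration to estimate the media
         // time currently being rendered, updating the anchor at most once every
         // kMinimumAudioClockUpdatePeriodUs. If the sink never starts rendering, fall back to a
         // system-clock-paced "virtual" audio sink.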
   1086 void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
   1087     Mutex::Autolock autoLock(mLock);
   1088     // TRICKY: vorbis decoder generates multiple frames with the same
   1089     // timestamp, so only update on the first frame with a given timestamp
   1090     if (mediaTimeUs == mAnchorTimeMediaUs) {
   1091         return;
   1092     }
   1093     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
   1094 
   1095     // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
   1096     if (mNextAudioClockUpdateTimeUs == -1) {
   1097         AudioTimestamp ts;
   1098         if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
   1099             mNextAudioClockUpdateTimeUs = 0; // start our clock updates
   1100         }
   1101     }
   1102     int64_t nowUs = ALooper::GetNowUs();
   1103     if (mNextAudioClockUpdateTimeUs >= 0) {
   1104         if (nowUs >= mNextAudioClockUpdateTimeUs) {
   1105             int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
   1106             mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
   1107             mUseVirtualAudioSink = false;
   1108             mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
   1109         }
   1110     } else {
   1111         int64_t unused;
   1112         if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
   1113                 && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
   1114                         > kMaxAllowedAudioSinkDelayUs)) {
   1115             // Enough data has been sent to AudioSink, but AudioSink has not rendered
   1116             // any data yet. Something is wrong with AudioSink, e.g., the device is not
   1117             // connected to audio out.
   1118             // Switch to system clock. This essentially creates a virtual AudioSink with
    1119             // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
   1120             // This virtual AudioSink renders audio data starting from the very first sample
    1121             // and is paced by the system clock.
   1122             ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
   1123             mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
   1124             mUseVirtualAudioSink = true;
   1125         }
   1126     }
   1127     mAnchorNumFramesWritten = mNumFramesWritten;
   1128     mAnchorTimeMediaUs = mediaTimeUs;
   1129 }
   1130 
   1131 // Called without mLock acquired.
   1132 void NuPlayer::Renderer::postDrainVideoQueue() {
   1133     if (mDrainVideoQueuePending
   1134             || getSyncQueues()
   1135             || (mPaused && mVideoSampleReceived)) {
   1136         return;
   1137     }
   1138 
   1139     if (mVideoQueue.empty()) {
   1140         return;
   1141     }
   1142 
   1143     QueueEntry &entry = *mVideoQueue.begin();
   1144 
   1145     sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
   1146     msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
   1147 
   1148     if (entry.mBuffer == NULL) {
   1149         // EOS doesn't carry a timestamp.
   1150         msg->post();
   1151         mDrainVideoQueuePending = true;
   1152         return;
   1153     }
   1154 
   1155     bool needRepostDrainVideoQueue = false;
   1156     int64_t delayUs;
   1157     int64_t nowUs = ALooper::GetNowUs();
   1158     int64_t realTimeUs;
   1159     if (mFlags & FLAG_REAL_TIME) {
   1160         int64_t mediaTimeUs;
   1161         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
   1162         realTimeUs = mediaTimeUs;
   1163     } else {
   1164         int64_t mediaTimeUs;
   1165         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
   1166 
   1167         {
   1168             Mutex::Autolock autoLock(mLock);
   1169             if (mAnchorTimeMediaUs < 0) {
   1170                 mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
   1171                 mAnchorTimeMediaUs = mediaTimeUs;
   1172                 realTimeUs = nowUs;
   1173             } else if (!mVideoSampleReceived) {
   1174                 // Always render the first video frame.
   1175                 realTimeUs = nowUs;
   1176             } else if (mAudioFirstAnchorTimeMediaUs < 0
   1177                 || mMediaClock->getRealTimeFor(mediaTimeUs, &realTimeUs) == OK) {
   1178                 realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
   1179             } else if (mediaTimeUs - mAudioFirstAnchorTimeMediaUs >= 0) {
   1180                 needRepostDrainVideoQueue = true;
   1181                 realTimeUs = nowUs;
   1182             } else {
   1183                 realTimeUs = nowUs;
   1184             }
   1185         }
   1186         if (!mHasAudio) {
   1187             // smooth out videos >= 10fps
   1188             mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
   1189         }
   1190 
    1191         // Heuristics to handle the situation where the media time changed without a
   1192         // discontinuity. If we have not drained an audio buffer that was
   1193         // received after this buffer, repost in 10 msec. Otherwise repost
   1194         // in 500 msec.
   1195         delayUs = realTimeUs - nowUs;
   1196         int64_t postDelayUs = -1;
   1197         if (delayUs > 500000) {
   1198             postDelayUs = 500000;
   1199             if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
   1200                 postDelayUs = 10000;
   1201             }
   1202         } else if (needRepostDrainVideoQueue) {
   1203             // CHECK(mPlaybackRate > 0);
   1204             // CHECK(mAudioFirstAnchorTimeMediaUs >= 0);
   1205             // CHECK(mediaTimeUs - mAudioFirstAnchorTimeMediaUs >= 0);
   1206             postDelayUs = mediaTimeUs - mAudioFirstAnchorTimeMediaUs;
   1207             postDelayUs /= mPlaybackRate;
   1208         }
   1209 
   1210         if (postDelayUs >= 0) {
   1211             msg->setWhat(kWhatPostDrainVideoQueue);
   1212             msg->post(postDelayUs);
   1213             mVideoScheduler->restart();
   1214             ALOGI("possible video time jump of %dms (%lld : %lld) or uninitialized media clock,"
   1215                     " retrying in %dms",
   1216                     (int)(delayUs / 1000), (long long)mediaTimeUs,
   1217                     (long long)mAudioFirstAnchorTimeMediaUs, (int)(postDelayUs / 1000));
   1218             mDrainVideoQueuePending = true;
   1219             return;
   1220         }
   1221     }
   1222 
   1223     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
   1224     int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
   1225 
   1226     delayUs = realTimeUs - nowUs;
   1227 
   1228     ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
   1229     // post 2 display refreshes before rendering is due
   1230     msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
   1231 
   1232     mDrainVideoQueuePending = true;
   1233 }
   1234 
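         // Drains one entry from the video queue: computes its target real time from the media
         // clock, marks it "too late" if it is more than 40 ms behind, and returns it to the
         // decoder via notifyConsumed with "render" and "timestampNs" set accordingly.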
   1235 void NuPlayer::Renderer::onDrainVideoQueue() {
   1236     if (mVideoQueue.empty()) {
   1237         return;
   1238     }
   1239 
   1240     QueueEntry *entry = &*mVideoQueue.begin();
   1241 
   1242     if (entry->mBuffer == NULL) {
   1243         // EOS
   1244 
   1245         notifyEOS(false /* audio */, entry->mFinalResult);
   1246 
   1247         mVideoQueue.erase(mVideoQueue.begin());
   1248         entry = NULL;
   1249 
   1250         setVideoLateByUs(0);
   1251         return;
   1252     }
   1253 
   1254     int64_t nowUs = ALooper::GetNowUs();
   1255     int64_t realTimeUs;
   1256     int64_t mediaTimeUs = -1;
   1257     if (mFlags & FLAG_REAL_TIME) {
   1258         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
   1259     } else {
   1260         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
   1261 
   1262         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
   1263     }
   1264 
   1265     bool tooLate = false;
   1266 
   1267     if (!mPaused) {
   1268         setVideoLateByUs(nowUs - realTimeUs);
   1269         tooLate = (mVideoLateByUs > 40000);
   1270 
   1271         if (tooLate) {
   1272             ALOGV("video late by %lld us (%.2f secs)",
   1273                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
   1274         } else {
   1275             int64_t mediaUs = 0;
   1276             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
   1277             ALOGV("rendering video at media time %.2f secs",
   1278                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
   1279                     mediaUs) / 1E6);
   1280 
   1281             if (!(mFlags & FLAG_REAL_TIME)
   1282                     && mLastAudioMediaTimeUs != -1
   1283                     && mediaTimeUs > mLastAudioMediaTimeUs) {
   1284                 // If audio ends before video, video continues to drive media clock.
   1285                 // Also smooth out videos >= 10fps.
   1286                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
   1287             }
   1288         }
   1289     } else {
   1290         setVideoLateByUs(0);
   1291         if (!mVideoSampleReceived && !mHasAudio) {
    1292             // This ensures that the first frame after a flush won't be used as an anchor
    1293             // while the renderer is paused, because resume can happen at any time after a seek.
   1294             Mutex::Autolock autoLock(mLock);
   1295             clearAnchorTime_l();
   1296         }
   1297     }
   1298 
   1299     // Always render the first video frame while keeping stats on A/V sync.
   1300     if (!mVideoSampleReceived) {
   1301         realTimeUs = nowUs;
   1302         tooLate = false;
   1303     }
   1304 
   1305     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
   1306     entry->mNotifyConsumed->setInt32("render", !tooLate);
   1307     entry->mNotifyConsumed->post();
   1308     mVideoQueue.erase(mVideoQueue.begin());
   1309     entry = NULL;
   1310 
   1311     mVideoSampleReceived = true;
   1312 
   1313     if (!mPaused) {
   1314         if (!mVideoRenderingStarted) {
   1315             mVideoRenderingStarted = true;
   1316             notifyVideoRenderingStart();
   1317         }
   1318         Mutex::Autolock autoLock(mLock);
   1319         notifyIfMediaRenderingStarted_l();
   1320     }
   1321 }
   1322 
   1323 void NuPlayer::Renderer::notifyVideoRenderingStart() {
   1324     sp<AMessage> notify = mNotify->dup();
   1325     notify->setInt32("what", kWhatVideoRenderingStart);
   1326     notify->post();
   1327 }
   1328 
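         // For audio with a positive delay, the EOS is first re-posted to this looper as a
         // kWhatEOS message (guarded by mAudioEOSGeneration) so the notification reaches the
         // player only after the pending audio has played out.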
   1329 void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
   1330     if (audio && delayUs > 0) {
   1331         sp<AMessage> msg = new AMessage(kWhatEOS, this);
   1332         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
   1333         msg->setInt32("finalResult", finalResult);
   1334         msg->post(delayUs);
   1335         return;
   1336     }
   1337     sp<AMessage> notify = mNotify->dup();
   1338     notify->setInt32("what", kWhatEOS);
   1339     notify->setInt32("audio", static_cast<int32_t>(audio));
   1340     notify->setInt32("finalResult", finalResult);
   1341     notify->post(delayUs);
   1342 }
   1343 
   1344 void NuPlayer::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
   1345     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
   1346     msg->setInt32("reason", reason);
   1347     msg->post();
   1348 }
   1349 
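         // Adds a decoded buffer to the audio or video queue and schedules draining. While the
         // queues are being synced (mSyncQueues), audio that leads video by more than 0.1 sec is
         // dropped so both queues start at roughly the same media time.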
   1350 void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
   1351     int32_t audio;
   1352     CHECK(msg->findInt32("audio", &audio));
   1353 
   1354     if (dropBufferIfStale(audio, msg)) {
   1355         return;
   1356     }
   1357 
   1358     if (audio) {
   1359         mHasAudio = true;
   1360     } else {
   1361         mHasVideo = true;
   1362     }
   1363 
   1364     if (mHasVideo) {
   1365         if (mVideoScheduler == NULL) {
   1366             mVideoScheduler = new VideoFrameScheduler();
   1367             mVideoScheduler->init();
   1368         }
   1369     }
   1370 
   1371     sp<ABuffer> buffer;
   1372     CHECK(msg->findBuffer("buffer", &buffer));
   1373 
   1374     sp<AMessage> notifyConsumed;
   1375     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
   1376 
   1377     QueueEntry entry;
   1378     entry.mBuffer = buffer;
   1379     entry.mNotifyConsumed = notifyConsumed;
   1380     entry.mOffset = 0;
   1381     entry.mFinalResult = OK;
   1382     entry.mBufferOrdinal = ++mTotalBuffersQueued;
   1383 
   1384     if (audio) {
   1385         Mutex::Autolock autoLock(mLock);
   1386         mAudioQueue.push_back(entry);
   1387         postDrainAudioQueue_l();
   1388     } else {
   1389         mVideoQueue.push_back(entry);
   1390         postDrainVideoQueue();
   1391     }
   1392 
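             // Start-up A/V sync: while mSyncQueues is set, compare the first buffer on each
             // queue and drop leading audio until the two start times are within 0.1 secs.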
   1393     Mutex::Autolock autoLock(mLock);
   1394     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
   1395         return;
   1396     }
   1397 
   1398     sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
   1399     sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
   1400 
   1401     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
   1402         // EOS signalled on either queue.
   1403         syncQueuesDone_l();
   1404         return;
   1405     }
   1406 
   1407     int64_t firstAudioTimeUs;
   1408     int64_t firstVideoTimeUs;
   1409     CHECK(firstAudioBuffer->meta()
   1410             ->findInt64("timeUs", &firstAudioTimeUs));
   1411     CHECK(firstVideoBuffer->meta()
   1412             ->findInt64("timeUs", &firstVideoTimeUs));
   1413 
   1414     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
   1415 
   1416     ALOGV("queueDiff = %.2f secs", diff / 1E6);
   1417 
   1418     if (diff > 100000ll) {
    1419         // Audio data starts more than 0.1 secs before video.
   1420         // Drop some audio.
   1421 
   1422         (*mAudioQueue.begin()).mNotifyConsumed->post();
   1423         mAudioQueue.erase(mAudioQueue.begin());
   1424         return;
   1425     }
   1426 
   1427     syncQueuesDone_l();
   1428 }
   1429 
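         // Ends start-up queue syncing; mLock must be held. Unlike postDrainAudioQueue_l(),
         // postDrainVideoQueue() is not called with the lock held, so mLock is dropped around
         // that call.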
   1430 void NuPlayer::Renderer::syncQueuesDone_l() {
   1431     if (!mSyncQueues) {
   1432         return;
   1433     }
   1434 
   1435     mSyncQueues = false;
   1436 
   1437     if (!mAudioQueue.empty()) {
   1438         postDrainAudioQueue_l();
   1439     }
   1440 
   1441     if (!mVideoQueue.empty()) {
   1442         mLock.unlock();
   1443         postDrainVideoQueue();
   1444         mLock.lock();
   1445     }
   1446 }
   1447 
   1448 void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
   1449     int32_t audio;
   1450     CHECK(msg->findInt32("audio", &audio));
   1451 
   1452     if (dropBufferIfStale(audio, msg)) {
   1453         return;
   1454     }
   1455 
   1456     int32_t finalResult;
   1457     CHECK(msg->findInt32("finalResult", &finalResult));
   1458 
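             // An EOS marker is a QueueEntry with no buffer; only mFinalResult carries
             // information.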
   1459     QueueEntry entry;
   1460     entry.mOffset = 0;
   1461     entry.mFinalResult = finalResult;
   1462 
   1463     if (audio) {
   1464         Mutex::Autolock autoLock(mLock);
   1465         if (mAudioQueue.empty() && mSyncQueues) {
   1466             syncQueuesDone_l();
   1467         }
   1468         mAudioQueue.push_back(entry);
   1469         postDrainAudioQueue_l();
   1470     } else {
   1471         if (mVideoQueue.empty() && getSyncQueues()) {
   1472             Mutex::Autolock autoLock(mLock);
   1473             syncQueuesDone_l();
   1474         }
   1475         mVideoQueue.push_back(entry);
   1476         postDrainVideoQueue();
   1477     }
   1478 }
   1479 
   1480 void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
   1481     int32_t audio, notifyComplete;
   1482     CHECK(msg->findInt32("audio", &audio));
   1483 
   1484     {
   1485         Mutex::Autolock autoLock(mLock);
   1486         if (audio) {
   1487             notifyComplete = mNotifyCompleteAudio;
   1488             mNotifyCompleteAudio = false;
   1489             mLastAudioMediaTimeUs = -1;
   1490         } else {
   1491             notifyComplete = mNotifyCompleteVideo;
   1492             mNotifyCompleteVideo = false;
   1493         }
   1494 
   1495         // If we're currently syncing the queues, i.e. dropping audio while
   1496         // aligning the first audio/video buffer times and only one of the
   1497         // two queues has data, we may starve that queue by not requesting
   1498         // more buffers from the decoder. If the other source then encounters
   1499         // a discontinuity that leads to flushing, we'll never find the
   1500         // corresponding discontinuity on the other queue.
   1501         // Therefore we'll stop syncing the queues if at least one of them
   1502         // is flushed.
   1503         syncQueuesDone_l();
   1504         clearAnchorTime_l();
   1505     }
   1506 
   1507     ALOGV("flushing %s", audio ? "audio" : "video");
   1508     if (audio) {
   1509         {
   1510             Mutex::Autolock autoLock(mLock);
   1511             flushQueue(&mAudioQueue);
   1512 
   1513             ++mAudioDrainGeneration;
   1514             ++mAudioEOSGeneration;
   1515             prepareForMediaRenderingStart_l();
   1516 
   1517             // the frame count will be reset after flush.
   1518             clearAudioFirstAnchorTime_l();
   1519         }
   1520 
   1521         mDrainAudioQueuePending = false;
   1522 
   1523         if (offloadingAudio()) {
   1524             mAudioSink->pause();
   1525             mAudioSink->flush();
   1526             if (!mPaused) {
   1527                 mAudioSink->start();
   1528             }
   1529         } else {
   1530             mAudioSink->pause();
   1531             mAudioSink->flush();
    1532             // Call stop() to signal the AudioSink to completely fill its
    1533             // internal buffer before resuming playback.
   1534             // FIXME: this is ignored after flush().
   1535             mAudioSink->stop();
   1536             if (mPaused) {
   1537                 // Race condition: if renderer is paused and audio sink is stopped,
   1538                 // we need to make sure that the audio track buffer fully drains
   1539                 // before delivering data.
   1540                 // FIXME: remove this if we can detect if stop() is complete.
   1541                 const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
   1542                 mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
   1543             } else {
   1544                 mAudioSink->start();
   1545             }
   1546             mNumFramesWritten = 0;
   1547         }
   1548         mNextAudioClockUpdateTimeUs = -1;
   1549     } else {
   1550         flushQueue(&mVideoQueue);
   1551 
   1552         mDrainVideoQueuePending = false;
   1553 
   1554         if (mVideoScheduler != NULL) {
   1555             mVideoScheduler->restart();
   1556         }
   1557 
   1558         Mutex::Autolock autoLock(mLock);
   1559         ++mVideoDrainGeneration;
   1560         prepareForMediaRenderingStart_l();
   1561     }
   1562 
   1563     mVideoSampleReceived = false;
   1564 
   1565     if (notifyComplete) {
   1566         notifyFlushComplete(audio);
   1567     }
   1568 }
   1569 
   1570 void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
   1571     while (!queue->empty()) {
   1572         QueueEntry *entry = &*queue->begin();
   1573 
   1574         if (entry->mBuffer != NULL) {
   1575             entry->mNotifyConsumed->post();
   1576         }
   1577 
   1578         queue->erase(queue->begin());
   1579         entry = NULL;
   1580     }
   1581 }
   1582 
   1583 void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
   1584     sp<AMessage> notify = mNotify->dup();
   1585     notify->setInt32("what", kWhatFlushComplete);
   1586     notify->setInt32("audio", static_cast<int32_t>(audio));
   1587     notify->post();
   1588 }
   1589 
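         // If the buffer's queueGeneration does not match the current one, the buffer is stale:
         // its notifyConsumed is posted so the owner can reclaim it, and it is dropped here.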
   1590 bool NuPlayer::Renderer::dropBufferIfStale(
   1591         bool audio, const sp<AMessage> &msg) {
   1592     int32_t queueGeneration;
   1593     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
   1594 
   1595     if (queueGeneration == getQueueGeneration(audio)) {
   1596         return false;
   1597     }
   1598 
   1599     sp<AMessage> notifyConsumed;
   1600     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
   1601         notifyConsumed->post();
   1602     }
   1603 
   1604     return true;
   1605 }
   1606 
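         // Called after the (non-offloaded) AudioSink has been reopened: reset the local
         // frames-written count to whatever the new sink reports and clear the anchor
         // bookkeeping so position tracking restarts cleanly.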
   1607 void NuPlayer::Renderer::onAudioSinkChanged() {
   1608     if (offloadingAudio()) {
   1609         return;
   1610     }
   1611     CHECK(!mDrainAudioQueuePending);
   1612     mNumFramesWritten = 0;
   1613     {
   1614         Mutex::Autolock autoLock(mLock);
   1615         mAnchorNumFramesWritten = -1;
   1616     }
   1617     uint32_t written;
   1618     if (mAudioSink->getFramesWritten(&written) == OK) {
   1619         mNumFramesWritten = written;
   1620     }
   1621 }
   1622 
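         // Disabling offload bumps mAudioDrainGeneration so that drain messages posted for the
         // old (offloaded) sink configuration are discarded; onEnableOffloadAudio() below does
         // the same for the opposite transition.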
   1623 void NuPlayer::Renderer::onDisableOffloadAudio() {
   1624     Mutex::Autolock autoLock(mLock);
   1625     mFlags &= ~FLAG_OFFLOAD_AUDIO;
   1626     ++mAudioDrainGeneration;
   1627     if (mAudioRenderingStartGeneration != -1) {
   1628         prepareForMediaRenderingStart_l();
   1629     }
   1630 }
   1631 
   1632 void NuPlayer::Renderer::onEnableOffloadAudio() {
   1633     Mutex::Autolock autoLock(mLock);
   1634     mFlags |= FLAG_OFFLOAD_AUDIO;
   1635     ++mAudioDrainGeneration;
   1636     if (mAudioRenderingStartGeneration != -1) {
   1637         prepareForMediaRenderingStart_l();
   1638     }
   1639 }
   1640 
   1641 void NuPlayer::Renderer::onPause() {
   1642     if (mPaused) {
   1643         return;
   1644     }
   1645 
   1646     {
   1647         Mutex::Autolock autoLock(mLock);
    1648         // We do not increment the audio drain generation so that the audio buffer keeps filling during pause.
   1649         ++mVideoDrainGeneration;
   1650         prepareForMediaRenderingStart_l();
   1651         mPaused = true;
   1652         mMediaClock->setPlaybackRate(0.0);
   1653     }
   1654 
   1655     mDrainAudioQueuePending = false;
   1656     mDrainVideoQueuePending = false;
   1657 
   1658     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
   1659     mAudioSink->pause();
   1660     startAudioOffloadPauseTimeout();
   1661 
   1662     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
   1663           mAudioQueue.size(), mVideoQueue.size());
   1664 }
   1665 
   1666 void NuPlayer::Renderer::onResume() {
   1667     if (!mPaused) {
   1668         return;
   1669     }
   1670 
   1671     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
   1672     cancelAudioOffloadPauseTimeout();
   1673     if (mAudioSink->ready()) {
   1674         status_t err = mAudioSink->start();
   1675         if (err != OK) {
   1676             ALOGE("cannot start AudioSink err %d", err);
   1677             notifyAudioTearDown(kDueToError);
   1678         }
   1679     }
   1680 
   1681     {
   1682         Mutex::Autolock autoLock(mLock);
   1683         mPaused = false;
   1684         // rendering started message may have been delayed if we were paused.
   1685         if (mRenderingDataDelivered) {
   1686             notifyIfMediaRenderingStarted_l();
   1687         }
   1688         // configure audiosink as we did not do it when pausing
   1689         if (mAudioSink != NULL && mAudioSink->ready()) {
   1690             mAudioSink->setPlaybackRate(mPlaybackSettings);
   1691         }
   1692 
   1693         mMediaClock->setPlaybackRate(mPlaybackRate);
   1694 
   1695         if (!mAudioQueue.empty()) {
   1696             postDrainAudioQueue_l();
   1697         }
   1698     }
   1699 
   1700     if (!mVideoQueue.empty()) {
   1701         postDrainVideoQueue();
   1702     }
   1703 }
   1704 
   1705 void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
   1706     if (mVideoScheduler == NULL) {
   1707         mVideoScheduler = new VideoFrameScheduler();
   1708     }
   1709     mVideoScheduler->init(fps);
   1710 }
   1711 
   1712 int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
   1713     Mutex::Autolock autoLock(mLock);
   1714     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
   1715 }
   1716 
   1717 int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
   1718     Mutex::Autolock autoLock(mLock);
   1719     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
   1720 }
   1721 
   1722 bool NuPlayer::Renderer::getSyncQueues() {
   1723     Mutex::Autolock autoLock(mLock);
   1724     return mSyncQueues;
   1725 }
   1726 
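         // Tear the audio path down at most once: stop and flush the sink, then notify the
         // upper layer, including the last known playback position when it can still be read.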
   1727 void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
   1728     if (mAudioTornDown) {
   1729         return;
   1730     }
   1731     mAudioTornDown = true;
   1732 
   1733     int64_t currentPositionUs;
   1734     sp<AMessage> notify = mNotify->dup();
   1735     if (getCurrentPosition(&currentPositionUs) == OK) {
   1736         notify->setInt64("positionUs", currentPositionUs);
   1737     }
   1738 
   1739     mAudioSink->stop();
   1740     mAudioSink->flush();
   1741 
   1742     notify->setInt32("what", kWhatAudioTearDown);
   1743     notify->setInt32("reason", reason);
   1744     notify->post();
   1745 }
   1746 
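         // The delayed timeout message carries the current pause-timeout generation;
         // cancelAudioOffloadPauseTimeout() increments that generation, which lets a timeout
         // still in flight be ignored.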
   1747 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
   1748     if (offloadingAudio()) {
   1749         mWakeLock->acquire();
   1750         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
   1751         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
   1752         msg->post(kOffloadPauseMaxUs);
   1753     }
   1754 }
   1755 
   1756 void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
   1757     // We may have called startAudioOffloadPauseTimeout() without
   1758     // the AudioSink open and with offloadingAudio enabled.
   1759     //
   1760     // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
   1761     // we always release the wakelock and increment the pause timeout generation.
   1762     //
   1763     // Note: The acquired wakelock prevents the device from suspending
   1764     // immediately after offload pause (in case a resume happens shortly thereafter).
   1765     mWakeLock->release(true);
   1766     ++mAudioOffloadPauseTimeoutGeneration;
   1767 }
   1768 
   1769 status_t NuPlayer::Renderer::onOpenAudioSink(
   1770         const sp<AMessage> &format,
   1771         bool offloadOnly,
   1772         bool hasVideo,
   1773         uint32_t flags) {
   1774     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
   1775             offloadOnly, offloadingAudio());
   1776     bool audioSinkChanged = false;
   1777 
   1778     int32_t numChannels;
   1779     CHECK(format->findInt32("channel-count", &numChannels));
   1780 
   1781     int32_t channelMask;
   1782     if (!format->findInt32("channel-mask", &channelMask)) {
    1783         // Signal to the AudioSink to derive the mask from the channel count.
   1784         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
   1785     }
   1786 
   1787     int32_t sampleRate;
   1788     CHECK(format->findInt32("sample-rate", &sampleRate));
   1789 
   1790     if (offloadingAudio()) {
   1791         audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
   1792         AString mime;
   1793         CHECK(format->findString("mime", &mime));
   1794         status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
   1795 
   1796         if (err != OK) {
   1797             ALOGE("Couldn't map mime \"%s\" to a valid "
   1798                     "audio_format", mime.c_str());
   1799             onDisableOffloadAudio();
   1800         } else {
   1801             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
   1802                     mime.c_str(), audioFormat);
   1803 
   1804             int avgBitRate = -1;
   1805             format->findInt32("bitrate", &avgBitRate);
   1806 
   1807             int32_t aacProfile = -1;
   1808             if (audioFormat == AUDIO_FORMAT_AAC
   1809                     && format->findInt32("aac-profile", &aacProfile)) {
    1810                 // Redefine the AAC format per the AAC profile.
   1811                 mapAACProfileToAudioFormat(
   1812                         audioFormat,
   1813                         aacProfile);
   1814             }
   1815 
   1816             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
   1817             offloadInfo.duration_us = -1;
   1818             format->findInt64(
   1819                     "durationUs", &offloadInfo.duration_us);
   1820             offloadInfo.sample_rate = sampleRate;
   1821             offloadInfo.channel_mask = channelMask;
   1822             offloadInfo.format = audioFormat;
   1823             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
   1824             offloadInfo.bit_rate = avgBitRate;
   1825             offloadInfo.has_video = hasVideo;
   1826             offloadInfo.is_streaming = true;
   1827 
   1828             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
   1829                 ALOGV("openAudioSink: no change in offload mode");
   1830                 // no change from previous configuration, everything ok.
   1831                 return OK;
   1832             }
   1833             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   1834 
   1835             ALOGV("openAudioSink: try to open AudioSink in offload mode");
   1836             uint32_t offloadFlags = flags;
   1837             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
   1838             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
   1839             audioSinkChanged = true;
   1840             mAudioSink->close();
   1841 
   1842             err = mAudioSink->open(
   1843                     sampleRate,
   1844                     numChannels,
   1845                     (audio_channel_mask_t)channelMask,
   1846                     audioFormat,
   1847                     0 /* bufferCount - unused */,
   1848                     &NuPlayer::Renderer::AudioSinkCallback,
   1849                     this,
   1850                     (audio_output_flags_t)offloadFlags,
   1851                     &offloadInfo);
   1852 
   1853             if (err == OK) {
   1854                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
   1855             }
   1856 
   1857             if (err == OK) {
   1858                 // If the playback is offloaded to h/w, we pass
   1859                 // the HAL some metadata information.
   1860                 // We don't want to do this for PCM because it
   1861                 // will be going through the AudioFlinger mixer
   1862                 // before reaching the hardware.
   1863                 // TODO
   1864                 mCurrentOffloadInfo = offloadInfo;
   1865                 if (!mPaused) { // for preview mode, don't start if paused
   1866                     err = mAudioSink->start();
   1867                 }
   1868                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
   1869             }
   1870             if (err != OK) {
   1871                 // Clean up, fall back to non offload mode.
   1872                 mAudioSink->close();
   1873                 onDisableOffloadAudio();
   1874                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   1875                 ALOGV("openAudioSink: offload failed");
   1876                 if (offloadOnly) {
   1877                     notifyAudioTearDown(kForceNonOffload);
   1878                 }
   1879             } else {
   1880                 mUseAudioCallback = true;  // offload mode transfers data through callback
   1881                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
   1882             }
   1883         }
   1884     }
   1885     if (!offloadOnly && !offloadingAudio()) {
   1886         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
   1887         uint32_t pcmFlags = flags;
   1888         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
   1889 
   1890         const PcmInfo info = {
   1891                 (audio_channel_mask_t)channelMask,
   1892                 (audio_output_flags_t)pcmFlags,
   1893                 AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
   1894                 numChannels,
   1895                 sampleRate
   1896         };
   1897         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
   1898             ALOGV("openAudioSink: no change in pcm mode");
   1899             // no change from previous configuration, everything ok.
   1900             return OK;
   1901         }
   1902 
   1903         audioSinkChanged = true;
   1904         mAudioSink->close();
   1905         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   1906         // Note: It is possible to set up the callback, but not use it to send audio data.
   1907         // This requires a fix in AudioSink to explicitly specify the transfer mode.
   1908         mUseAudioCallback = getUseAudioCallbackSetting();
   1909         if (mUseAudioCallback) {
   1910             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
   1911         }
   1912 
   1913         // Compute the desired buffer size.
   1914         // For callback mode, the amount of time before wakeup is about half the buffer size.
   1915         const uint32_t frameCount =
   1916                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
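                 // e.g. 48000 Hz with a 250 ms sink setting gives 48000 * 250 / 1000 = 12000
                 // frames (example values only).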
   1917 
    1918         // Setting doNotReconnect means the AudioSink will signal back and let NuPlayer re-construct the
   1919         // AudioSink. We don't want this when there's video because it will cause a video seek to
   1920         // the previous I frame. But we do want this when there's only audio because it will give
   1921         // NuPlayer a chance to switch from non-offload mode to offload mode.
   1922         // So we only set doNotReconnect when there's no video.
   1923         const bool doNotReconnect = !hasVideo;
   1924 
   1925         // We should always be able to set our playback settings if the sink is closed.
   1926         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
   1927                 "onOpenAudioSink: can't set playback rate on closed sink");
   1928         status_t err = mAudioSink->open(
   1929                     sampleRate,
   1930                     numChannels,
   1931                     (audio_channel_mask_t)channelMask,
   1932                     AUDIO_FORMAT_PCM_16_BIT,
   1933                     0 /* bufferCount - unused */,
   1934                     mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
   1935                     mUseAudioCallback ? this : NULL,
   1936                     (audio_output_flags_t)pcmFlags,
   1937                     NULL,
   1938                     doNotReconnect,
   1939                     frameCount);
   1940         if (err != OK) {
   1941             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
   1942             mAudioSink->close();
   1943             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   1944             return err;
   1945         }
   1946         mCurrentPcmInfo = info;
   1947         if (!mPaused) { // for preview mode, don't start if paused
   1948             mAudioSink->start();
   1949         }
   1950     }
   1951     if (audioSinkChanged) {
   1952         onAudioSinkChanged();
   1953     }
   1954     mAudioTornDown = false;
   1955     return OK;
   1956 }
   1957 
   1958 void NuPlayer::Renderer::onCloseAudioSink() {
   1959     mAudioSink->close();
   1960     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   1961     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   1962 }
   1963 
   1964 }  // namespace android
   1965 
   1966