/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer2Renderer"
#include <utils/Log.h>

#include "JWakeLock.h"
#include "NuPlayer2Renderer.h"
#include <algorithm>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/VideoFrameScheduler.h>
#include <media/MediaCodecBuffer.h>

#include <inttypes.h>

namespace android {

/*
 * Example of common configuration settings in shell script form

   #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
   adb shell setprop audio.offload.disable 1

   #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
   adb shell setprop audio.offload.video 1

   #Use audio callbacks for PCM data
   adb shell setprop media.stagefright.audio.cbk 1

   #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
   adb shell setprop media.stagefright.audio.deep 1

   #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
   adb shell setprop media.stagefright.audio.sink 1000

 * These configurations take effect for the next track played (not the current track).
 */

static inline bool getUseAudioCallbackSetting() {
    return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
}

static inline int32_t getAudioSinkPcmMsSetting() {
    return property_get_int32(
            "media.stagefright.audio.sink", 500 /* default_value */);
}

// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 10000000ll;

// Maximum allowed delay from AudioSink, 1.5 seconds.
static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000ll;

static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;

// static
const NuPlayer2::Renderer::PcmInfo NuPlayer2::Renderer::AUDIO_PCMINFO_INITIALIZER = {
        AUDIO_CHANNEL_NONE,
        AUDIO_OUTPUT_FLAG_NONE,
        AUDIO_FORMAT_INVALID,
        0, // mNumChannels
        0 // mSampleRate
};

// static
const int64_t NuPlayer2::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer2::Renderer::Renderer(
        const sp<MediaPlayer2Interface::AudioSink> &sink,
        const sp<MediaClock> &mediaClock,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mUseVirtualAudioSink(false),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioDrainGeneration(0),
      mVideoDrainGeneration(0),
      mAudioEOSGeneration(0),
      mMediaClock(mediaClock),
      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorNumFramesWritten(-1),
      mVideoLateByUs(0ll),
      mNextVideoTimeMediaUs(-1),
      mHasAudio(false),
      mHasVideo(false),
      mNotifyCompleteAudio(false),
      mNotifyCompleteVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mPauseDrainAudioAllowedUs(0),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mRenderingDataDelivered(false),
      mNextAudioClockUpdateTimeUs(-1),
      mLastAudioMediaTimeUs(-1),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0),
      mUseAudioCallback(false),
      mWakeLock(new JWakeLock()) {
    CHECK(mediaClock != NULL);
    mPlaybackRate = mPlaybackSettings.mSpeed;
    mMediaClock->setPlaybackRate(mPlaybackRate);
}

NuPlayer2::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }

    // Try to avoid a race condition in case the callback is still active.
    Mutex::Autolock autoLock(mLock);
    if (mUseAudioCallback) {
        flushQueue(&mAudioQueue);
        flushQueue(&mVideoQueue);
    }
    mWakeLock.clear();
    mVideoScheduler.clear();
    mNotify.clear();
    mAudioSink.clear();
}

void NuPlayer2::Renderer::queueBuffer(
        bool audio,
        const sp<MediaCodecBuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
    msg->setInt32("queueGeneration", getQueueGeneration(audio));
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer2::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
    msg->setInt32("queueGeneration", getQueueGeneration(audio));
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

status_t NuPlayer2::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
    writeToAMessage(msg, rate);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
    }
    return err;
}

status_t NuPlayer2::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
    if (rate.mSpeed == 0.f) {
        onPause();
        // Don't call the audio sink's setPlaybackRate() when pausing, as the pitch does not
        // have to correspond to any non-zero speed (e.g. the old speed). Keep the
        // settings nonetheless, using the old speed, in case the audio sink changes.
        AudioPlaybackRate newRate = rate;
        newRate.mSpeed = mPlaybackSettings.mSpeed;
        mPlaybackSettings = newRate;
        return OK;
    }

    if (mAudioSink != NULL && mAudioSink->ready()) {
        status_t err = mAudioSink->setPlaybackRate(rate);
        if (err != OK) {
            return err;
        }
    }
    mPlaybackSettings = rate;
    mPlaybackRate = rate.mSpeed;
    mMediaClock->setPlaybackRate(mPlaybackRate);
    return OK;
}

status_t NuPlayer2::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
        if (err == OK) {
            readFromAMessage(response, rate);
        }
    }
    return err;
}

status_t NuPlayer2::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    if (mAudioSink != NULL && mAudioSink->ready()) {
        status_t err = mAudioSink->getPlaybackRate(rate);
        if (err == OK) {
            if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
                ALOGW("correcting mismatch in internal/external playback rate");
            }
            // Use the playback settings reported by the audio sink, as they may be
            // slightly off because the audio sink does not take small changes.
            mPlaybackSettings = *rate;
            if (mPaused) {
                rate->mSpeed = 0.f;
            }
        }
        return err;
    }
    *rate = mPlaybackSettings;
    return OK;
}

status_t NuPlayer2::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
    writeToAMessage(msg, sync, videoFpsHint);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
    }
    return err;
}

status_t NuPlayer2::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
    if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
        return BAD_VALUE;
    }
    // TODO: support sync sources
    return INVALID_OPERATION;
}

status_t NuPlayer2::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
        if (err == OK) {
            readFromAMessage(response, sync, videoFps);
        }
    }
    return err;
}

status_t NuPlayer2::Renderer::onGetSyncSettings(
        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
    *sync = mSyncSettings;
    *videoFps = -1.f;
    return OK;
}

void NuPlayer2::Renderer::flush(bool audio, bool notifyComplete) {
    {
        Mutex::Autolock autoLock(mLock);
        if (audio) {
            mNotifyCompleteAudio |= notifyComplete;
            clearAudioFirstAnchorTime_l();
            ++mAudioQueueGeneration;
            ++mAudioDrainGeneration;
        } else {
            mNotifyCompleteVideo |= notifyComplete;
            ++mVideoQueueGeneration;
            ++mVideoDrainGeneration;
        }

        mMediaClock->clearAnchor();
        mVideoLateByUs = 0;
        mNextVideoTimeMediaUs = -1;
        mSyncQueues = false;
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, this);
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer2::Renderer::signalTimeDiscontinuity() {
}

void NuPlayer2::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, this))->post();
}

void NuPlayer2::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, this))->post();
}

void NuPlayer2::Renderer::pause() {
    (new AMessage(kWhatPause, this))->post();
}

void NuPlayer2::Renderer::resume() {
    (new AMessage(kWhatResume, this))->post();
}

void NuPlayer2::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Called on any thread without mLock acquired.
status_t NuPlayer2::Renderer::getCurrentPosition(int64_t *mediaUs) {
    status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
    if (result == OK) {
        return result;
    }

    // MediaClock has not started yet. Try to start it if possible.
    {
        Mutex::Autolock autoLock(mLock);
        if (mAudioFirstAnchorTimeMediaUs == -1) {
            return result;
        }

        AudioTimestamp ts;
        status_t res = mAudioSink->getTimestamp(ts);
        if (res != OK) {
            return result;
        }

        // AudioSink has rendered some frames.
        int64_t nowUs = ALooper::GetNowUs();
        int64_t nowMediaUs = mAudioSink->getPlayedOutDurationUs(nowUs)
                + mAudioFirstAnchorTimeMediaUs;
        mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
    }

    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
}

void NuPlayer2::Renderer::clearAudioFirstAnchorTime_l() {
    mAudioFirstAnchorTimeMediaUs = -1;
    mMediaClock->setStartingTimeMedia(-1);
}

void NuPlayer2::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
        mMediaClock->setStartingTimeMedia(mediaUs);
    }
}

// Called on renderer looper.
void NuPlayer2::Renderer::clearAnchorTime() {
    mMediaClock->clearAnchor();
    mAnchorTimeMediaUs = -1;
    mAnchorNumFramesWritten = -1;
}

void NuPlayer2::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer2::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mLock);
    return mVideoLateByUs;
}

status_t NuPlayer2::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool *isOffloaded,
        bool isStreaming) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);
    msg->setInt32("isStreaming", isStreaming);

    sp<AMessage> response;
    status_t postStatus = msg->postAndAwaitResponse(&response);

    int32_t err;
    if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
        err = INVALID_OPERATION;
    } else if (err == OK && isOffloaded != NULL) {
        int32_t offload;
        CHECK(response->findInt32("offload", &offload));
        *isOffloaded = (offload != 0);
    }
    return err;
}

void NuPlayer2::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

void NuPlayer2::Renderer::changeAudioFormat(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool isStreaming,
        const sp<AMessage> &notify) {
    sp<AMessage> meta = new AMessage;
    meta->setMessage("format", format);
    meta->setInt32("offload-only", offloadOnly);
    meta->setInt32("has-video", hasVideo);
    meta->setInt32("flags", flags);
    meta->setInt32("isStreaming", isStreaming);

    sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
    msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
    msg->setMessage("notify", notify);
    msg->setMessage("meta", meta);
    msg->post();
}

void NuPlayer2::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            uint32_t isStreaming;
            CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));

            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->setInt32("offload", offloadingAudio());

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatChangeAudioFormat:
        {
            int32_t queueGeneration;
            CHECK(msg->findInt32("queueGeneration", &queueGeneration));

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));

            if (offloadingAudio()) {
                ALOGW("changeAudioFormat should NOT be called in offload mode");
                notify->setInt32("err", INVALID_OPERATION);
                notify->post();
                break;
            }

            sp<AMessage> meta;
            CHECK(msg->findMessage("meta", &meta));

            if (queueGeneration != getQueueGeneration(true /* audio */)
                    || mAudioQueue.empty()) {
                onChangeAudioFormat(meta, notify);
                break;
            }

            QueueEntry entry;
            entry.mNotifyConsumed = notify;
            entry.mMeta = meta;

            Mutex::Autolock autoLock(mLock);
            mAudioQueue.push_back(entry);
            postDrainAudioQueue_l();

            break;
        }

        case kWhatDrainAudioQueue:
        {
            mDrainAudioQueuePending = false;

            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(true /* audio */)) {
                break;
            }

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                // Handle AudioTrack race when start is immediately called after flush.
                uint32_t numFramesPendingPlayout =
                    (mNumFramesWritten > numFramesPlayed ?
                        mNumFramesWritten - numFramesPlayed : 0);

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;
                if (mPlaybackRate > 1.0f) {
                    delayUs /= mPlaybackRate;
                }

                // Let's give it more data after about half that time
                // has elapsed.
                delayUs /= 2;
                // check the buffer size to estimate maximum delay permitted.
                const int64_t maxDrainDelayUs = std::max(
                        mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
                ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
                        (long long)delayUs, (long long)maxDrainDelayUs);
                Mutex::Autolock autoLock(mLock);
                postDrainAudioQueue_l(delayUs);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(false /* audio */)) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(false /* audio */)) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatEOS:
        {
            int32_t generation;
            CHECK(msg->findInt32("audioEOSGeneration", &generation));
            if (generation != mAudioEOSGeneration) {
                break;
            }
            status_t finalResult;
            CHECK(msg->findInt32("finalResult", &finalResult));
            notifyEOS(true /* audio */, finalResult);
            break;
        }

        case kWhatConfigPlayback:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AudioPlaybackRate rate;
            readFromAMessage(msg, &rate);
            status_t err = onConfigPlayback(rate);
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatGetPlaybackSettings:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
            status_t err = onGetPlaybackSettings(&rate);
            sp<AMessage> response = new AMessage;
            if (err == OK) {
                writeToAMessage(response, rate);
            }
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatConfigSync:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AVSyncSettings sync;
            float videoFpsHint;
            readFromAMessage(msg, &sync, &videoFpsHint);
            status_t err = onConfigSync(sync, videoFpsHint);
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatGetSyncSettings:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            ALOGV("kWhatGetSyncSettings");
            AVSyncSettings sync;
            float videoFps = -1.f;
            status_t err = onGetSyncSettings(&sync, &videoFps);
            sp<AMessage> response = new AMessage;
            if (err == OK) {
                writeToAMessage(response, sync, videoFps);
            }
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioTearDown:
        {
            int32_t reason;
            CHECK(msg->findInt32("reason", &reason));

            onAudioTearDown((AudioTearDownReason)reason);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioTearDown(kDueToTimeout);
            mWakeLock->release();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void NuPlayer2::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
    if (mPaused) {
        const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
        if (diffUs > delayUs) {
            delayUs = diffUs;
        }
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
    msg->setInt32("drainGeneration", mAudioDrainGeneration);
    msg->post(delayUs);
}

void NuPlayer2::Renderer::prepareForMediaRenderingStart_l() {
    mAudioRenderingStartGeneration = mAudioDrainGeneration;
    mVideoRenderingStartGeneration = mVideoDrainGeneration;
    mRenderingDataDelivered = false;
}

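// Posts kWhatMediaRenderingStart to the notify target once both the audio and video queues
// have delivered data for the current drain generations (and the renderer is not paused).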
void NuPlayer2::Renderer::notifyIfMediaRenderingStarted_l() {
    if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
        mAudioRenderingStartGeneration == mAudioDrainGeneration) {
        mRenderingDataDelivered = true;
        if (mPaused) {
            return;
        }
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
size_t NuPlayer2::Renderer::AudioSinkCallback(
        MediaPlayer2Interface::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayer2Interface::AudioSink::cb_event_t event) {
    NuPlayer2::Renderer *me = (NuPlayer2::Renderer *)cookie;

    switch (event) {
        case MediaPlayer2Interface::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayer2Interface::AudioSink::CB_EVENT_STREAM_END:
        {
            ALOGV("AudioSink::CB_EVENT_STREAM_END");
            me->notifyEOSCallback();
            break;
        }

        case MediaPlayer2Interface::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
            me->notifyAudioTearDown(kDueToError);
            break;
        }
    }

    return 0;
}

void NuPlayer2::Renderer::notifyEOSCallback() {
    Mutex::Autolock autoLock(mLock);

    if (!mUseAudioCallback) {
        return;
    }

    notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
}

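// Called from the AudioSink callback (CB_EVENT_FILL_BUFFER) to copy queued PCM data into the
// sink's buffer while holding mLock. Returns the number of bytes copied and updates the media
// clock anchor from the played-out duration.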
size_t NuPlayer2::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!mUseAudioCallback) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    QueueEntry *entry;  // will be valid after the while loop if hasEOS is set.
    while (sizeCopied < size && !mAudioQueue.empty()) {
        entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;

        notifyIfMediaRenderingStarted_l();
    }

    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        int64_t nowMediaUs =
            mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
        // we don't know how much data we are queueing for offloaded tracks.
        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
    }

    // For non-offloaded audio, we need to compute the frames written because
    // there is no EVENT_STREAM_END notification. The frames written give
    // an estimate of the pending play-out duration.
    if (!offloadingAudio()) {
        mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, this))->post();
        // As there is currently no EVENT_STREAM_END callback notification for
        // non-offloaded audio tracks, we need to post the EOS ourselves.
        if (!offloadingAudio()) {
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            ALOGV("fillAudioBuffer: notifyEOS_l "
                    "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
                    mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
            notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
        }
    }
    return sizeCopied;
}

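// Flushes audio queue entries up to and including the last EOS (or format-change) marker,
// posting their replies so the decoder is not left waiting when the sink cannot be drained.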
void NuPlayer2::Renderer::drainAudioQueueUntilLastEOS() {
    List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
    bool foundEOS = false;
    while (it != mAudioQueue.end()) {
        int32_t eos;
        QueueEntry *entry = &*it++;
        if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
            itEOS = it;
            foundEOS = true;
        }
    }

    if (foundEOS) {
        // post all replies before EOS and drop the samples
        for (it = mAudioQueue.begin(); it != itEOS; it++) {
            if (it->mBuffer == nullptr) {
                if (it->mNotifyConsumed == nullptr) {
                    // delay doesn't matter as we don't even have an AudioTrack
                    notifyEOS(true /* audio */, it->mFinalResult);
                } else {
                    // TAG for re-opening audio sink.
                    onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
                }
            } else {
                it->mNotifyConsumed->post();
            }
        }
        mAudioQueue.erase(mAudioQueue.begin(), itEOS);
    }
}

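// Writes as much queued audio to the sink as it will accept without blocking.
// Returns true if another drain should be scheduled.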
bool NuPlayer2::Renderer::onDrainAudioQueue() {
    // do not drain audio during teardown as queued buffers may be invalid.
    if (mAudioTornDown) {
        return false;
    }
    // TODO: This call to getPosition checks if AudioTrack has been created
    // in AudioSink before draining audio. If AudioTrack doesn't exist, then
    // CHECKs on getPosition will fail.
    // We still need to figure out why AudioTrack is not created when
    // this function is called. One possible cause is leftover audio.
    // Another place to check is whether the decoder has received
    // INFO_FORMAT_CHANGED as its first buffer, since AudioSink is opened
    // there, and possible interactions with a flush immediately after start.
    // Investigate the error message "vorbis_dsp_synthesis returned -135",
    // along with RTSP.
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        // When getPosition fails, the renderer will not reschedule the drain
        // unless new samples are queued.
        // If we have a pending EOS (or "eos" marker for discontinuities), we need
        // to post it now as NuPlayer2Decoder might be waiting for it.
        drainAudioQueueUntilLastEOS();

        ALOGW("onDrainAudioQueue(): audio sink is not ready");
        return false;
    }

#if 0
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    uint32_t prevFramesWritten = mNumFramesWritten;
    while (!mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            if (entry->mNotifyConsumed != nullptr) {
                // TAG for re-open audio sink.
                onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
                mAudioQueue.erase(mAudioQueue.begin());
                continue;
            }

            // EOS
            if (mPaused) {
                // Do not notify EOS when paused.
                // This is needed to avoid switching to the next clip while paused.
                ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
                return false;
            }

            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
            mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            if (mAudioSink->needsTrailingPadding()) {
                // If we're not in gapless playback (i.e. through setNextPlayer), we
                // need to stop the track here, because that will play out the last
                // little bit at the end of the file. Otherwise short files won't play.
                mAudioSink->stop();
                mNumFramesWritten = 0;
            }
            return false;
        }

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        // ignore a 0-sized buffer, which could be an EOS marker with no data
        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
                    mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
                                            copy, false /* blocking */);
        if (written < 0) {
            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
            if (written == WOULD_BLOCK) {
                ALOGV("AudioSink write would block when writing %zu bytes", copy);
            } else {
                ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
                // This can only happen when AudioSink was opened with doNotReconnect flag set to
                // true, in which case the NuPlayer2 will handle the reconnect.
                notifyAudioTearDown(kDueToError);
            }
            break;
        }

        entry->mOffset += written;
        size_t remainder = entry->mBuffer->size() - entry->mOffset;
        if ((ssize_t)remainder < mAudioSink->frameSize()) {
            if (remainder > 0) {
                ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
                        remainder);
                entry->mOffset += remainder;
                copy -= remainder;
            }

            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        {
            Mutex::Autolock autoLock(mLock);
            int64_t maxTimeMedia;
            maxTimeMedia =
                mAnchorTimeMediaUs +
                        (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                                * 1000LL * mAudioSink->msecsPerFrame());
            mMediaClock->updateMaxTimeMedia(maxTimeMedia);

            notifyIfMediaRenderingStarted_l();
        }

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write is called in non-blocking mode.
            // It may return with a short count when:
            //
            // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
            //    discarded.
            // 2) The data to be copied exceeds the available buffer in AudioSink.
            // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
            // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0u);

            // (Case 2, 3, 4)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }

    // Calculate whether we need to reschedule another write.
    bool reschedule = !mAudioQueue.empty()
            && (!mPaused
                || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
    //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
    //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
    return reschedule;
}

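// Converts a written frame count into a playback duration in microseconds at the current
// sink sample rate.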
int64_t NuPlayer2::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
    int32_t sampleRate = offloadingAudio() ?
            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
    if (sampleRate == 0) {
        ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
        return 0;
    }
    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
}

// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
int64_t NuPlayer2::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
    if (mUseVirtualAudioSink) {
        int64_t nowUs = ALooper::GetNowUs();
        int64_t mediaUs;
        if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
            return 0ll;
        } else {
            return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
        }
    }

    const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
    int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
    if (pendingUs < 0) {
        // This shouldn't happen unless the timestamp is stale.
        ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
                "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
                __func__, (long long)pendingUs,
                (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
        pendingUs = 0;
    }
    return pendingUs;
}

int64_t NuPlayer2::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t realUs;
    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
        // If we failed to get the current position, e.g. because the audio clock is
        // not ready, just play out the video immediately without delay.
        return nowUs;
    }
    return realUs;
}

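// Updates the media clock anchor from a newly drained audio timestamp. If the sink never
// starts rendering despite enough written data, falls back to a virtual audio sink paced
// by the system clock.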
void NuPlayer2::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    Mutex::Autolock autoLock(mLock);
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);

    // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
    if (mNextAudioClockUpdateTimeUs == -1) {
        AudioTimestamp ts;
        if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
            mNextAudioClockUpdateTimeUs = 0; // start our clock updates
        }
    }
    int64_t nowUs = ALooper::GetNowUs();
    if (mNextAudioClockUpdateTimeUs >= 0) {
        if (nowUs >= mNextAudioClockUpdateTimeUs) {
            int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
            mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
            mUseVirtualAudioSink = false;
            mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
        }
    } else {
        int64_t unused;
        if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
                && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
                        > kMaxAllowedAudioSinkDelayUs)) {
            // Enough data has been sent to AudioSink, but AudioSink has not rendered
            // any data yet. Something is wrong with AudioSink, e.g., the device is not
            // connected to audio out.
            // Switch to the system clock. This essentially creates a virtual AudioSink with
            // an initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
            // This virtual AudioSink renders audio data starting from the very first sample
            // and is paced by the system clock.
            ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
            mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
            mUseVirtualAudioSink = true;
        }
    }
    mAnchorNumFramesWritten = mNumFramesWritten;
    mAnchorTimeMediaUs = mediaTimeUs;
}

// Called without mLock acquired.
void NuPlayer2::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || getSyncQueues()
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();
    if (mFlags & FLAG_REAL_TIME) {
        int64_t realTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));

        realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;

        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

        int64_t delayUs = realTimeUs - nowUs;

        ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
        // post 2 display refreshes before rendering is due
        msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

        mDrainVideoQueuePending = true;
        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    {
        Mutex::Autolock autoLock(mLock);
        if (mAnchorTimeMediaUs < 0) {
            mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
            mAnchorTimeMediaUs = mediaTimeUs;
        }
    }
    mNextVideoTimeMediaUs = mediaTimeUs + 100000;
    if (!mHasAudio) {
        // smooth out videos >= 10fps
        mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
    }

    if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
        msg->post();
    } else {
        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

        // post 2 display refreshes before rendering is due
        mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
    }

    mDrainVideoQueuePending = true;
}

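// Renders (or drops, if it is more than 40 ms late) the video frame at the head of the queue
// and notifies the decoder that the buffer has been consumed.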
   1299 void NuPlayer2::Renderer::onDrainVideoQueue() {
   1300     if (mVideoQueue.empty()) {
   1301         return;
   1302     }
   1303 
   1304     QueueEntry *entry = &*mVideoQueue.begin();
   1305 
   1306     if (entry->mBuffer == NULL) {
   1307         // EOS
   1308 
   1309         notifyEOS(false /* audio */, entry->mFinalResult);
   1310 
   1311         mVideoQueue.erase(mVideoQueue.begin());
   1312         entry = NULL;
   1313 
   1314         setVideoLateByUs(0);
   1315         return;
   1316     }
   1317 
   1318     int64_t nowUs = ALooper::GetNowUs();
   1319     int64_t realTimeUs;
   1320     int64_t mediaTimeUs = -1;
   1321     if (mFlags & FLAG_REAL_TIME) {
   1322         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
   1323     } else {
   1324         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
   1325 
   1326         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
   1327     }
   1328     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
   1329 
   1330     bool tooLate = false;
   1331 
   1332     if (!mPaused) {
   1333         setVideoLateByUs(nowUs - realTimeUs);
   1334         tooLate = (mVideoLateByUs > 40000);
   1335 
   1336         if (tooLate) {
   1337             ALOGV("video late by %lld us (%.2f secs)",
   1338                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
   1339         } else {
   1340             int64_t mediaUs = 0;
   1341             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
   1342             ALOGV("rendering video at media time %.2f secs",
   1343                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
   1344                     mediaUs) / 1E6);
   1345 
   1346             if (!(mFlags & FLAG_REAL_TIME)
   1347                     && mLastAudioMediaTimeUs != -1
   1348                     && mediaTimeUs > mLastAudioMediaTimeUs) {
   1349                 // If audio ends before video, video continues to drive media clock.
   1350                 // Also smooth out videos >= 10fps.
   1351                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
   1352             }
   1353         }
   1354     } else {
   1355         setVideoLateByUs(0);
   1356         if (!mVideoSampleReceived && !mHasAudio) {
    1357             // This ensures that the first frame after a flush won't be used as an anchor
    1358             // while the renderer is paused, because a resume can happen at any time after a seek.
   1359             clearAnchorTime();
   1360         }
   1361     }
   1362 
   1363     // Always render the first video frame while keeping stats on A/V sync.
   1364     if (!mVideoSampleReceived) {
   1365         realTimeUs = nowUs;
   1366         tooLate = false;
   1367     }
   1368 
   1369     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
   1370     entry->mNotifyConsumed->setInt32("render", !tooLate);
   1371     entry->mNotifyConsumed->post();
   1372     mVideoQueue.erase(mVideoQueue.begin());
   1373     entry = NULL;
   1374 
   1375     mVideoSampleReceived = true;
   1376 
   1377     if (!mPaused) {
   1378         if (!mVideoRenderingStarted) {
   1379             mVideoRenderingStarted = true;
   1380             notifyVideoRenderingStart();
   1381         }
   1382         Mutex::Autolock autoLock(mLock);
   1383         notifyIfMediaRenderingStarted_l();
   1384     }
   1385 }
   1386 
   1387 void NuPlayer2::Renderer::notifyVideoRenderingStart() {
   1388     sp<AMessage> notify = mNotify->dup();
   1389     notify->setInt32("what", kWhatVideoRenderingStart);
   1390     notify->post();
   1391 }
   1392 
   1393 void NuPlayer2::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
   1394     Mutex::Autolock autoLock(mLock);
   1395     notifyEOS_l(audio, finalResult, delayUs);
   1396 }
   1397 
   1398 void NuPlayer2::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
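             // A delayed audio EOS is re-posted to this handler tagged with the current audio EOS
             // generation, so that a flush in the meantime (which bumps mAudioEOSGeneration) can
             // invalidate it.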
   1399     if (audio && delayUs > 0) {
   1400         sp<AMessage> msg = new AMessage(kWhatEOS, this);
   1401         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
   1402         msg->setInt32("finalResult", finalResult);
   1403         msg->post(delayUs);
   1404         return;
   1405     }
   1406     sp<AMessage> notify = mNotify->dup();
   1407     notify->setInt32("what", kWhatEOS);
   1408     notify->setInt32("audio", static_cast<int32_t>(audio));
   1409     notify->setInt32("finalResult", finalResult);
   1410     notify->post(delayUs);
   1411 
   1412     if (audio) {
    1413         // Video might outlive audio. Clear the anchor to enable the video-only case.
   1414         mAnchorTimeMediaUs = -1;
   1415         mHasAudio = false;
   1416         if (mNextVideoTimeMediaUs >= 0) {
   1417             int64_t mediaUs = 0;
   1418             mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
   1419             if (mNextVideoTimeMediaUs > mediaUs) {
   1420                 mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
   1421             }
   1422         }
   1423     }
   1424 }
   1425 
   1426 void NuPlayer2::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
   1427     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
   1428     msg->setInt32("reason", reason);
   1429     msg->post();
   1430 }
   1431 
   1432 void NuPlayer2::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
   1433     int32_t audio;
   1434     CHECK(msg->findInt32("audio", &audio));
   1435 
   1436     if (dropBufferIfStale(audio, msg)) {
   1437         return;
   1438     }
   1439 
   1440     if (audio) {
   1441         mHasAudio = true;
   1442     } else {
   1443         mHasVideo = true;
   1444     }
   1445 
   1446     if (mHasVideo) {
   1447         if (mVideoScheduler == NULL) {
   1448             mVideoScheduler = new VideoFrameScheduler();
   1449             mVideoScheduler->init();
   1450         }
   1451     }
   1452 
   1453     sp<RefBase> obj;
   1454     CHECK(msg->findObject("buffer", &obj));
   1455     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
   1456 
   1457     sp<AMessage> notifyConsumed;
   1458     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
   1459 
   1460     QueueEntry entry;
   1461     entry.mBuffer = buffer;
   1462     entry.mNotifyConsumed = notifyConsumed;
   1463     entry.mOffset = 0;
   1464     entry.mFinalResult = OK;
   1465     entry.mBufferOrdinal = ++mTotalBuffersQueued;
   1466 
   1467     if (audio) {
   1468         Mutex::Autolock autoLock(mLock);
   1469         mAudioQueue.push_back(entry);
   1470         postDrainAudioQueue_l();
   1471     } else {
   1472         mVideoQueue.push_back(entry);
   1473         postDrainVideoQueue();
   1474     }
   1475 
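             // The rest of this function handles queue start-up sync: when mSyncQueues is set,
             // wait until both queues have a buffer, then drop leading audio that starts more
             // than 0.1 secs before the first video buffer.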
   1476     Mutex::Autolock autoLock(mLock);
   1477     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
   1478         return;
   1479     }
   1480 
   1481     sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
   1482     sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
   1483 
   1484     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
   1485         // EOS signalled on either queue.
   1486         syncQueuesDone_l();
   1487         return;
   1488     }
   1489 
   1490     int64_t firstAudioTimeUs;
   1491     int64_t firstVideoTimeUs;
   1492     CHECK(firstAudioBuffer->meta()
   1493             ->findInt64("timeUs", &firstAudioTimeUs));
   1494     CHECK(firstVideoBuffer->meta()
   1495             ->findInt64("timeUs", &firstVideoTimeUs));
   1496 
   1497     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
   1498 
   1499     ALOGV("queueDiff = %.2f secs", diff / 1E6);
   1500 
   1501     if (diff > 100000ll) {
    1502         // Audio data starts more than 0.1 secs before video.
   1503         // Drop some audio.
   1504 
   1505         (*mAudioQueue.begin()).mNotifyConsumed->post();
   1506         mAudioQueue.erase(mAudioQueue.begin());
   1507         return;
   1508     }
   1509 
   1510     syncQueuesDone_l();
   1511 }
   1512 
   1513 void NuPlayer2::Renderer::syncQueuesDone_l() {
   1514     if (!mSyncQueues) {
   1515         return;
   1516     }
   1517 
   1518     mSyncQueues = false;
   1519 
   1520     if (!mAudioQueue.empty()) {
   1521         postDrainAudioQueue_l();
   1522     }
   1523 
   1524     if (!mVideoQueue.empty()) {
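                 // postDrainVideoQueue() takes mLock itself, so release it around the call.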
   1525         mLock.unlock();
   1526         postDrainVideoQueue();
   1527         mLock.lock();
   1528     }
   1529 }
   1530 
   1531 void NuPlayer2::Renderer::onQueueEOS(const sp<AMessage> &msg) {
   1532     int32_t audio;
   1533     CHECK(msg->findInt32("audio", &audio));
   1534 
   1535     if (dropBufferIfStale(audio, msg)) {
   1536         return;
   1537     }
   1538 
   1539     int32_t finalResult;
   1540     CHECK(msg->findInt32("finalResult", &finalResult));
   1541 
   1542     QueueEntry entry;
   1543     entry.mOffset = 0;
   1544     entry.mFinalResult = finalResult;
   1545 
   1546     if (audio) {
   1547         Mutex::Autolock autoLock(mLock);
   1548         if (mAudioQueue.empty() && mSyncQueues) {
   1549             syncQueuesDone_l();
   1550         }
   1551         mAudioQueue.push_back(entry);
   1552         postDrainAudioQueue_l();
   1553     } else {
   1554         if (mVideoQueue.empty() && getSyncQueues()) {
   1555             Mutex::Autolock autoLock(mLock);
   1556             syncQueuesDone_l();
   1557         }
   1558         mVideoQueue.push_back(entry);
   1559         postDrainVideoQueue();
   1560     }
   1561 }
   1562 
   1563 void NuPlayer2::Renderer::onFlush(const sp<AMessage> &msg) {
   1564     int32_t audio, notifyComplete;
   1565     CHECK(msg->findInt32("audio", &audio));
   1566 
   1567     {
   1568         Mutex::Autolock autoLock(mLock);
   1569         if (audio) {
   1570             notifyComplete = mNotifyCompleteAudio;
   1571             mNotifyCompleteAudio = false;
   1572             mLastAudioMediaTimeUs = -1;
   1573         } else {
   1574             notifyComplete = mNotifyCompleteVideo;
   1575             mNotifyCompleteVideo = false;
   1576         }
   1577 
   1578         // If we're currently syncing the queues, i.e. dropping audio while
   1579         // aligning the first audio/video buffer times and only one of the
   1580         // two queues has data, we may starve that queue by not requesting
   1581         // more buffers from the decoder. If the other source then encounters
   1582         // a discontinuity that leads to flushing, we'll never find the
   1583         // corresponding discontinuity on the other queue.
   1584         // Therefore we'll stop syncing the queues if at least one of them
   1585         // is flushed.
   1586         syncQueuesDone_l();
   1587     }
   1588     clearAnchorTime();
   1589 
   1590     ALOGV("flushing %s", audio ? "audio" : "video");
   1591     if (audio) {
   1592         {
   1593             Mutex::Autolock autoLock(mLock);
   1594             flushQueue(&mAudioQueue);
   1595 
   1596             ++mAudioDrainGeneration;
   1597             ++mAudioEOSGeneration;
   1598             prepareForMediaRenderingStart_l();
   1599 
    1600             // The frame count will be reset after the flush.
   1601             clearAudioFirstAnchorTime_l();
   1602         }
   1603 
   1604         mDrainAudioQueuePending = false;
   1605 
   1606         if (offloadingAudio()) {
   1607             mAudioSink->pause();
   1608             mAudioSink->flush();
   1609             if (!mPaused) {
   1610                 mAudioSink->start();
   1611             }
   1612         } else {
   1613             mAudioSink->pause();
   1614             mAudioSink->flush();
   1615             // Call stop() to signal to the AudioSink to completely fill the
   1616             // internal buffer before resuming playback.
   1617             // FIXME: this is ignored after flush().
   1618             mAudioSink->stop();
   1619             if (mPaused) {
    1620                 // Race condition: if the renderer is paused and the audio sink is stopped,
   1621                 // we need to make sure that the audio track buffer fully drains
   1622                 // before delivering data.
   1623                 // FIXME: remove this if we can detect if stop() is complete.
   1624                 const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
   1625                 mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
   1626             } else {
   1627                 mAudioSink->start();
   1628             }
   1629             mNumFramesWritten = 0;
   1630         }
   1631         mNextAudioClockUpdateTimeUs = -1;
   1632     } else {
   1633         flushQueue(&mVideoQueue);
   1634 
   1635         mDrainVideoQueuePending = false;
   1636 
   1637         if (mVideoScheduler != NULL) {
   1638             mVideoScheduler->restart();
   1639         }
   1640 
   1641         Mutex::Autolock autoLock(mLock);
   1642         ++mVideoDrainGeneration;
   1643         prepareForMediaRenderingStart_l();
   1644     }
   1645 
   1646     mVideoSampleReceived = false;
   1647 
   1648     if (notifyComplete) {
   1649         notifyFlushComplete(audio);
   1650     }
   1651 }
   1652 
   1653 void NuPlayer2::Renderer::flushQueue(List<QueueEntry> *queue) {
   1654     while (!queue->empty()) {
   1655         QueueEntry *entry = &*queue->begin();
   1656 
   1657         if (entry->mBuffer != NULL) {
   1658             entry->mNotifyConsumed->post();
   1659         } else if (entry->mNotifyConsumed != nullptr) {
    1660             // Is it necessary to open the audio sink now?
   1661             onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
   1662         }
   1663 
   1664         queue->erase(queue->begin());
   1665         entry = NULL;
   1666     }
   1667 }
   1668 
   1669 void NuPlayer2::Renderer::notifyFlushComplete(bool audio) {
   1670     sp<AMessage> notify = mNotify->dup();
   1671     notify->setInt32("what", kWhatFlushComplete);
   1672     notify->setInt32("audio", static_cast<int32_t>(audio));
   1673     notify->post();
   1674 }
   1675 
   1676 bool NuPlayer2::Renderer::dropBufferIfStale(
   1677         bool audio, const sp<AMessage> &msg) {
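             // Buffers queued before the most recent flush carry a stale queueGeneration;
             // notify the decoder that they were consumed and drop them.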
   1678     int32_t queueGeneration;
   1679     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
   1680 
   1681     if (queueGeneration == getQueueGeneration(audio)) {
   1682         return false;
   1683     }
   1684 
   1685     sp<AMessage> notifyConsumed;
   1686     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
   1687         notifyConsumed->post();
   1688     }
   1689 
   1690     return true;
   1691 }
   1692 
   1693 void NuPlayer2::Renderer::onAudioSinkChanged() {
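             // Nothing to do in offload mode; for PCM, re-sync the written-frame counter with
             // the newly configured sink.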
   1694     if (offloadingAudio()) {
   1695         return;
   1696     }
   1697     CHECK(!mDrainAudioQueuePending);
   1698     mNumFramesWritten = 0;
   1699     mAnchorNumFramesWritten = -1;
   1700     uint32_t written;
   1701     if (mAudioSink->getFramesWritten(&written) == OK) {
   1702         mNumFramesWritten = written;
   1703     }
   1704 }
   1705 
   1706 void NuPlayer2::Renderer::onDisableOffloadAudio() {
   1707     Mutex::Autolock autoLock(mLock);
   1708     mFlags &= ~FLAG_OFFLOAD_AUDIO;
   1709     ++mAudioDrainGeneration;
   1710     if (mAudioRenderingStartGeneration != -1) {
   1711         prepareForMediaRenderingStart_l();
   1712     }
   1713 }
   1714 
   1715 void NuPlayer2::Renderer::onEnableOffloadAudio() {
   1716     Mutex::Autolock autoLock(mLock);
   1717     mFlags |= FLAG_OFFLOAD_AUDIO;
   1718     ++mAudioDrainGeneration;
   1719     if (mAudioRenderingStartGeneration != -1) {
   1720         prepareForMediaRenderingStart_l();
   1721     }
   1722 }
   1723 
   1724 void NuPlayer2::Renderer::onPause() {
   1725     if (mPaused) {
   1726         return;
   1727     }
   1728 
   1729     {
   1730         Mutex::Autolock autoLock(mLock);
   1731         // we do not increment audio drain generation so that we fill audio buffer during pause.
   1732         ++mVideoDrainGeneration;
   1733         prepareForMediaRenderingStart_l();
   1734         mPaused = true;
   1735         mMediaClock->setPlaybackRate(0.0);
   1736     }
   1737 
   1738     mDrainAudioQueuePending = false;
   1739     mDrainVideoQueuePending = false;
   1740 
   1741     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
   1742     mAudioSink->pause();
   1743     startAudioOffloadPauseTimeout();
   1744 
   1745     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
   1746           mAudioQueue.size(), mVideoQueue.size());
   1747 }
   1748 
   1749 void NuPlayer2::Renderer::onResume() {
   1750     if (!mPaused) {
   1751         return;
   1752     }
   1753 
   1754     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
   1755     cancelAudioOffloadPauseTimeout();
   1756     if (mAudioSink->ready()) {
   1757         status_t err = mAudioSink->start();
   1758         if (err != OK) {
   1759             ALOGE("cannot start AudioSink err %d", err);
   1760             notifyAudioTearDown(kDueToError);
   1761         }
   1762     }
   1763 
   1764     {
   1765         Mutex::Autolock autoLock(mLock);
   1766         mPaused = false;
    1767         // The rendering-started notification may have been delayed while we were paused.
   1768         if (mRenderingDataDelivered) {
   1769             notifyIfMediaRenderingStarted_l();
   1770         }
    1771         // Configure the AudioSink, since we did not do it when pausing.
   1772         if (mAudioSink != NULL && mAudioSink->ready()) {
   1773             mAudioSink->setPlaybackRate(mPlaybackSettings);
   1774         }
   1775 
   1776         mMediaClock->setPlaybackRate(mPlaybackRate);
   1777 
   1778         if (!mAudioQueue.empty()) {
   1779             postDrainAudioQueue_l();
   1780         }
   1781     }
   1782 
   1783     if (!mVideoQueue.empty()) {
   1784         postDrainVideoQueue();
   1785     }
   1786 }
   1787 
   1788 void NuPlayer2::Renderer::onSetVideoFrameRate(float fps) {
   1789     if (mVideoScheduler == NULL) {
   1790         mVideoScheduler = new VideoFrameScheduler();
   1791     }
   1792     mVideoScheduler->init(fps);
   1793 }
   1794 
   1795 int32_t NuPlayer2::Renderer::getQueueGeneration(bool audio) {
   1796     Mutex::Autolock autoLock(mLock);
   1797     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
   1798 }
   1799 
   1800 int32_t NuPlayer2::Renderer::getDrainGeneration(bool audio) {
   1801     Mutex::Autolock autoLock(mLock);
   1802     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
   1803 }
   1804 
   1805 bool NuPlayer2::Renderer::getSyncQueues() {
   1806     Mutex::Autolock autoLock(mLock);
   1807     return mSyncQueues;
   1808 }
   1809 
   1810 void NuPlayer2::Renderer::onAudioTearDown(AudioTearDownReason reason) {
   1811     if (mAudioTornDown) {
   1812         return;
   1813     }
   1814     mAudioTornDown = true;
   1815 
   1816     int64_t currentPositionUs;
   1817     sp<AMessage> notify = mNotify->dup();
   1818     if (getCurrentPosition(&currentPositionUs) == OK) {
   1819         notify->setInt64("positionUs", currentPositionUs);
   1820     }
   1821 
   1822     mAudioSink->stop();
   1823     mAudioSink->flush();
   1824 
   1825     notify->setInt32("what", kWhatAudioTearDown);
   1826     notify->setInt32("reason", reason);
   1827     notify->post();
   1828 }
   1829 
   1830 void NuPlayer2::Renderer::startAudioOffloadPauseTimeout() {
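             // Offload-only: hold a wakelock and post a timeout message tagged with the current
             // generation; cancelAudioOffloadPauseTimeout() bumps the generation to void it.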
   1831     if (offloadingAudio()) {
   1832         mWakeLock->acquire();
   1833         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
   1834         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
   1835         msg->post(kOffloadPauseMaxUs);
   1836     }
   1837 }
   1838 
   1839 void NuPlayer2::Renderer::cancelAudioOffloadPauseTimeout() {
   1840     // We may have called startAudioOffloadPauseTimeout() without
   1841     // the AudioSink open and with offloadingAudio enabled.
   1842     //
    1843     // By the time we cancel, offloadingAudio may have been disabled, so we always release
    1844     // the wakelock and increment the pause timeout generation regardless.
   1845     //
   1846     // Note: The acquired wakelock prevents the device from suspending
   1847     // immediately after offload pause (in case a resume happens shortly thereafter).
   1848     mWakeLock->release(true);
   1849     ++mAudioOffloadPauseTimeoutGeneration;
   1850 }
   1851 
   1852 status_t NuPlayer2::Renderer::onOpenAudioSink(
   1853         const sp<AMessage> &format,
   1854         bool offloadOnly,
   1855         bool hasVideo,
   1856         uint32_t flags,
   1857         bool isStreaming) {
   1858     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
   1859             offloadOnly, offloadingAudio());
   1860     bool audioSinkChanged = false;
   1861 
   1862     int32_t numChannels;
   1863     CHECK(format->findInt32("channel-count", &numChannels));
   1864 
   1865     int32_t channelMask;
   1866     if (!format->findInt32("channel-mask", &channelMask)) {
    1867         // Signal the AudioSink to derive the mask from the channel count.
   1868         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
   1869     }
   1870 
   1871     int32_t sampleRate;
   1872     CHECK(format->findInt32("sample-rate", &sampleRate));
   1873 
   1874     if (offloadingAudio()) {
   1875         audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
   1876         AString mime;
   1877         CHECK(format->findString("mime", &mime));
   1878         status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
   1879 
   1880         if (err != OK) {
   1881             ALOGE("Couldn't map mime \"%s\" to a valid "
   1882                     "audio_format", mime.c_str());
   1883             onDisableOffloadAudio();
   1884         } else {
   1885             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
   1886                     mime.c_str(), audioFormat);
   1887 
   1888             int avgBitRate = -1;
   1889             format->findInt32("bitrate", &avgBitRate);
   1890 
   1891             int32_t aacProfile = -1;
   1892             if (audioFormat == AUDIO_FORMAT_AAC
   1893                     && format->findInt32("aac-profile", &aacProfile)) {
    1894                 // Redefine the AAC format according to the AAC profile.
   1895                 mapAACProfileToAudioFormat(
   1896                         audioFormat,
   1897                         aacProfile);
   1898             }
   1899 
   1900             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
   1901             offloadInfo.duration_us = -1;
   1902             format->findInt64(
   1903                     "durationUs", &offloadInfo.duration_us);
   1904             offloadInfo.sample_rate = sampleRate;
   1905             offloadInfo.channel_mask = channelMask;
   1906             offloadInfo.format = audioFormat;
   1907             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
   1908             offloadInfo.bit_rate = avgBitRate;
   1909             offloadInfo.has_video = hasVideo;
   1910             offloadInfo.is_streaming = isStreaming;
   1911 
   1912             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
   1913                 ALOGV("openAudioSink: no change in offload mode");
   1914                 // no change from previous configuration, everything ok.
   1915                 return OK;
   1916             }
   1917             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   1918 
   1919             ALOGV("openAudioSink: try to open AudioSink in offload mode");
   1920             uint32_t offloadFlags = flags;
   1921             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
   1922             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
   1923             audioSinkChanged = true;
   1924             mAudioSink->close();
   1925 
   1926             err = mAudioSink->open(
   1927                     sampleRate,
   1928                     numChannels,
   1929                     (audio_channel_mask_t)channelMask,
   1930                     audioFormat,
   1931                     0 /* bufferCount - unused */,
   1932                     &NuPlayer2::Renderer::AudioSinkCallback,
   1933                     this,
   1934                     (audio_output_flags_t)offloadFlags,
   1935                     &offloadInfo);
   1936 
   1937             if (err == OK) {
   1938                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
   1939             }
   1940 
   1941             if (err == OK) {
   1942                 // If the playback is offloaded to h/w, we pass
   1943                 // the HAL some metadata information.
   1944                 // We don't want to do this for PCM because it
   1945                 // will be going through the AudioFlinger mixer
   1946                 // before reaching the hardware.
   1947                 // TODO
   1948                 mCurrentOffloadInfo = offloadInfo;
   1949                 if (!mPaused) { // for preview mode, don't start if paused
   1950                     err = mAudioSink->start();
   1951                 }
   1952                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
   1953             }
   1954             if (err != OK) {
   1955                 // Clean up, fall back to non offload mode.
   1956                 mAudioSink->close();
   1957                 onDisableOffloadAudio();
   1958                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   1959                 ALOGV("openAudioSink: offload failed");
   1960                 if (offloadOnly) {
   1961                     notifyAudioTearDown(kForceNonOffload);
   1962                 }
   1963             } else {
   1964                 mUseAudioCallback = true;  // offload mode transfers data through callback
   1965                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
   1966             }
   1967         }
   1968     }
   1969     if (!offloadOnly && !offloadingAudio()) {
   1970         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
   1971         uint32_t pcmFlags = flags;
   1972         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
   1973 
   1974         const PcmInfo info = {
   1975                 (audio_channel_mask_t)channelMask,
   1976                 (audio_output_flags_t)pcmFlags,
   1977                 AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
   1978                 numChannels,
   1979                 sampleRate
   1980         };
   1981         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
   1982             ALOGV("openAudioSink: no change in pcm mode");
   1983             // no change from previous configuration, everything ok.
   1984             return OK;
   1985         }
   1986 
   1987         audioSinkChanged = true;
   1988         mAudioSink->close();
   1989         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   1990         // Note: It is possible to set up the callback, but not use it to send audio data.
   1991         // This requires a fix in AudioSink to explicitly specify the transfer mode.
   1992         mUseAudioCallback = getUseAudioCallbackSetting();
   1993         if (mUseAudioCallback) {
   1994             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
   1995         }
   1996 
   1997         // Compute the desired buffer size.
   1998         // For callback mode, the amount of time before wakeup is about half the buffer size.
   1999         const uint32_t frameCount =
   2000                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
   2001 
    2002         // Setting doNotReconnect means the AudioSink will signal back and let NuPlayer2
    2003         // re-construct the AudioSink. We don't want this when there's video because it would
    2004         // cause a video seek to the previous I frame. But we do want it when there's only audio
    2005         // because it gives NuPlayer2 a chance to switch from non-offload mode to offload mode.
    2006         // So we only set doNotReconnect when there's no video.
   2007         const bool doNotReconnect = !hasVideo;
   2008 
   2009         // We should always be able to set our playback settings if the sink is closed.
   2010         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
   2011                 "onOpenAudioSink: can't set playback rate on closed sink");
   2012         status_t err = mAudioSink->open(
   2013                     sampleRate,
   2014                     numChannels,
   2015                     (audio_channel_mask_t)channelMask,
   2016                     AUDIO_FORMAT_PCM_16_BIT,
   2017                     0 /* bufferCount - unused */,
   2018                     mUseAudioCallback ? &NuPlayer2::Renderer::AudioSinkCallback : NULL,
   2019                     mUseAudioCallback ? this : NULL,
   2020                     (audio_output_flags_t)pcmFlags,
   2021                     NULL,
   2022                     doNotReconnect,
   2023                     frameCount);
   2024         if (err != OK) {
   2025             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
   2026             mAudioSink->close();
   2027             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   2028             return err;
   2029         }
   2030         mCurrentPcmInfo = info;
   2031         if (!mPaused) { // for preview mode, don't start if paused
   2032             mAudioSink->start();
   2033         }
   2034     }
   2035     if (audioSinkChanged) {
   2036         onAudioSinkChanged();
   2037     }
   2038     mAudioTornDown = false;
   2039     return OK;
   2040 }
   2041 
   2042 void NuPlayer2::Renderer::onCloseAudioSink() {
   2043     mAudioSink->close();
   2044     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
   2045     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
   2046 }
   2047 
   2048 void NuPlayer2::Renderer::onChangeAudioFormat(
   2049         const sp<AMessage> &meta, const sp<AMessage> &notify) {
   2050     sp<AMessage> format;
   2051     CHECK(meta->findMessage("format", &format));
   2052 
   2053     int32_t offloadOnly;
   2054     CHECK(meta->findInt32("offload-only", &offloadOnly));
   2055 
   2056     int32_t hasVideo;
   2057     CHECK(meta->findInt32("has-video", &hasVideo));
   2058 
   2059     uint32_t flags;
   2060     CHECK(meta->findInt32("flags", (int32_t *)&flags));
   2061 
   2062     uint32_t isStreaming;
   2063     CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
   2064 
   2065     status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
   2066 
   2067     if (err != OK) {
   2068         notify->setInt32("err", err);
   2069     }
   2070     notify->post();
   2071 }
   2072 
   2073 }  // namespace android
   2074 
   2075