/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = false;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

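            // onDrainAudioQueue() returns true if more audio data remains
            // queued, in which case another drain needs to be scheduled.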
            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

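            // Re-anchor the media clock on this buffer: the anchor's real
            // time is "now" plus however long the frames already written
            // (plus half the sink latency) will take to reach the output.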
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    // mediaTimeUs is declared at this scope so that the verbose log below
    // still compiles when LOG_NDEBUG is set to 0.
    int64_t mediaTimeUs = -1ll;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
        mediaTimeUs = realTimeUs;
    } else {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

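    // Tell the consumer whether to actually render this frame; frames that
    // are more than 40ms late are dropped rather than displayed.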
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyIfMediaRenderingStarted();

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.
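        // We stay in sync mode here (no syncQueuesDone() yet), so the next
        // audio buffer that arrives is compared against the same video buffer.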
        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}

void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    ALOGV("flushing %s", audio ? "audio" : "video");

    // Drop everything queued on the flushed side and bump the generation
    // counter so that drain messages already in flight are ignored.
    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;

        prepareForMediaRenderingStart();
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        prepareForMediaRenderingStart();
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    prepareForMediaRenderingStart();

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android