/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "AwesomePlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "include/AwesomePlayer.h"
#include "include/LiveSource.h"
#include "include/SoftwareRenderer.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"
#include "include/MPEG2TSExtractor.h"

#include "ARTPSession.h"
#include "APacketSource.h"
#include "ASessionDescription.h"
#include "UDPPusher.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/ISurface.h>

#include <media/stagefright/foundation/ALooper.h>

namespace android {

static int64_t kLowWaterMarkUs = 2000000ll;    // 2secs
static int64_t kHighWaterMarkUs = 10000000ll;  // 10secs
static const size_t kLowWaterMarkBytes = 40000;
static const size_t kHighWaterMarkBytes = 200000;

struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(
            AwesomePlayer *player,
            void (AwesomePlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~AwesomeEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();

    AwesomeEvent(const AwesomeEvent &);
    AwesomeEvent &operator=(const AwesomeEvent &);
};

struct AwesomeRemoteRenderer : public AwesomeRenderer {
    AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
        : mTarget(target) {
    }

    virtual status_t initCheck() const {
        return OK;
    }

    virtual void render(MediaBuffer *buffer) {
        void *id;
        if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
            mTarget->render((IOMX::buffer_id)id);
        }
    }

private:
    sp<IOMXRenderer> mTarget;

    AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
    AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
};

struct AwesomeLocalRenderer : public AwesomeRenderer {
    AwesomeLocalRenderer(
            bool previewOnly,
            const char *componentName,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<ISurface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees)
        : mInitCheck(NO_INIT),
          mTarget(NULL),
          mLibHandle(NULL) {
        mInitCheck = init(previewOnly, componentName,
                colorFormat, surface, displayWidth,
                displayHeight, decodedWidth, decodedHeight,
                rotationDegrees);
    }

    virtual status_t initCheck() const {
        return mInitCheck;
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }

protected:
    virtual ~AwesomeLocalRenderer() {
        delete mTarget;
        mTarget = NULL;

        if (mLibHandle) {
            dlclose(mLibHandle);
            mLibHandle = NULL;
        }
    }

private:
    status_t mInitCheck;
    VideoRenderer *mTarget;
    void *mLibHandle;

    status_t init(
            bool previewOnly,
            const char *componentName,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<ISurface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees);

    AwesomeLocalRenderer(const AwesomeLocalRenderer &);
    AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);
};

status_t AwesomeLocalRenderer::init(
        bool previewOnly,
        const char *componentName,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<ISurface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {
    if (!previewOnly) {
        // We will stick to the vanilla software-color-converting renderer
        // for "previewOnly" mode, to avoid switching overlays more often
        // than necessary.

        mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);

        if (mLibHandle) {
            typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
                    const sp<ISurface> &surface,
                    const char *componentName,
                    OMX_COLOR_FORMATTYPE colorFormat,
                    size_t displayWidth, size_t displayHeight,
                    size_t decodedWidth, size_t decodedHeight,
                    int32_t rotationDegrees);

            typedef VideoRenderer *(*CreateRendererFunc)(
                    const sp<ISurface> &surface,
                    const char *componentName,
                    OMX_COLOR_FORMATTYPE colorFormat,
                    size_t displayWidth, size_t displayHeight,
                    size_t decodedWidth, size_t decodedHeight);

            CreateRendererWithRotationFunc funcWithRotation =
                (CreateRendererWithRotationFunc)dlsym(
                        mLibHandle,
                        "_Z26createRendererWithRotationRKN7android2spINS_8"
                        "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");

            if (funcWithRotation) {
                mTarget =
                    (*funcWithRotation)(
                            surface, componentName, colorFormat,
                            displayWidth, displayHeight,
                            decodedWidth, decodedHeight,
                            rotationDegrees);
            } else {
                if (rotationDegrees != 0) {
                    LOGW("renderer does not support rotation.");
                }

                CreateRendererFunc func =
                    (CreateRendererFunc)dlsym(
                            mLibHandle,
                            "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
                            "OMX_COLOR_FORMATTYPEjjjj");

                if (func) {
                    mTarget =
                        (*func)(surface, componentName, colorFormat,
                            displayWidth, displayHeight,
                            decodedWidth, decodedHeight);
                }
            }
        }
    }

    if (mTarget != NULL) {
        return OK;
    }

    mTarget = new SoftwareRenderer(
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);

    return ((SoftwareRenderer *)mTarget)->initCheck();
}

AwesomePlayer::AwesomePlayer()
    : mQueueStarted(false),
      mTimeSource(NULL),
      mVideoRendererIsPreview(false),
      mAudioPlayer(NULL),
      mFlags(0),
      mExtractorFlags(0),
      mLastVideoBuffer(NULL),
      mVideoBuffer(NULL),
      mSuspensionState(NULL) {
    CHECK_EQ(mClient.connect(), OK);

    DataSource::RegisterDefaultSniffers();

    mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;
    mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
    mBufferingEventPending = false;

    mCheckAudioStatusEvent = new AwesomeEvent(
            this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    reset();
}

AwesomePlayer::~AwesomePlayer() {
    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    mClient.disconnect();
}

void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    if (!keepBufferingGoing) {
        mQueue.cancelEvent(mBufferingEvent->eventID());
        mBufferingEventPending = false;
    }
}

void AwesomePlayer::setListener(const wp<MediaPlayerBase> &listener) {
    Mutex::Autolock autoLock(mLock);
    mListener = listener;
}

status_t AwesomePlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t AwesomePlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.

    return OK;
}

status_t AwesomePlayer::setDataSource(
        int fd, int64_t offset, int64_t length) {
    Mutex::Autolock autoLock(mLock);

    reset_l();

    sp<DataSource> dataSource = new FileSource(fd, offset, length);

    status_t err = dataSource->initCheck();

    if (err != OK) {
        return err;
    }

    mFileSource = dataSource;

    return setDataSource_l(dataSource);
}

status_t AwesomePlayer::setDataSource_l(
        const sp<DataSource> &dataSource) {
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    return setDataSource_l(extractor);
}

status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    // Attempt to approximate the overall stream bitrate by summing all
    // tracks' individual bitrates; if not all of them advertise a bitrate,
    // we have to fail.

    int64_t totalBitRate = 0;

    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        int32_t bitrate;
        if (!meta->findInt32(kKeyBitRate, &bitrate)) {
            totalBitRate = -1;
            break;
        }

        totalBitRate += bitrate;
    }

    mBitrate = totalBitRate;

    LOGV("mBitrate = %lld bits/sec", mBitrate);

    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio; none of the other audio
                // formats even support this ringtone-specific hack, and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();

    return OK;
}

void AwesomePlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void AwesomePlayer::reset_l() {
    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
        if (mConnectingDataSource != NULL) {
            LOGI("interrupting the connection process");
            mConnectingDataSource->disconnect();
        }
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();

    mCachedSource.clear();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shut down audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we shut
    // down the video-related resources and the player would appear less
    // responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    mVideoRenderer.clear();

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mRTSPController != NULL) {
        mRTSPController->disconnect();
        mRTSPController.clear();
    }

    mRTPPusher.clear();
    mRTCPPusher.clear();
    mRTPSession.clear();

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
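        // (Keep only a weak reference and spin until promote() fails, i.e.
        // until every remaining strong reference to the decoder has been
        // dropped.)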
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mBitrate = -1;
}

void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
    if (mListener != NULL) {
        sp<MediaPlayerBase> listener = mListener.promote();

        if (listener != NULL) {
            listener->sendEvent(msg, ext1, ext2);
        }
    }
}

bool AwesomePlayer::getBitrate(int64_t *bitrate) {
    off_t size;
    if (mDurationUs >= 0 && mCachedSource != NULL
            && mCachedSource->getSize(&size) == OK) {
        *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
        return true;
    }

    if (mBitrate >= 0) {
        *bitrate = mBitrate;
        return true;
    }

    *bitrate = 0;

    return false;
}

// Returns true iff cached duration is available/applicable.
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
    int64_t bitrate;

    if (mRTSPController != NULL) {
        *durationUs = mRTSPController->getQueueDurationUs(eos);
        return true;
    } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(eos);
        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
        return true;
    }

    return false;
}

void AwesomePlayer::onBufferingUpdate() {
    Mutex::Autolock autoLock(mLock);
    if (!mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = false;

    if (mCachedSource != NULL) {
        bool eos;
        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&eos);

        if (eos) {
            notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
            if (mFlags & PREPARING) {
                LOGV("cache has reached EOS, prepare is done.");
                finishAsyncPrepare_l();
            }
        } else {
            int64_t bitrate;
            if (getBitrate(&bitrate)) {
                size_t cachedSize = mCachedSource->cachedSize();
                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;

                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
                if (percentage > 100) {
                    percentage = 100;
                }

                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
            } else {
                // We don't know the bitrate of the stream, so use absolute
                // size limits to maintain the cache.
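                // Pause playback once the cache drops below
                // kLowWaterMarkBytes and resume (or finish preparing) once
                // it climbs back above kHighWaterMarkBytes.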

                if ((mFlags & PLAYING) && !eos
                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
                    LOGI("cache is running low (< %d), pausing.",
                         kLowWaterMarkBytes);
                    mFlags |= CACHE_UNDERRUN;
                    pause_l();
                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                    if (mFlags & CACHE_UNDERRUN) {
                        LOGI("cache has filled up (> %d), resuming.",
                             kHighWaterMarkBytes);
                        mFlags &= ~CACHE_UNDERRUN;
                        play_l();
                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
                    } else if (mFlags & PREPARING) {
                        LOGV("cache has filled up (> %d), prepare is done",
                             kHighWaterMarkBytes);
                        finishAsyncPrepare_l();
                    }
                }
            }
        }
    }

    int64_t cachedDurationUs;
    bool eos;
    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
        if ((mFlags & PLAYING) && !eos
                && (cachedDurationUs < kLowWaterMarkUs)) {
            LOGI("cache is running low (%.2f secs), pausing.",
                 cachedDurationUs / 1E6);
            mFlags |= CACHE_UNDERRUN;
            pause_l();
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
            if (mFlags & CACHE_UNDERRUN) {
                LOGI("cache has filled up (%.2f secs), resuming.",
                     cachedDurationUs / 1E6);
                mFlags &= ~CACHE_UNDERRUN;
                play_l();
                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
            } else if (mFlags & PREPARING) {
                LOGV("cache has filled up (%.2f secs), prepare is done",
                     cachedDurationUs / 1E6);
                finishAsyncPrepare_l();
            }
        }
    }

    postBufferingEvent_l();
}

void AwesomePlayer::partial_reset_l() {
    // Only reset the video renderer and shut down the video decoder.
    // Then instantiate a new video decoder and resume video playback.

    mVideoRenderer.clear();

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    CHECK_EQ(OK, initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
}

void AwesomePlayer::onStreamDone() {
    // Posted whenever any stream finishes playing.

    Mutex::Autolock autoLock(mLock);
    if (!mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = false;

    if (mStreamDoneStatus == INFO_DISCONTINUITY) {
        // This special status is returned because an http live stream's
        // video stream switched to a different bandwidth at this point
        // and future data may have been encoded using different parameters.
        // This requires us to shut down the video decoder and reinstantiate
        // a fresh one.

        LOGV("INFO_DISCONTINUITY");

        CHECK(mVideoSource != NULL);

        partial_reset_l();
        postVideoEvent_l();
        return;
    } else if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);

        notifyListener_l(
                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
        return;
    }

    const bool allDone =
        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));

    if (!allDone) {
        return;
    }

    if (mFlags & (LOOPING | AUTO_LOOPING)) {
        seekTo_l(0);

        if (mVideoSource != NULL) {
            postVideoEvent_l();
        }
    } else {
        LOGV("MEDIA_PLAYBACK_COMPLETE");
        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);

        pause_l(true /* at eos */);

        mFlags |= AT_EOS;
    }
}

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

status_t AwesomePlayer::play_l() {
    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                mAudioPlayer = new AudioPlayer(mAudioSink, this);
                mAudioPlayer->setSource(mAudioSource);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mAudioPlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);

                    return err;
                }

                mTimeSource = mAudioPlayer;

                deferredAudioSeek = true;

                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mAudioPlayer->resume();
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}

status_t AwesomePlayer::initRenderer_l() {
    if (mISurface == NULL) {
        return OK;
    }

    sp<MetaData> meta = mVideoSource->getFormat();

    int32_t format;
    const char *component;
    int32_t decodedWidth, decodedHeight;
    CHECK(meta->findInt32(kKeyColorFormat, &format));
    CHECK(meta->findCString(kKeyDecoderComponent, &component));
    CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
    CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

    int32_t rotationDegrees;
    if (!mVideoTrack->getFormat()->findInt32(
                kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    mVideoRenderer.clear();

    // Must ensure that mVideoRenderer's destructor is actually executed
    // before creating a new one.
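    // flushCommands() pushes any queued binder transactions (such as the
    // release of the previous remote renderer) out to mediaserver right away.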
    IPCThreadState::self()->flushCommands();

    if (!strncmp("OMX.", component, 4)) {
        // Our OMX codecs allocate buffers on the media_server side
        // therefore they require a remote IOMXRenderer that knows how
        // to display them.

        sp<IOMXRenderer> native =
            mClient.interface()->createRenderer(
                    mISurface, component,
                    (OMX_COLOR_FORMATTYPE)format,
                    decodedWidth, decodedHeight,
                    mVideoWidth, mVideoHeight,
                    rotationDegrees);

        if (native == NULL) {
            return NO_INIT;
        }

        mVideoRenderer = new AwesomeRemoteRenderer(native);
    } else {
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space.
        mVideoRenderer = new AwesomeLocalRenderer(
                false,  // previewOnly
                component,
                (OMX_COLOR_FORMATTYPE)format,
                mISurface,
                mVideoWidth, mVideoHeight,
                decodedWidth, decodedHeight, rotationDegrees);
    }

    return mVideoRenderer->initCheck();
}

status_t AwesomePlayer::pause() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return pause_l();
}

status_t AwesomePlayer::pause_l(bool at_eos) {
    if (!(mFlags & PLAYING)) {
        return OK;
    }

    cancelPlayerEvents(true /* keepBufferingGoing */);

    if (mAudioPlayer != NULL) {
        if (at_eos) {
            // If we played the audio stream to completion we
            // want to make sure that all samples remaining in the audio
            // track's queue are played out.
            mAudioPlayer->pause(true /* playPendingSamples */);
        } else {
            mAudioPlayer->pause();
        }
    }

    mFlags &= ~PLAYING;

    return OK;
}

bool AwesomePlayer::isPlaying() const {
    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}

void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);

    mISurface = isurface;
}

void AwesomePlayer::setAudioSink(
        const sp<MediaPlayerBase::AudioSink> &audioSink) {
    Mutex::Autolock autoLock(mLock);

    mAudioSink = audioSink;
}

status_t AwesomePlayer::setLooping(bool shouldLoop) {
    Mutex::Autolock autoLock(mLock);

    mFlags = mFlags & ~LOOPING;

    if (shouldLoop) {
        mFlags |= LOOPING;
    }

    return OK;
}

status_t AwesomePlayer::getDuration(int64_t *durationUs) {
    Mutex::Autolock autoLock(mMiscStateLock);

    if (mDurationUs < 0) {
        return UNKNOWN_ERROR;
    }

    *durationUs = mDurationUs;

    return OK;
}

status_t AwesomePlayer::getPosition(int64_t *positionUs) {
    if (mRTSPController != NULL) {
        *positionUs = mRTSPController->getNormalPlayTimeUs();
    } else if (mSeeking) {
        *positionUs = mSeekTimeUs;
    } else if (mVideoSource != NULL) {
        Mutex::Autolock autoLock(mMiscStateLock);
        *positionUs = mVideoTimeUs;
    } else if (mAudioPlayer != NULL) {
        *positionUs = mAudioPlayer->getMediaTimeUs();
    } else {
        *positionUs = 0;
    }

    return OK;
}

status_t AwesomePlayer::seekTo(int64_t timeUs) {
    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

// static
void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
    static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
}

void AwesomePlayer::onRTSPSeekDone() {
    notifyListener_l(MEDIA_SEEK_COMPLETE);
    mSeekNotificationSent = true;
}

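// Record a pending seek. RTSP seeks are forwarded to the controller
// asynchronously; for other sources the request is noted here and applied
// by the audio/video paths.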
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    if (mRTSPController != NULL) {
        mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
        return OK;
    }

    if (mFlags & CACHE_UNDERRUN) {
        mFlags &= ~CACHE_UNDERRUN;
        play_l();
    }

    mSeeking = true;
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);

    seekAudioIfNecessary_l();

    if (!(mFlags & PLAYING)) {
        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
             " immediately.");

        notifyListener_l(MEDIA_SEEK_COMPLETE);
        mSeekNotificationSent = true;
    }

    return OK;
}

void AwesomePlayer::seekAudioIfNecessary_l() {
    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
        mAudioPlayer->seekTo(mSeekTimeUs);

        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
        mSeekNotificationSent = false;
    }
}

status_t AwesomePlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}

void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mAudioTrack = source;
}

status_t AwesomePlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        mAudioSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false,  // createEncoder
                mAudioTrack);
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}

void AwesomePlayer::setVideoSource(sp<MediaSource> source) {
    CHECK(source != NULL);

    mVideoTrack = source;
}

status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,  // createEncoder
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
    if (!mSeeking) {
        return;
    }

    if (mAudioPlayer != NULL) {
        LOGV("seeking audio to %lld us (%.2f secs).",
             videoTimeUs, videoTimeUs / 1E6);

        // If we don't have a video time, seek audio to the originally
        // requested seek time instead.

        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
        mAudioPlayer->resume();
        mWatchForAudioSeekComplete = true;
        mWatchForAudioEOS = true;
    } else if (!mSeekNotificationSent) {
        // If we're playing video only, report seek complete now,
        // otherwise audio player will notify us later.
        notifyListener_l(MEDIA_SEEK_COMPLETE);
    }

    mFlags |= FIRST_FRAME;
    mSeeking = false;
    mSeekNotificationSent = false;
}

void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }

        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mCachedSource != NULL && mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("seeking to %lld us (%.2f secs)",
                 mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        err = initRenderer_l();

                        if (err == OK) {
                            continue;
                        }

                        // fall through
                    } else {
                        continue;
                    }
                }

                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);

                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        mFlags &= ~FIRST_FRAME;

        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

    int64_t latenessUs = nowUs - timeUs;

    if (wasSeeking) {
        // Let's display the first frame after seeking right away.
        latenessUs = 0;
    }

    if (mRTPSession != NULL) {
        // We'll completely ignore timestamps for gtalk videochat
        // and we'll play incoming video as fast as we get it.
        latenessUs = 0;
    }

    if (latenessUs > 40000) {
        // We're more than 40ms late.
        LOGV("we're late by %lld us (%.2f secs)",
             latenessUs, latenessUs / 1E6);

        mVideoBuffer->release();
        mVideoBuffer = NULL;

        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.
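        // Re-post the event and check again in 10ms rather than render early.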
        postVideoEvent_l(10000);
        return;
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        status_t err = initRenderer_l();

        if (err != OK) {
            finishSeekIfNecessary(-1);

            mFlags |= VIDEO_AT_EOS;
            postStreamDoneEvent_l(err);
            return;
        }
    }

    if (mVideoRenderer != NULL) {
        mVideoRenderer->render(mVideoBuffer);
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }
    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    postVideoEvent_l();
}

void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }

    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

void AwesomePlayer::postStreamDoneEvent_l(status_t status) {
    if (mStreamDoneEventPending) {
        return;
    }
    mStreamDoneEventPending = true;

    mStreamDoneStatus = status;
    mQueue.postEvent(mStreamDoneEvent);
}

void AwesomePlayer::postBufferingEvent_l() {
    if (mBufferingEventPending) {
        return;
    }
    mBufferingEventPending = true;
    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
}

void AwesomePlayer::postCheckAudioStatusEvent_l() {
    if (mAudioStatusEventPending) {
        return;
    }
    mAudioStatusEventPending = true;
    mQueue.postEvent(mCheckAudioStatusEvent);
}

void AwesomePlayer::onCheckAudioStatus() {
    Mutex::Autolock autoLock(mLock);
    if (!mAudioStatusEventPending) {
        // The event was dispatched, but while we were blocking on the
        // mutex it was already cancelled.
        return;
    }

    mAudioStatusEventPending = false;

    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
        mWatchForAudioSeekComplete = false;

        if (!mSeekNotificationSent) {
            notifyListener_l(MEDIA_SEEK_COMPLETE);
            mSeekNotificationSent = true;
        }

        mSeeking = false;
    }

    status_t finalStatus;
    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
        mWatchForAudioEOS = false;
        mFlags |= AUDIO_AT_EOS;
        mFlags |= FIRST_FRAME;
        postStreamDoneEvent_l(finalStatus);
    }
}

status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t AwesomePlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t AwesomePlayer::prepareAsync() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    mIsAsyncPrepare = true;
    return prepareAsync_l();
}

status_t AwesomePlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new AwesomeEvent(
            this, &AwesomePlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

status_t AwesomePlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;

    if (!strncasecmp("http://", mUri.string(), 7)) {
        mConnectingDataSource = new NuHTTPDataSource;

        mLock.unlock();
        status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
        mLock.lock();

        if (err != OK) {
            mConnectingDataSource.clear();

            LOGI("mConnectingDataSource->connect() returned %d", err);
            return err;
        }

#if 0
        mCachedSource = new NuCachedSource2(
                new ThrottledSource(
                    mConnectingDataSource, 50 * 1024 /* bytes/sec */));
#else
        mCachedSource = new NuCachedSource2(mConnectingDataSource);
#endif
        mConnectingDataSource.clear();

        dataSource = mCachedSource;

        // We're going to prefill the cache before trying to instantiate
        // the extractor below, as the latter is an operation that otherwise
        // could block on the datasource for a significant amount of time.
        // Without this prefill we'd be unable to abort the preparation
        // phase during that time.
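        // Drop the lock while polling so that reset() or an asynchronous
        // prepare cancellation can interrupt the wait below.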

        mLock.unlock();

        for (;;) {
            bool eos;
            size_t cachedDataRemaining =
                mCachedSource->approxDataRemaining(&eos);

            if (eos || cachedDataRemaining >= kHighWaterMarkBytes
                    || (mFlags & PREPARE_CANCELLED)) {
                break;
            }

            usleep(200000);
        }

        mLock.lock();

        if (mFlags & PREPARE_CANCELLED) {
            LOGI("Prepare cancelled while waiting for initial cache fill.");
            return UNKNOWN_ERROR;
        }
    } else if (!strncasecmp(mUri.string(), "httplive://", 11)) {
        String8 uri("http://");
        uri.append(mUri.string() + 11);

        sp<LiveSource> liveSource = new LiveSource(uri.string());

        mCachedSource = new NuCachedSource2(liveSource);
        dataSource = mCachedSource;

        sp<MediaExtractor> extractor =
            MediaExtractor::Create(
                    dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

        static_cast<MPEG2TSExtractor *>(extractor.get())
            ->setLiveSource(liveSource);

        return setDataSource_l(extractor);
    } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) {
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("gtalk rtp");
            mLooper->start(
                    false /* runOnCallingThread */,
                    false /* canCallJava */,
                    PRIORITY_HIGHEST);
        }

        const char *startOfCodecString = &mUri.string()[13];
        const char *startOfSlash1 = strchr(startOfCodecString, '/');
        if (startOfSlash1 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfWidthString = &startOfSlash1[1];
        const char *startOfSlash2 = strchr(startOfWidthString, '/');
        if (startOfSlash2 == NULL) {
            return BAD_VALUE;
        }
        const char *startOfHeightString = &startOfSlash2[1];

        String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString);
        String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString);
        String8 heightString(startOfHeightString);

#if 0
        mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434);
        mLooper->registerHandler(mRTPPusher);

        mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435);
        mLooper->registerHandler(mRTCPPusher);
#endif

        mRTPSession = new ARTPSession;
        mLooper->registerHandler(mRTPSession);

#if 0
        // My AMR SDP
        static const char *raw =
            "v=0\r\n"
            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
            "s=QuickTime\r\n"
            "t=0 0\r\n"
            "a=range:npt=0-315\r\n"
            "a=isma-compliance:2,2.0,2\r\n"
            "m=audio 5434 RTP/AVP 97\r\n"
            "c=IN IP4 127.0.0.1\r\n"
            "b=AS:30\r\n"
            "a=rtpmap:97 AMR/8000/1\r\n"
            "a=fmtp:97 octet-align\r\n";
#elif 1
        String8 sdp;
        sdp.appendFormat(
            "v=0\r\n"
            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
            "s=QuickTime\r\n"
            "t=0 0\r\n"
            "a=range:npt=0-315\r\n"
            "a=isma-compliance:2,2.0,2\r\n"
            "m=video 5434 RTP/AVP 97\r\n"
            "c=IN IP4 127.0.0.1\r\n"
            "b=AS:30\r\n"
            "a=rtpmap:97 %s/90000\r\n"
            "a=cliprect:0,0,%s,%s\r\n"
            "a=framesize:97 %s-%s\r\n",

            codecString.string(),
            heightString.string(), widthString.string(),
            widthString.string(), heightString.string());
        const char *raw = sdp.string();

#endif

        sp<ASessionDescription> desc = new ASessionDescription;
        CHECK(desc->setTo(raw, strlen(raw)));

        CHECK_EQ(mRTPSession->setup(desc), (status_t)OK);

        if (mRTPPusher != NULL) {
            mRTPPusher->start();
        }

        if (mRTCPPusher != NULL) {
            mRTCPPusher->start();
        }

        CHECK_EQ(mRTPSession->countTracks(), 1u);
        sp<MediaSource> source = mRTPSession->trackAt(0);

#if 0
        bool eos;
        while (((APacketSource *)source.get())
                ->getQueuedDuration(&eos) < 5000000ll && !eos) {
            usleep(100000ll);
        }
#endif

        const char *mime;
        CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("video/", mime, 6)) {
            setVideoSource(source);
        } else {
            CHECK(!strncasecmp("audio/", mime, 6));
            setAudioSource(source);
        }

        mExtractorFlags = MediaExtractor::CAN_PAUSE;

        return OK;
    } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
        if (mLooper == NULL) {
            mLooper = new ALooper;
            mLooper->setName("rtsp");
            mLooper->start();
        }
        mRTSPController = new ARTSPController(mLooper);
        status_t err = mRTSPController->connect(mUri.string());

        LOGI("ARTSPController::connect returned %d", err);

        if (err != OK) {
            mRTSPController.clear();
            return err;
        }

        sp<MediaExtractor> extractor = mRTSPController.get();
        return setDataSource_l(extractor);
    } else {
        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
    }

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    return setDataSource_l(extractor);
}

void AwesomePlayer::abortPrepare(status_t err) {
    CHECK(err != OK);

    if (mIsAsyncPrepare) {
        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
    }

    mPrepareResult = err;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

// static
bool AwesomePlayer::ContinuePreparation(void *cookie) {
    AwesomePlayer *me = static_cast<AwesomePlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void AwesomePlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mCachedSource != NULL || mRTSPController != NULL) {
        postBufferingEvent_l();
    } else {
        finishAsyncPrepare_l();
    }
}

void AwesomePlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoWidth < 0 || mVideoHeight < 0) {
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            int32_t rotationDegrees;
            if (!mVideoTrack->getFormat()->findInt32(
                        kKeyRotation, &rotationDegrees)) {
                rotationDegrees = 0;
            }

#if 1
            if (rotationDegrees == 90 || rotationDegrees == 270) {
                notifyListener_l(
                        MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth);
            } else
#endif
            {
                notifyListener_l(
                        MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
            }
        }

        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

status_t AwesomePlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if playback is suspended again after a resume
            // without anything having been played in between.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
        if (mConnectingDataSource != NULL) {
            LOGI("interrupting the connection process");
            mConnectingDataSource->disconnect();
        }
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();

        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                       (const uint8_t *)mLastVideoBuffer->data()
                            + mLastVideoBuffer->range_offset(),
                       size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                     "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t AwesomePlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && mISurface != NULL) {
        mVideoRenderer =
            new AwesomeLocalRenderer(
                    true,  // previewOnly
                    "",
                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                    mISurface,
                    state->mVideoWidth,
                    state->mVideoHeight,
                    state->mDecodedWidth,
                    state->mDecodedHeight,
                    0);

        mVideoRendererIsPreview = true;

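        // Immediately redisplay the frame captured at suspend() time so the
        // surface is not left blank until playback actually resumes.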
        ((AwesomeLocalRenderer *)mVideoRenderer.get())->render(
                state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}

uint32_t AwesomePlayer::flags() const {
    return mExtractorFlags;
}

void AwesomePlayer::postAudioEOS() {
    postCheckAudioStatusEvent_l();
}

void AwesomePlayer::postAudioSeekComplete() {
    postCheckAudioStatusEvent_l();
}

}  // namespace android