1 /* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 //#define LOG_NDEBUG 0 18 #define LOG_TAG "ACodec" 19 20 #ifdef __LP64__ 21 #define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22 #endif 23 24 #include <inttypes.h> 25 #include <utils/Trace.h> 26 27 #include <gui/Surface.h> 28 29 #include <media/stagefright/ACodec.h> 30 31 #include <binder/MemoryDealer.h> 32 33 #include <media/stagefright/foundation/hexdump.h> 34 #include <media/stagefright/foundation/ABuffer.h> 35 #include <media/stagefright/foundation/ADebug.h> 36 #include <media/stagefright/foundation/AMessage.h> 37 #include <media/stagefright/foundation/AUtils.h> 38 39 #include <media/stagefright/BufferProducerWrapper.h> 40 #include <media/stagefright/MediaCodec.h> 41 #include <media/stagefright/MediaCodecList.h> 42 #include <media/stagefright/MediaDefs.h> 43 #include <media/stagefright/OMXClient.h> 44 #include <media/stagefright/PersistentSurface.h> 45 #include <media/stagefright/SurfaceUtils.h> 46 #include <media/hardware/HardwareAPI.h> 47 48 #include <OMX_AudioExt.h> 49 #include <OMX_VideoExt.h> 50 #include <OMX_Component.h> 51 #include <OMX_IndexExt.h> 52 #include <OMX_AsString.h> 53 54 #include "include/avc_utils.h" 55 #include "include/DataConverter.h" 56 #include "omx/OMXUtils.h" 57 58 namespace android { 59 60 enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62 }; 63 64 // OMX errors are directly mapped into status_t range if 65 // there is no corresponding MediaError status code. 66 // Use the statusFromOMXError(int32_t omxError) function. 67 // 68 // Currently this is a direct map. 69 // See frameworks/native/include/media/openmax/OMX_Core.h 70 // 71 // Vendor OMX errors from 0x90000000 - 0x9000FFFF 72 // Extension OMX errors from 0x8F000000 - 0x90000000 73 // Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74 // 75 76 // returns true if err is a recognized OMX error code. 77 // as OMX error is OMX_S32, this is an int32_t type 78 static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80 } 81 82 // converts an OMX error to a status_t 83 static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91 } 92 93 // checks and converts status_t to a non-side-effect status_t 94 static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104 } 105 106 struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112 private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116 }; 117 118 static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123 } 124 125 struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207 protected: 208 virtual ~CodecObserver() {} 209 210 private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214 }; 215 216 //////////////////////////////////////////////////////////////////////////////// 217 218 struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221 protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 
virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241 private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265 }; 266 267 //////////////////////////////////////////////////////////////////////////////// 268 269 struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278 protected: 279 virtual ~DeathNotifier() {} 280 281 private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285 }; 286 287 struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290 protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294 private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301 }; 302 303 //////////////////////////////////////////////////////////////////////////////// 304 305 struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308 protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312 private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324 }; 325 326 //////////////////////////////////////////////////////////////////////////////// 327 328 struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331 protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336 private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340 }; 341 342 //////////////////////////////////////////////////////////////////////////////// 343 344 struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347 protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352 private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354 }; 355 356 
//////////////////////////////////////////////////////////////////////////////// 357 358 struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 370 bool active() const { return mActive; } 371 372 protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380 private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384 }; 385 386 //////////////////////////////////////////////////////////////////////////////// 387 388 struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391 protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399 private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401 }; 402 403 //////////////////////////////////////////////////////////////////////////////// 404 405 struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408 protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417 private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423 }; 424 425 //////////////////////////////////////////////////////////////////////////////// 426 427 struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430 protected: 431 virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436 private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438 }; 439 440 //////////////////////////////////////////////////////////////////////////////// 441 442 struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445 protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454 private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460 }; 461 462 //////////////////////////////////////////////////////////////////////////////// 463 464 void ACodec::BufferInfo::setWriteFence(int 
fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471 } 472 473 void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480 } 481 482 void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486 } 487 488 void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492 } 493 494 //////////////////////////////////////////////////////////////////////////////// 495 496 ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new IdleToLoadedState(this); 539 mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 545 546 changeState(mUninitializedState); 547 } 548 549 ACodec::~ACodec() { 550 } 551 552 void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 553 mNotify = msg; 554 } 555 556 void ACodec::initiateSetup(const sp<AMessage> &msg) { 557 msg->setWhat(kWhatSetup); 558 msg->setTarget(this); 559 msg->post(); 560 } 561 562 void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 563 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 564 msg->setMessage("params", params); 565 msg->post(); 566 } 567 568 void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 569 msg->setWhat(kWhatAllocateComponent); 570 msg->setTarget(this); 571 msg->post(); 572 } 573 574 void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 575 msg->setWhat(kWhatConfigureComponent); 576 msg->setTarget(this); 577 msg->post(); 578 } 579 580 status_t ACodec::setSurface(const 
sp<Surface> &surface) { 581 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 582 msg->setObject("surface", surface); 583 584 sp<AMessage> response; 585 status_t err = msg->postAndAwaitResponse(&response); 586 587 if (err == OK) { 588 (void)response->findInt32("err", &err); 589 } 590 return err; 591 } 592 593 void ACodec::initiateCreateInputSurface() { 594 (new AMessage(kWhatCreateInputSurface, this))->post(); 595 } 596 597 void ACodec::initiateSetInputSurface( 598 const sp<PersistentSurface> &surface) { 599 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 600 msg->setObject("input-surface", surface); 601 msg->post(); 602 } 603 604 void ACodec::signalEndOfInputStream() { 605 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 606 } 607 608 void ACodec::initiateStart() { 609 (new AMessage(kWhatStart, this))->post(); 610 } 611 612 void ACodec::signalFlush() { 613 ALOGV("[%s] signalFlush", mComponentName.c_str()); 614 (new AMessage(kWhatFlush, this))->post(); 615 } 616 617 void ACodec::signalResume() { 618 (new AMessage(kWhatResume, this))->post(); 619 } 620 621 void ACodec::initiateShutdown(bool keepComponentAllocated) { 622 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 623 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 624 msg->post(); 625 if (!keepComponentAllocated) { 626 // ensure shutdown completes in 3 seconds 627 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 628 } 629 } 630 631 void ACodec::signalRequestIDRFrame() { 632 (new AMessage(kWhatRequestIDRFrame, this))->post(); 633 } 634 635 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 636 // Some codecs may return input buffers before having them processed. 637 // This causes a halt if we already signaled an EOS on the input 638 // port. For now keep submitting an output buffer if there was an 639 // EOS on the input port, but not yet on the output port. 640 void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 641 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 642 mMetadataBuffersToSubmit > 0) { 643 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 644 } 645 } 646 647 status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 648 // allow keeping unset surface 649 if (surface == NULL) { 650 if (mNativeWindow != NULL) { 651 ALOGW("cannot unset a surface"); 652 return INVALID_OPERATION; 653 } 654 return OK; 655 } 656 657 // cannot switch from bytebuffers to surface 658 if (mNativeWindow == NULL) { 659 ALOGW("component was not configured with a surface"); 660 return INVALID_OPERATION; 661 } 662 663 ANativeWindow *nativeWindow = surface.get(); 664 // if we have not yet started the codec, we can simply set the native window 665 if (mBuffers[kPortIndexInput].size() == 0) { 666 mNativeWindow = surface; 667 return OK; 668 } 669 670 // we do not support changing a tunneled surface after start 671 if (mTunneled) { 672 ALOGW("cannot change tunneled surface"); 673 return INVALID_OPERATION; 674 } 675 676 int usageBits = 0; 677 // no need to reconnect as we will not dequeue all buffers 678 status_t err = setupNativeWindowSizeFormatAndUsage( 679 nativeWindow, &usageBits, 680 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 681 if (err != OK) { 682 return err; 683 } 684 685 int ignoredFlags = kVideoGrallocUsage; 686 // New output surface is not allowed to add new usage flag except ignored ones. 
687 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 688 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 689 return BAD_VALUE; 690 } 691 692 // get min undequeued count. We cannot switch to a surface that has a higher 693 // undequeued count than we allocated. 694 int minUndequeuedBuffers = 0; 695 err = nativeWindow->query( 696 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 697 &minUndequeuedBuffers); 698 if (err != 0) { 699 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 700 strerror(-err), -err); 701 return err; 702 } 703 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 704 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 705 minUndequeuedBuffers, mNumUndequeuedBuffers); 706 return BAD_VALUE; 707 } 708 709 // we cannot change the number of output buffers while OMX is running 710 // set up surface to the same count 711 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 712 ALOGV("setting up surface for %zu buffers", buffers.size()); 713 714 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 715 if (err != 0) { 716 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 717 -err); 718 return err; 719 } 720 721 // need to enable allocation when attaching 722 surface->getIGraphicBufferProducer()->allowAllocation(true); 723 724 // for meta data mode, we move dequeud buffers to the new surface. 725 // for non-meta mode, we must move all registered buffers 726 for (size_t i = 0; i < buffers.size(); ++i) { 727 const BufferInfo &info = buffers[i]; 728 // skip undequeued buffers for meta data mode 729 if (storingMetadataInDecodedBuffers() 730 && !mLegacyAdaptiveExperiment 731 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 732 ALOGV("skipping buffer"); 733 continue; 734 } 735 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 736 737 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 738 if (err != OK) { 739 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 740 info.mGraphicBuffer->getNativeBuffer(), 741 strerror(-err), -err); 742 return err; 743 } 744 } 745 746 // cancel undequeued buffers to new surface 747 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 748 for (size_t i = 0; i < buffers.size(); ++i) { 749 BufferInfo &info = buffers.editItemAt(i); 750 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 751 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 752 err = nativeWindow->cancelBuffer( 753 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 754 info.mFenceFd = -1; 755 if (err != OK) { 756 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 757 info.mGraphicBuffer->getNativeBuffer(), 758 strerror(-err), -err); 759 return err; 760 } 761 } 762 } 763 // disallow further allocation 764 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 765 } 766 767 // push blank buffers to previous window if requested 768 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 769 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 770 } 771 772 mNativeWindow = nativeWindow; 773 mNativeWindowUsageBits = usageBits; 774 return OK; 775 } 776 777 status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 778 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 779 780 CHECK(mDealer[portIndex] == NULL); 781 CHECK(mBuffers[portIndex].isEmpty()); 782 783 status_t err; 784 if (mNativeWindow != 
NULL && portIndex == kPortIndexOutput) { 785 if (storingMetadataInDecodedBuffers()) { 786 err = allocateOutputMetadataBuffers(); 787 } else { 788 err = allocateOutputBuffersFromNativeWindow(); 789 } 790 } else { 791 OMX_PARAM_PORTDEFINITIONTYPE def; 792 InitOMXParams(&def); 793 def.nPortIndex = portIndex; 794 795 err = mOMX->getParameter( 796 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 797 798 if (err == OK) { 799 MetadataBufferType type = 800 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 801 size_t bufSize = def.nBufferSize; 802 if (type == kMetadataBufferTypeANWBuffer) { 803 bufSize = sizeof(VideoNativeMetadata); 804 } else if (type == kMetadataBufferTypeNativeHandleSource) { 805 bufSize = sizeof(VideoNativeHandleMetadata); 806 } 807 808 // If using gralloc or native source input metadata buffers, allocate largest 809 // metadata size as we prefer to generate native source metadata, but component 810 // may require gralloc source. For camera source, allocate at least enough 811 // size for native metadata buffers. 812 size_t allottedSize = bufSize; 813 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 814 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 815 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 816 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 817 } 818 819 size_t conversionBufferSize = 0; 820 821 sp<DataConverter> converter = mConverter[portIndex]; 822 if (converter != NULL) { 823 // here we assume sane conversions of max 4:1, so result fits in int32 824 if (portIndex == kPortIndexInput) { 825 conversionBufferSize = converter->sourceSize(bufSize); 826 } else { 827 conversionBufferSize = converter->targetSize(bufSize); 828 } 829 } 830 831 size_t alignment = MemoryDealer::getAllocationAlignment(); 832 833 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 834 mComponentName.c_str(), 835 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 836 portIndex == kPortIndexInput ? "input" : "output"); 837 838 // verify buffer sizes to avoid overflow in align() 839 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 840 ALOGE("b/22885421"); 841 return NO_MEMORY; 842 } 843 844 // don't modify bufSize as OMX may not expect it to increase after negotiation 845 size_t alignedSize = align(bufSize, alignment); 846 size_t alignedConvSize = align(conversionBufferSize, alignment); 847 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 848 ALOGE("b/22885421"); 849 return NO_MEMORY; 850 } 851 852 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 853 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 854 855 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 856 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 857 if (mem == NULL || mem->pointer() == NULL) { 858 return NO_MEMORY; 859 } 860 861 BufferInfo info; 862 info.mStatus = BufferInfo::OWNED_BY_US; 863 info.mFenceFd = -1; 864 info.mRenderInfo = NULL; 865 info.mNativeHandle = NULL; 866 867 uint32_t requiresAllocateBufferBit = 868 (portIndex == kPortIndexInput) 869 ? 
kRequiresAllocateBufferOnInputPorts 870 : kRequiresAllocateBufferOnOutputPorts; 871 872 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 873 mem.clear(); 874 875 void *ptr = NULL; 876 sp<NativeHandle> native_handle; 877 err = mOMX->allocateSecureBuffer( 878 mNode, portIndex, bufSize, &info.mBufferID, 879 &ptr, &native_handle); 880 881 // TRICKY: this representation is unorthodox, but ACodec requires 882 // an ABuffer with a proper size to validate range offsets and lengths. 883 // Since mData is never referenced for secure input, it is used to store 884 // either the pointer to the secure buffer, or the opaque handle as on 885 // some devices ptr is actually an opaque handle, not a pointer. 886 887 // TRICKY2: use native handle as the base of the ABuffer if received one, 888 // because Widevine source only receives these base addresses. 889 const native_handle_t *native_handle_ptr = 890 native_handle == NULL ? NULL : native_handle->handle(); 891 info.mData = new ABuffer( 892 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize); 893 info.mNativeHandle = native_handle; 894 info.mCodecData = info.mData; 895 } else if (mQuirks & requiresAllocateBufferBit) { 896 err = mOMX->allocateBufferWithBackup( 897 mNode, portIndex, mem, &info.mBufferID, allottedSize); 898 } else { 899 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 900 } 901 902 if (mem != NULL) { 903 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 904 info.mCodecRef = mem; 905 906 if (type == kMetadataBufferTypeANWBuffer) { 907 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 908 } 909 910 // if we require conversion, allocate conversion buffer for client use; 911 // otherwise, reuse codec buffer 912 if (mConverter[portIndex] != NULL) { 913 CHECK_GT(conversionBufferSize, (size_t)0); 914 mem = mDealer[portIndex]->allocate(conversionBufferSize); 915 if (mem == NULL|| mem->pointer() == NULL) { 916 return NO_MEMORY; 917 } 918 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 919 info.mMemRef = mem; 920 } else { 921 info.mData = info.mCodecData; 922 info.mMemRef = info.mCodecRef; 923 } 924 } 925 926 mBuffers[portIndex].push(info); 927 } 928 } 929 } 930 931 if (err != OK) { 932 return err; 933 } 934 935 sp<AMessage> notify = mNotify->dup(); 936 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 937 938 notify->setInt32("portIndex", portIndex); 939 940 sp<PortDescription> desc = new PortDescription; 941 942 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 943 const BufferInfo &info = mBuffers[portIndex][i]; 944 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 945 } 946 947 notify->setObject("portDesc", desc); 948 notify->post(); 949 950 return OK; 951 } 952 953 status_t ACodec::setupNativeWindowSizeFormatAndUsage( 954 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */, 955 bool reconnect) { 956 OMX_PARAM_PORTDEFINITIONTYPE def; 957 InitOMXParams(&def); 958 def.nPortIndex = kPortIndexOutput; 959 960 status_t err = mOMX->getParameter( 961 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 962 963 if (err != OK) { 964 return err; 965 } 966 967 OMX_U32 usage = 0; 968 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 969 if (err != 0) { 970 ALOGW("querying usage flags from OMX IL component failed: %d", err); 971 // XXX: Currently this error is logged, but not fatal. 
972 usage = 0; 973 } 974 int omxUsage = usage; 975 976 if (mFlags & kFlagIsGrallocUsageProtected) { 977 usage |= GRALLOC_USAGE_PROTECTED; 978 } 979 980 usage |= kVideoGrallocUsage; 981 *finalUsage = usage; 982 983 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 984 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN; 985 986 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 987 return setNativeWindowSizeFormatAndUsage( 988 nativeWindow, 989 def.format.video.nFrameWidth, 990 def.format.video.nFrameHeight, 991 def.format.video.eColorFormat, 992 mRotationDegrees, 993 usage, 994 reconnect); 995 } 996 997 status_t ACodec::configureOutputBuffersFromNativeWindow( 998 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 999 OMX_U32 *minUndequeuedBuffers, bool preregister) { 1000 1001 OMX_PARAM_PORTDEFINITIONTYPE def; 1002 InitOMXParams(&def); 1003 def.nPortIndex = kPortIndexOutput; 1004 1005 status_t err = mOMX->getParameter( 1006 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1007 1008 if (err == OK) { 1009 err = setupNativeWindowSizeFormatAndUsage( 1010 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1011 } 1012 if (err != OK) { 1013 mNativeWindowUsageBits = 0; 1014 return err; 1015 } 1016 1017 // Exits here for tunneled video playback codecs -- i.e. skips native window 1018 // buffer allocation step as this is managed by the tunneled OMX omponent 1019 // itself and explicitly sets def.nBufferCountActual to 0. 1020 if (mTunneled) { 1021 ALOGV("Tunneled Playback: skipping native window buffer allocation."); 1022 def.nBufferCountActual = 0; 1023 err = mOMX->setParameter( 1024 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1025 1026 *minUndequeuedBuffers = 0; 1027 *bufferCount = 0; 1028 *bufferSize = 0; 1029 return err; 1030 } 1031 1032 *minUndequeuedBuffers = 0; 1033 err = mNativeWindow->query( 1034 mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 1035 (int *)minUndequeuedBuffers); 1036 1037 if (err != 0) { 1038 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 1039 strerror(-err), -err); 1040 return err; 1041 } 1042 1043 // FIXME: assume that surface is controlled by app (native window 1044 // returns the number for the case when surface is not controlled by app) 1045 // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported 1046 // For now, try to allocate 1 more buffer, but don't fail if unsuccessful 1047 1048 // Use conservative allocation while also trying to reduce starvation 1049 // 1050 // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the 1051 // minimum needed for the consumer to be able to work 1052 // 2. 
try to allocate two (2) additional buffers to reduce starvation from 1053 // the consumer 1054 // plus an extra buffer to account for incorrect minUndequeuedBufs 1055 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { 1056 OMX_U32 newBufferCount = 1057 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; 1058 def.nBufferCountActual = newBufferCount; 1059 err = mOMX->setParameter( 1060 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1061 1062 if (err == OK) { 1063 *minUndequeuedBuffers += extraBuffers; 1064 break; 1065 } 1066 1067 ALOGW("[%s] setting nBufferCountActual to %u failed: %d", 1068 mComponentName.c_str(), newBufferCount, err); 1069 /* exit condition */ 1070 if (extraBuffers == 0) { 1071 return err; 1072 } 1073 } 1074 1075 err = native_window_set_buffer_count( 1076 mNativeWindow.get(), def.nBufferCountActual); 1077 1078 if (err != 0) { 1079 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 1080 -err); 1081 return err; 1082 } 1083 1084 *bufferCount = def.nBufferCountActual; 1085 *bufferSize = def.nBufferSize; 1086 return err; 1087 } 1088 1089 status_t ACodec::allocateOutputBuffersFromNativeWindow() { 1090 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1091 status_t err = configureOutputBuffersFromNativeWindow( 1092 &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */); 1093 if (err != 0) 1094 return err; 1095 mNumUndequeuedBuffers = minUndequeuedBuffers; 1096 1097 if (!storingMetadataInDecodedBuffers()) { 1098 static_cast<Surface*>(mNativeWindow.get()) 1099 ->getIGraphicBufferProducer()->allowAllocation(true); 1100 } 1101 1102 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1103 "output port", 1104 mComponentName.c_str(), bufferCount, bufferSize); 1105 1106 // Dequeue buffers and send them to OMX 1107 for (OMX_U32 i = 0; i < bufferCount; i++) { 1108 ANativeWindowBuffer *buf; 1109 int fenceFd; 1110 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1111 if (err != 0) { 1112 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1113 break; 1114 } 1115 1116 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1117 BufferInfo info; 1118 info.mStatus = BufferInfo::OWNED_BY_US; 1119 info.mFenceFd = fenceFd; 1120 info.mIsReadFence = false; 1121 info.mRenderInfo = NULL; 1122 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1123 info.mCodecData = info.mData; 1124 info.mGraphicBuffer = graphicBuffer; 1125 mBuffers[kPortIndexOutput].push(info); 1126 1127 IOMX::buffer_id bufferId; 1128 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1129 &bufferId); 1130 if (err != 0) { 1131 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1132 "%d", i, err); 1133 break; 1134 } 1135 1136 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1137 1138 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1139 mComponentName.c_str(), 1140 bufferId, graphicBuffer.get()); 1141 } 1142 1143 OMX_U32 cancelStart; 1144 OMX_U32 cancelEnd; 1145 1146 if (err != 0) { 1147 // If an error occurred while dequeuing we need to cancel any buffers 1148 // that were dequeued. 1149 cancelStart = 0; 1150 cancelEnd = mBuffers[kPortIndexOutput].size(); 1151 } else { 1152 // Return the required minimum undequeued buffers to the native window. 
1153 cancelStart = bufferCount - minUndequeuedBuffers; 1154 cancelEnd = bufferCount; 1155 } 1156 1157 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1158 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1159 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1160 status_t error = cancelBufferToNativeWindow(info); 1161 if (err == 0) { 1162 err = error; 1163 } 1164 } 1165 } 1166 1167 if (!storingMetadataInDecodedBuffers()) { 1168 static_cast<Surface*>(mNativeWindow.get()) 1169 ->getIGraphicBufferProducer()->allowAllocation(false); 1170 } 1171 1172 return err; 1173 } 1174 1175 status_t ACodec::allocateOutputMetadataBuffers() { 1176 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1177 status_t err = configureOutputBuffersFromNativeWindow( 1178 &bufferCount, &bufferSize, &minUndequeuedBuffers, 1179 mLegacyAdaptiveExperiment /* preregister */); 1180 if (err != 0) 1181 return err; 1182 mNumUndequeuedBuffers = minUndequeuedBuffers; 1183 1184 ALOGV("[%s] Allocating %u meta buffers on output port", 1185 mComponentName.c_str(), bufferCount); 1186 1187 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 1188 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1189 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1190 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1191 1192 // Dequeue buffers and send them to OMX 1193 for (OMX_U32 i = 0; i < bufferCount; i++) { 1194 BufferInfo info; 1195 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1196 info.mFenceFd = -1; 1197 info.mRenderInfo = NULL; 1198 info.mGraphicBuffer = NULL; 1199 info.mDequeuedAt = mDequeueCounter; 1200 1201 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1202 if (mem == NULL || mem->pointer() == NULL) { 1203 return NO_MEMORY; 1204 } 1205 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1206 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1207 } 1208 info.mData = new ABuffer(mem->pointer(), mem->size()); 1209 info.mMemRef = mem; 1210 info.mCodecData = info.mData; 1211 info.mCodecRef = mem; 1212 1213 // we use useBuffer for metadata regardless of quirks 1214 err = mOMX->useBuffer( 1215 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1216 mBuffers[kPortIndexOutput].push(info); 1217 1218 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1219 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1220 } 1221 1222 if (mLegacyAdaptiveExperiment) { 1223 // preallocate and preregister buffers 1224 static_cast<Surface *>(mNativeWindow.get()) 1225 ->getIGraphicBufferProducer()->allowAllocation(true); 1226 1227 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1228 "output port", 1229 mComponentName.c_str(), bufferCount, bufferSize); 1230 1231 // Dequeue buffers then cancel them all 1232 for (OMX_U32 i = 0; i < bufferCount; i++) { 1233 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1234 1235 ANativeWindowBuffer *buf; 1236 int fenceFd; 1237 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1238 if (err != 0) { 1239 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1240 break; 1241 } 1242 1243 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1244 mOMX->updateGraphicBufferInMeta( 1245 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1246 info->mStatus = BufferInfo::OWNED_BY_US; 1247 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1248 
info->mGraphicBuffer = graphicBuffer; 1249 } 1250 1251 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1252 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1253 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1254 status_t error = cancelBufferToNativeWindow(info); 1255 if (err == OK) { 1256 err = error; 1257 } 1258 } 1259 } 1260 1261 static_cast<Surface*>(mNativeWindow.get()) 1262 ->getIGraphicBufferProducer()->allowAllocation(false); 1263 } 1264 1265 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1266 return err; 1267 } 1268 1269 status_t ACodec::submitOutputMetadataBuffer() { 1270 CHECK(storingMetadataInDecodedBuffers()); 1271 if (mMetadataBuffersToSubmit == 0) 1272 return OK; 1273 1274 BufferInfo *info = dequeueBufferFromNativeWindow(); 1275 if (info == NULL) { 1276 return ERROR_IO; 1277 } 1278 1279 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1280 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1281 1282 --mMetadataBuffersToSubmit; 1283 info->checkWriteFence("submitOutputMetadataBuffer"); 1284 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1285 info->mFenceFd = -1; 1286 if (err == OK) { 1287 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1288 } 1289 1290 return err; 1291 } 1292 1293 status_t ACodec::waitForFence(int fd, const char *dbg ) { 1294 status_t res = OK; 1295 if (fd >= 0) { 1296 sp<Fence> fence = new Fence(fd); 1297 res = fence->wait(IOMX::kFenceTimeoutMs); 1298 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1299 } 1300 return res; 1301 } 1302 1303 // static 1304 const char *ACodec::_asString(BufferInfo::Status s) { 1305 switch (s) { 1306 case BufferInfo::OWNED_BY_US: return "OUR"; 1307 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1308 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1309 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1310 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1311 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1312 default: return "?"; 1313 } 1314 } 1315 1316 void ACodec::dumpBuffers(OMX_U32 portIndex) { 1317 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1318 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1319 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1320 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1321 const BufferInfo &info = mBuffers[portIndex][i]; 1322 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1323 i, info.mBufferID, info.mGraphicBuffer.get(), 1324 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1325 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1326 } 1327 } 1328 1329 status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1330 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1331 1332 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1333 mComponentName.c_str(), info->mBufferID); 1334 1335 info->checkWriteFence("cancelBufferToNativeWindow"); 1336 int err = mNativeWindow->cancelBuffer( 1337 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1338 info->mFenceFd = -1; 1339 1340 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1341 mComponentName.c_str(), info->mBufferID); 1342 // change ownership even if cancelBuffer fails 1343 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1344 1345 return err; 1346 } 1347 1348 void ACodec::updateRenderInfoForDequeuedBuffer( 1349 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1350 1351 info->mRenderInfo = 1352 mRenderTracker.updateInfoForDequeuedBuffer( 1353 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1354 1355 // check for any fences already signaled 1356 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1357 } 1358 1359 void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1360 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1361 mRenderTracker.dumpRenderQueue(); 1362 } 1363 } 1364 1365 void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1366 sp<AMessage> msg = mNotify->dup(); 1367 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1368 std::list<FrameRenderTracker::Info> done = 1369 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1370 1371 // unlink untracked frames 1372 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1373 it != done.cend(); ++it) { 1374 ssize_t index = it->getIndex(); 1375 if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) { 1376 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1377 } else if (index >= 0) { 1378 // THIS SHOULD NEVER HAPPEN 1379 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1380 } 1381 } 1382 1383 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1384 msg->post(); 1385 } 1386 } 1387 1388 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1389 ANativeWindowBuffer *buf; 1390 CHECK(mNativeWindow.get() != NULL); 1391 1392 if (mTunneled) { 1393 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1394 " video playback mode mode!"); 1395 return NULL; 1396 } 1397 1398 if (mFatalError) { 1399 ALOGW("not dequeuing from native window due to fatal error"); 1400 return NULL; 1401 } 1402 1403 int fenceFd = -1; 1404 do { 1405 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1406 if (err != 0) { 1407 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1408 return NULL; 1409 } 1410 1411 bool stale = false; 1412 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1413 i--; 1414 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1415 1416 if (info->mGraphicBuffer != NULL && 1417 info->mGraphicBuffer->handle == buf->handle) { 1418 // Since consumers can attach buffers to BufferQueues, it is possible 1419 // that a known yet stale buffer can return from a surface that we 1420 // once used. We can simply ignore this as we have already dequeued 1421 // this buffer properly. 
NOTE: this does not eliminate all cases, 1422 // e.g. it is possible that we have queued the valid buffer to the 1423 // NW, and a stale copy of the same buffer gets dequeued - which will 1424 // be treated as the valid buffer by ACodec. 1425 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1426 ALOGI("dequeued stale buffer %p. discarding", buf); 1427 stale = true; 1428 break; 1429 } 1430 1431 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1432 info->mStatus = BufferInfo::OWNED_BY_US; 1433 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1434 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1435 return info; 1436 } 1437 } 1438 1439 // It is also possible to receive a previously unregistered buffer 1440 // in non-meta mode. These should be treated as stale buffers. The 1441 // same is possible in meta mode, in which case, it will be treated 1442 // as a normal buffer, which is not desirable. 1443 // TODO: fix this. 1444 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1445 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1446 stale = true; 1447 } 1448 if (stale) { 1449 // TODO: detach stale buffer, but there is no API yet to do it. 1450 buf = NULL; 1451 } 1452 } while (buf == NULL); 1453 1454 // get oldest undequeued buffer 1455 BufferInfo *oldest = NULL; 1456 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1457 i--; 1458 BufferInfo *info = 1459 &mBuffers[kPortIndexOutput].editItemAt(i); 1460 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1461 (oldest == NULL || 1462 // avoid potential issues from counter rolling over 1463 mDequeueCounter - info->mDequeuedAt > 1464 mDequeueCounter - oldest->mDequeuedAt)) { 1465 oldest = info; 1466 } 1467 } 1468 1469 // it is impossible dequeue a buffer when there are no buffers with ANW 1470 CHECK(oldest != NULL); 1471 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1472 // while loop above does not complete 1473 CHECK(storingMetadataInDecodedBuffers()); 1474 1475 // discard buffer in LRU info and replace with new buffer 1476 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1477 oldest->mStatus = BufferInfo::OWNED_BY_US; 1478 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1479 mRenderTracker.untrackFrame(oldest->mRenderInfo); 1480 oldest->mRenderInfo = NULL; 1481 1482 mOMX->updateGraphicBufferInMeta( 1483 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1484 oldest->mBufferID); 1485 1486 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1487 VideoGrallocMetadata *grallocMeta = 1488 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1489 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1490 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1491 mDequeueCounter - oldest->mDequeuedAt, 1492 (void *)(uintptr_t)grallocMeta->pHandle, 1493 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1494 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1495 VideoNativeMetadata *nativeMeta = 1496 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1497 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1498 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1499 mDequeueCounter - oldest->mDequeuedAt, 1500 (void *)(uintptr_t)nativeMeta->pBuffer, 1501 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1502 } 1503 1504 updateRenderInfoForDequeuedBuffer(buf, fenceFd, 
oldest); 1505 return oldest; 1506 } 1507 1508 status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1509 status_t err = OK; 1510 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1511 i--; 1512 status_t err2 = freeBuffer(portIndex, i); 1513 if (err == OK) { 1514 err = err2; 1515 } 1516 } 1517 1518 // clear mDealer even on an error 1519 mDealer[portIndex].clear(); 1520 return err; 1521 } 1522 1523 status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1524 status_t err = OK; 1525 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1526 i--; 1527 BufferInfo *info = 1528 &mBuffers[kPortIndexOutput].editItemAt(i); 1529 1530 // At this time some buffers may still be with the component 1531 // or being drained. 1532 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1533 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1534 status_t err2 = freeBuffer(kPortIndexOutput, i); 1535 if (err == OK) { 1536 err = err2; 1537 } 1538 } 1539 } 1540 1541 return err; 1542 } 1543 1544 status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1545 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1546 status_t err = OK; 1547 1548 // there should not be any fences in the metadata 1549 MetadataBufferType type = 1550 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1551 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1552 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1553 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1554 if (fenceFd >= 0) { 1555 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1556 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1557 } 1558 } 1559 1560 switch (info->mStatus) { 1561 case BufferInfo::OWNED_BY_US: 1562 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1563 (void)cancelBufferToNativeWindow(info); 1564 } 1565 // fall through 1566 1567 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1568 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1569 break; 1570 1571 default: 1572 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1573 err = FAILED_TRANSACTION; 1574 break; 1575 } 1576 1577 if (info->mFenceFd >= 0) { 1578 ::close(info->mFenceFd); 1579 } 1580 1581 if (portIndex == kPortIndexOutput) { 1582 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1583 info->mRenderInfo = NULL; 1584 } 1585 1586 // remove buffer even if mOMX->freeBuffer fails 1587 mBuffers[portIndex].removeAt(i); 1588 return err; 1589 } 1590 1591 ACodec::BufferInfo *ACodec::findBufferByID( 1592 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1593 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1594 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1595 1596 if (info->mBufferID == bufferID) { 1597 if (index != NULL) { 1598 *index = i; 1599 } 1600 return info; 1601 } 1602 } 1603 1604 ALOGE("Could not find buffer with ID %u", bufferID); 1605 return NULL; 1606 } 1607 1608 status_t ACodec::setComponentRole( 1609 bool isEncoder, const char *mime) { 1610 const char *role = getComponentRole(isEncoder, mime); 1611 if (role == NULL) { 1612 return BAD_VALUE; 1613 } 1614 status_t err = setComponentRole(mOMX, mNode, role); 1615 if (err != OK) { 1616 ALOGW("[%s] Failed to set standard component role '%s'.", 1617 mComponentName.c_str(), role); 1618 } 1619 return err; 1620 } 1621 1622 //static 1623 const char *ACodec::getComponentRole( 1624 bool isEncoder, const char *mime) { 1625 struct MimeToRole { 1626 const char *mime; 1627 const char 
*decoderRole; 1628 const char *encoderRole; 1629 }; 1630 1631 static const MimeToRole kMimeToRole[] = { 1632 { MEDIA_MIMETYPE_AUDIO_MPEG, 1633 "audio_decoder.mp3", "audio_encoder.mp3" }, 1634 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1635 "audio_decoder.mp1", "audio_encoder.mp1" }, 1636 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1637 "audio_decoder.mp2", "audio_encoder.mp2" }, 1638 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1639 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1640 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1641 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1642 { MEDIA_MIMETYPE_AUDIO_AAC, 1643 "audio_decoder.aac", "audio_encoder.aac" }, 1644 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1645 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1646 { MEDIA_MIMETYPE_AUDIO_OPUS, 1647 "audio_decoder.opus", "audio_encoder.opus" }, 1648 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1649 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1650 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1651 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1652 { MEDIA_MIMETYPE_VIDEO_AVC, 1653 "video_decoder.avc", "video_encoder.avc" }, 1654 { MEDIA_MIMETYPE_VIDEO_HEVC, 1655 "video_decoder.hevc", "video_encoder.hevc" }, 1656 { MEDIA_MIMETYPE_VIDEO_MPEG4, 1657 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1658 { MEDIA_MIMETYPE_VIDEO_H263, 1659 "video_decoder.h263", "video_encoder.h263" }, 1660 { MEDIA_MIMETYPE_VIDEO_VP8, 1661 "video_decoder.vp8", "video_encoder.vp8" }, 1662 { MEDIA_MIMETYPE_VIDEO_VP9, 1663 "video_decoder.vp9", "video_encoder.vp9" }, 1664 { MEDIA_MIMETYPE_AUDIO_RAW, 1665 "audio_decoder.raw", "audio_encoder.raw" }, 1666 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1667 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1668 { MEDIA_MIMETYPE_AUDIO_FLAC, 1669 "audio_decoder.flac", "audio_encoder.flac" }, 1670 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1671 "audio_decoder.gsm", "audio_encoder.gsm" }, 1672 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1673 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1674 { MEDIA_MIMETYPE_AUDIO_AC3, 1675 "audio_decoder.ac3", "audio_encoder.ac3" }, 1676 { MEDIA_MIMETYPE_AUDIO_EAC3, 1677 "audio_decoder.eac3", "audio_encoder.eac3" }, 1678 }; 1679 1680 static const size_t kNumMimeToRole = 1681 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1682 1683 size_t i; 1684 for (i = 0; i < kNumMimeToRole; ++i) { 1685 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1686 break; 1687 } 1688 } 1689 1690 if (i == kNumMimeToRole) { 1691 return NULL; 1692 } 1693 1694 return isEncoder ? 
kMimeToRole[i].encoderRole 1695 : kMimeToRole[i].decoderRole; 1696 } 1697 1698 //static 1699 status_t ACodec::setComponentRole( 1700 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1701 OMX_PARAM_COMPONENTROLETYPE roleParams; 1702 InitOMXParams(&roleParams); 1703 1704 strncpy((char *)roleParams.cRole, 1705 role, OMX_MAX_STRINGNAME_SIZE - 1); 1706 1707 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1708 1709 return omx->setParameter( 1710 node, OMX_IndexParamStandardComponentRole, 1711 &roleParams, sizeof(roleParams)); 1712 } 1713 1714 status_t ACodec::configureCodec( 1715 const char *mime, const sp<AMessage> &msg) { 1716 int32_t encoder; 1717 if (!msg->findInt32("encoder", &encoder)) { 1718 encoder = false; 1719 } 1720 1721 sp<AMessage> inputFormat = new AMessage; 1722 sp<AMessage> outputFormat = new AMessage; 1723 mConfigFormat = msg; 1724 1725 mIsEncoder = encoder; 1726 1727 mInputMetadataType = kMetadataBufferTypeInvalid; 1728 mOutputMetadataType = kMetadataBufferTypeInvalid; 1729 1730 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1731 1732 if (err != OK) { 1733 return err; 1734 } 1735 1736 int32_t bitRate = 0; 1737 // FLAC encoder doesn't need a bitrate, other encoders do 1738 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1739 && !msg->findInt32("bitrate", &bitRate)) { 1740 return INVALID_OPERATION; 1741 } 1742 1743 // propagate bitrate to the output so that the muxer has it 1744 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1745 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1746 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 1747 outputFormat->setInt32("bitrate", bitRate); 1748 outputFormat->setInt32("max-bitrate", bitRate); 1749 } 1750 1751 int32_t storeMeta; 1752 if (encoder 1753 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1754 && storeMeta != kMetadataBufferTypeInvalid) { 1755 mInputMetadataType = (MetadataBufferType)storeMeta; 1756 err = mOMX->storeMetaDataInBuffers( 1757 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1758 if (err != OK) { 1759 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1760 mComponentName.c_str(), err); 1761 1762 return err; 1763 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1764 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1765 // IOMX translates ANWBuffers to gralloc source already. 1766 mInputMetadataType = (MetadataBufferType)storeMeta; 1767 } 1768 1769 uint32_t usageBits; 1770 if (mOMX->getParameter( 1771 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1772 &usageBits, sizeof(usageBits)) == OK) { 1773 inputFormat->setInt32( 1774 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1775 } 1776 } 1777 1778 int32_t prependSPSPPS = 0; 1779 if (encoder 1780 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1781 && prependSPSPPS != 0) { 1782 OMX_INDEXTYPE index; 1783 err = mOMX->getExtensionIndex( 1784 mNode, 1785 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1786 &index); 1787 1788 if (err == OK) { 1789 PrependSPSPPSToIDRFramesParams params; 1790 InitOMXParams(¶ms); 1791 params.bEnable = OMX_TRUE; 1792 1793 err = mOMX->setParameter( 1794 mNode, index, ¶ms, sizeof(params)); 1795 } 1796 1797 if (err != OK) { 1798 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1799 "IDR frames. 
(err %d)", err); 1800 1801 return err; 1802 } 1803 } 1804 1805 // Only enable metadata mode on encoder output if encoder can prepend 1806 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1807 // opaque handle, to which we don't have access. 1808 int32_t video = !strncasecmp(mime, "video/", 6); 1809 mIsVideo = video; 1810 if (encoder && video) { 1811 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1812 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1813 && storeMeta != 0); 1814 1815 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1816 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1817 if (err != OK) { 1818 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1819 mComponentName.c_str(), err); 1820 } 1821 1822 if (!msg->findInt64( 1823 "repeat-previous-frame-after", 1824 &mRepeatFrameDelayUs)) { 1825 mRepeatFrameDelayUs = -1ll; 1826 } 1827 1828 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1829 mMaxPtsGapUs = -1ll; 1830 } 1831 1832 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1833 mMaxFps = -1; 1834 } 1835 1836 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1837 mTimePerCaptureUs = -1ll; 1838 } 1839 1840 if (!msg->findInt32( 1841 "create-input-buffers-suspended", 1842 (int32_t*)&mCreateInputBuffersSuspended)) { 1843 mCreateInputBuffersSuspended = false; 1844 } 1845 } 1846 1847 // NOTE: we only use native window for video decoders 1848 sp<RefBase> obj; 1849 bool haveNativeWindow = msg->findObject("native-window", &obj) 1850 && obj != NULL && video && !encoder; 1851 mUsingNativeWindow = haveNativeWindow; 1852 mLegacyAdaptiveExperiment = false; 1853 if (video && !encoder) { 1854 inputFormat->setInt32("adaptive-playback", false); 1855 1856 int32_t usageProtected; 1857 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1858 if (!haveNativeWindow) { 1859 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1860 return PERMISSION_DENIED; 1861 } 1862 mFlags |= kFlagIsGrallocUsageProtected; 1863 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1864 } 1865 1866 if (mFlags & kFlagIsSecure) { 1867 // use native_handles for secure input buffers 1868 err = mOMX->enableNativeBuffers( 1869 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1870 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1871 err = OK; // ignore error for now 1872 } 1873 } 1874 if (haveNativeWindow) { 1875 sp<ANativeWindow> nativeWindow = 1876 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1877 1878 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1879 int32_t autoFrc; 1880 if (msg->findInt32("auto-frc", &autoFrc)) { 1881 bool enabled = autoFrc; 1882 OMX_CONFIG_BOOLEANTYPE config; 1883 InitOMXParams(&config); 1884 config.bEnabled = (OMX_BOOL)enabled; 1885 status_t temp = mOMX->setConfig( 1886 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1887 &config, sizeof(config)); 1888 if (temp == OK) { 1889 outputFormat->setInt32("auto-frc", enabled); 1890 } else if (enabled) { 1891 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1892 } 1893 } 1894 // END of temporary support for automatic FRC 1895 1896 int32_t tunneled; 1897 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1898 tunneled != 0) { 1899 ALOGI("Configuring TUNNELED video playback."); 1900 mTunneled = true; 1901 1902 int32_t audioHwSync = 0; 1903 if (!msg->findInt32("audio-hw-sync", 
&audioHwSync)) { 1904 ALOGW("No Audio HW Sync provided for video tunnel"); 1905 } 1906 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1907 if (err != OK) { 1908 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1909 audioHwSync, nativeWindow.get()); 1910 return err; 1911 } 1912 1913 int32_t maxWidth = 0, maxHeight = 0; 1914 if (msg->findInt32("max-width", &maxWidth) && 1915 msg->findInt32("max-height", &maxHeight)) { 1916 1917 err = mOMX->prepareForAdaptivePlayback( 1918 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1919 if (err != OK) { 1920 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1921 mComponentName.c_str(), err); 1922 // allow failure 1923 err = OK; 1924 } else { 1925 inputFormat->setInt32("max-width", maxWidth); 1926 inputFormat->setInt32("max-height", maxHeight); 1927 inputFormat->setInt32("adaptive-playback", true); 1928 } 1929 } 1930 } else { 1931 ALOGV("Configuring CPU controlled video playback."); 1932 mTunneled = false; 1933 1934 // Explicity reset the sideband handle of the window for 1935 // non-tunneled video in case the window was previously used 1936 // for a tunneled video playback. 1937 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1938 if (err != OK) { 1939 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1940 return err; 1941 } 1942 1943 // Always try to enable dynamic output buffers on native surface 1944 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1945 err = mOMX->storeMetaDataInBuffers( 1946 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1947 if (err != OK) { 1948 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1949 mComponentName.c_str(), err); 1950 1951 // if adaptive playback has been requested, try JB fallback 1952 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1953 // LARGE MEMORY REQUIREMENT 1954 1955 // we will not do adaptive playback on software accessed 1956 // surfaces as they never had to respond to changes in the 1957 // crop window, and we don't trust that they will be able to. 
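                // (Concretely: the query below reads NATIVE_WINDOW_CONSUMER_USAGE_BITS and only
                // allows the legacy prepareForAdaptivePlayback() path when neither
                // GRALLOC_USAGE_SW_READ_MASK nor GRALLOC_USAGE_SW_WRITE_MASK is set, i.e. when
                // the consumer never touches the buffers with the CPU.)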
1958 int usageBits = 0; 1959 bool canDoAdaptivePlayback; 1960 1961 if (nativeWindow->query( 1962 nativeWindow.get(), 1963 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1964 &usageBits) != OK) { 1965 canDoAdaptivePlayback = false; 1966 } else { 1967 canDoAdaptivePlayback = 1968 (usageBits & 1969 (GRALLOC_USAGE_SW_READ_MASK | 1970 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1971 } 1972 1973 int32_t maxWidth = 0, maxHeight = 0; 1974 if (canDoAdaptivePlayback && 1975 msg->findInt32("max-width", &maxWidth) && 1976 msg->findInt32("max-height", &maxHeight)) { 1977 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1978 mComponentName.c_str(), maxWidth, maxHeight); 1979 1980 err = mOMX->prepareForAdaptivePlayback( 1981 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1982 maxHeight); 1983 ALOGW_IF(err != OK, 1984 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1985 mComponentName.c_str(), err); 1986 1987 if (err == OK) { 1988 inputFormat->setInt32("max-width", maxWidth); 1989 inputFormat->setInt32("max-height", maxHeight); 1990 inputFormat->setInt32("adaptive-playback", true); 1991 } 1992 } 1993 // allow failure 1994 err = OK; 1995 } else { 1996 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1997 mComponentName.c_str()); 1998 CHECK(storingMetadataInDecodedBuffers()); 1999 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2000 "legacy-adaptive", !msg->contains("no-experiments")); 2001 2002 inputFormat->setInt32("adaptive-playback", true); 2003 } 2004 2005 int32_t push; 2006 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2007 && push != 0) { 2008 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2009 } 2010 } 2011 2012 int32_t rotationDegrees; 2013 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2014 mRotationDegrees = rotationDegrees; 2015 } else { 2016 mRotationDegrees = 0; 2017 } 2018 } 2019 2020 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2021 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2022 // invalid encodings will default to PCM-16bit in setupRawAudioFormat. 
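    // Note: this is the client-facing PCM encoding. If the component itself only produces
    // 16-bit PCM, an audio converter is created in the "create data converters if needed"
    // block further below, so that e.g. a request for kAudioEncodingPcmFloat can still be
    // satisfied on the buffers handed back to the application.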
2023 2024 if (video) { 2025 // determine need for software renderer 2026 bool usingSwRenderer = false; 2027 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2028 usingSwRenderer = true; 2029 haveNativeWindow = false; 2030 } 2031 2032 if (encoder) { 2033 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2034 } else { 2035 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2036 } 2037 2038 if (err != OK) { 2039 return err; 2040 } 2041 2042 if (haveNativeWindow) { 2043 mNativeWindow = static_cast<Surface *>(obj.get()); 2044 } 2045 2046 // initialize native window now to get actual output format 2047 // TODO: this is needed for some encoders even though they don't use native window 2048 err = initNativeWindow(); 2049 if (err != OK) { 2050 return err; 2051 } 2052 2053 // fallback for devices that do not handle flex-YUV for native buffers 2054 if (haveNativeWindow) { 2055 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2056 if (msg->findInt32("color-format", &requestedColorFormat) && 2057 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2058 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2059 if (err != OK) { 2060 return err; 2061 } 2062 int32_t colorFormat = OMX_COLOR_FormatUnused; 2063 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2064 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2065 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2066 return BAD_VALUE; 2067 } 2068 ALOGD("[%s] Requested output format %#x and got %#x.", 2069 mComponentName.c_str(), requestedColorFormat, colorFormat); 2070 if (!isFlexibleColorFormat( 2071 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2072 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2073 // device did not handle flex-YUV request for native window, fall back 2074 // to SW renderer 2075 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 2076 mNativeWindow.clear(); 2077 mNativeWindowUsageBits = 0; 2078 haveNativeWindow = false; 2079 usingSwRenderer = true; 2080 if (storingMetadataInDecodedBuffers()) { 2081 err = mOMX->storeMetaDataInBuffers( 2082 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2083 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2084 // TODO: implement adaptive-playback support for bytebuffer mode. 2085 // This is done by SW codecs, but most HW codecs don't support it. 2086 inputFormat->setInt32("adaptive-playback", false); 2087 } 2088 if (err == OK) { 2089 err = mOMX->enableNativeBuffers( 2090 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2091 } 2092 if (mFlags & kFlagIsGrallocUsageProtected) { 2093 // fallback is not supported for protected playback 2094 err = PERMISSION_DENIED; 2095 } else if (err == OK) { 2096 err = setupVideoDecoder( 2097 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2098 } 2099 } 2100 } 2101 } 2102 2103 if (usingSwRenderer) { 2104 outputFormat->setInt32("using-sw-renderer", 1); 2105 } 2106 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2107 int32_t numChannels, sampleRate; 2108 if (!msg->findInt32("channel-count", &numChannels) 2109 || !msg->findInt32("sample-rate", &sampleRate)) { 2110 // Since we did not always check for these, leave them optional 2111 // and have the decoder figure it all out. 2112 err = OK; 2113 } else { 2114 err = setupRawAudioFormat( 2115 encoder ? 
kPortIndexInput : kPortIndexOutput, 2116 sampleRate, 2117 numChannels); 2118 } 2119 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2120 int32_t numChannels, sampleRate; 2121 if (!msg->findInt32("channel-count", &numChannels) 2122 || !msg->findInt32("sample-rate", &sampleRate)) { 2123 err = INVALID_OPERATION; 2124 } else { 2125 int32_t isADTS, aacProfile; 2126 int32_t sbrMode; 2127 int32_t maxOutputChannelCount; 2128 int32_t pcmLimiterEnable; 2129 drcParams_t drc; 2130 if (!msg->findInt32("is-adts", &isADTS)) { 2131 isADTS = 0; 2132 } 2133 if (!msg->findInt32("aac-profile", &aacProfile)) { 2134 aacProfile = OMX_AUDIO_AACObjectNull; 2135 } 2136 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2137 sbrMode = -1; 2138 } 2139 2140 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2141 maxOutputChannelCount = -1; 2142 } 2143 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2144 // value is unknown 2145 pcmLimiterEnable = -1; 2146 } 2147 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2148 // value is unknown 2149 drc.encodedTargetLevel = -1; 2150 } 2151 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2152 // value is unknown 2153 drc.drcCut = -1; 2154 } 2155 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2156 // value is unknown 2157 drc.drcBoost = -1; 2158 } 2159 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2160 // value is unknown 2161 drc.heavyCompression = -1; 2162 } 2163 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2164 // value is unknown 2165 drc.targetRefLevel = -1; 2166 } 2167 2168 err = setupAACCodec( 2169 encoder, numChannels, sampleRate, bitRate, aacProfile, 2170 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2171 pcmLimiterEnable); 2172 } 2173 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2174 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2175 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2176 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2177 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2178 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2179 // These are PCM-like formats with a fixed sample rate but 2180 // a variable number of channels. 
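        // G.711 A-law / mu-law is an 8 kHz telephony codec, which is why the code below
        // falls back to a sample rate of 8000 when none is given; only the channel count
        // is strictly required here.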
2181 2182 int32_t numChannels; 2183 if (!msg->findInt32("channel-count", &numChannels)) { 2184 err = INVALID_OPERATION; 2185 } else { 2186 int32_t sampleRate; 2187 if (!msg->findInt32("sample-rate", &sampleRate)) { 2188 sampleRate = 8000; 2189 } 2190 err = setupG711Codec(encoder, sampleRate, numChannels); 2191 } 2192 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2193 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2194 if (encoder && 2195 (!msg->findInt32("channel-count", &numChannels) 2196 || !msg->findInt32("sample-rate", &sampleRate))) { 2197 ALOGE("missing channel count or sample rate for FLAC encoder"); 2198 err = INVALID_OPERATION; 2199 } else { 2200 if (encoder) { 2201 if (!msg->findInt32( 2202 "complexity", &compressionLevel) && 2203 !msg->findInt32( 2204 "flac-compression-level", &compressionLevel)) { 2205 compressionLevel = 5; // default FLAC compression level 2206 } else if (compressionLevel < 0) { 2207 ALOGW("compression level %d outside [0..8] range, " 2208 "using 0", 2209 compressionLevel); 2210 compressionLevel = 0; 2211 } else if (compressionLevel > 8) { 2212 ALOGW("compression level %d outside [0..8] range, " 2213 "using 8", 2214 compressionLevel); 2215 compressionLevel = 8; 2216 } 2217 } 2218 err = setupFlacCodec( 2219 encoder, numChannels, sampleRate, compressionLevel); 2220 } 2221 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2222 int32_t numChannels, sampleRate; 2223 if (encoder 2224 || !msg->findInt32("channel-count", &numChannels) 2225 || !msg->findInt32("sample-rate", &sampleRate)) { 2226 err = INVALID_OPERATION; 2227 } else { 2228 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2229 } 2230 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2231 int32_t numChannels; 2232 int32_t sampleRate; 2233 if (!msg->findInt32("channel-count", &numChannels) 2234 || !msg->findInt32("sample-rate", &sampleRate)) { 2235 err = INVALID_OPERATION; 2236 } else { 2237 err = setupAC3Codec(encoder, numChannels, sampleRate); 2238 } 2239 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2240 int32_t numChannels; 2241 int32_t sampleRate; 2242 if (!msg->findInt32("channel-count", &numChannels) 2243 || !msg->findInt32("sample-rate", &sampleRate)) { 2244 err = INVALID_OPERATION; 2245 } else { 2246 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2247 } 2248 } 2249 2250 if (err != OK) { 2251 return err; 2252 } 2253 2254 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2255 mEncoderDelay = 0; 2256 } 2257 2258 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2259 mEncoderPadding = 0; 2260 } 2261 2262 if (msg->findInt32("channel-mask", &mChannelMask)) { 2263 mChannelMaskPresent = true; 2264 } else { 2265 mChannelMaskPresent = false; 2266 } 2267 2268 int32_t maxInputSize; 2269 if (msg->findInt32("max-input-size", &maxInputSize)) { 2270 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2271 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2272 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2273 } 2274 2275 int32_t priority; 2276 if (msg->findInt32("priority", &priority)) { 2277 err = setPriority(priority); 2278 } 2279 2280 int32_t rateInt = -1; 2281 float rateFloat = -1; 2282 if (!msg->findFloat("operating-rate", &rateFloat)) { 2283 msg->findInt32("operating-rate", &rateInt); 2284 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
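        // (FLINTMAX refers to the largest integer a float can hold exactly, 2^24 = 16777216,
        // i.e. roughly "16 MHz" -- so the int32-to-float conversion above is lossless for any
        // plausible operating rate.)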
2285 } 2286 if (rateFloat > 0) { 2287 err = setOperatingRate(rateFloat, video); 2288 } 2289 2290 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2291 mBaseOutputFormat = outputFormat; 2292 // trigger a kWhatOutputFormatChanged msg on first buffer 2293 mLastOutputFormat.clear(); 2294 2295 err = getPortFormat(kPortIndexInput, inputFormat); 2296 if (err == OK) { 2297 err = getPortFormat(kPortIndexOutput, outputFormat); 2298 if (err == OK) { 2299 mInputFormat = inputFormat; 2300 mOutputFormat = outputFormat; 2301 } 2302 } 2303 2304 // create data converters if needed 2305 if (!video && err == OK) { 2306 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2307 if (encoder) { 2308 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2309 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2310 if (mConverter[kPortIndexInput] != NULL) { 2311 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2312 } 2313 } else { 2314 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2315 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2316 if (mConverter[kPortIndexOutput] != NULL) { 2317 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2318 } 2319 } 2320 } 2321 2322 return err; 2323 } 2324 2325 status_t ACodec::setPriority(int32_t priority) { 2326 if (priority < 0) { 2327 return BAD_VALUE; 2328 } 2329 OMX_PARAM_U32TYPE config; 2330 InitOMXParams(&config); 2331 config.nU32 = (OMX_U32)priority; 2332 status_t temp = mOMX->setConfig( 2333 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2334 &config, sizeof(config)); 2335 if (temp != OK) { 2336 ALOGI("codec does not support config priority (err %d)", temp); 2337 } 2338 return OK; 2339 } 2340 2341 status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2342 if (rateFloat < 0) { 2343 return BAD_VALUE; 2344 } 2345 OMX_U32 rate; 2346 if (isVideo) { 2347 if (rateFloat > 65535) { 2348 return BAD_VALUE; 2349 } 2350 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2351 } else { 2352 if (rateFloat > UINT_MAX) { 2353 return BAD_VALUE; 2354 } 2355 rate = (OMX_U32)(rateFloat); 2356 } 2357 OMX_PARAM_U32TYPE config; 2358 InitOMXParams(&config); 2359 config.nU32 = rate; 2360 status_t err = mOMX->setConfig( 2361 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2362 &config, sizeof(config)); 2363 if (err != OK) { 2364 ALOGI("codec does not support config operating rate (err %d)", err); 2365 } 2366 return OK; 2367 } 2368 2369 status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2370 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2371 InitOMXParams(¶ms); 2372 params.nPortIndex = kPortIndexOutput; 2373 status_t err = mOMX->getConfig( 2374 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2375 if (err == OK) { 2376 *intraRefreshPeriod = params.nRefreshPeriod; 2377 return OK; 2378 } 2379 2380 // Fallback to query through standard OMX index. 
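    // The cyclic-refresh fallback derives the period from the frame size in macroblocks.
    // For example, a 1920x1080 stream reporting nCirMBs = 405 gives
    //   divUp(1920, 16) * divUp(1080, 16) = 120 * 68 = 8160 MBs per frame,
    //   *intraRefreshPeriod = divUp(8160, 405) = 21 frames.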
2381 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2382 InitOMXParams(&refreshParams); 2383 refreshParams.nPortIndex = kPortIndexOutput; 2384 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2385 err = mOMX->getParameter( 2386 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2387 if (err != OK || refreshParams.nCirMBs == 0) { 2388 *intraRefreshPeriod = 0; 2389 return OK; 2390 } 2391 2392 // Calculate period based on width and height 2393 uint32_t width, height; 2394 OMX_PARAM_PORTDEFINITIONTYPE def; 2395 InitOMXParams(&def); 2396 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2397 def.nPortIndex = kPortIndexOutput; 2398 err = mOMX->getParameter( 2399 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2400 if (err != OK) { 2401 *intraRefreshPeriod = 0; 2402 return err; 2403 } 2404 width = video_def->nFrameWidth; 2405 height = video_def->nFrameHeight; 2406 // Use H.264/AVC MacroBlock size 16x16 2407 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2408 2409 return OK; 2410 } 2411 2412 status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2413 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2414 InitOMXParams(¶ms); 2415 params.nPortIndex = kPortIndexOutput; 2416 params.nRefreshPeriod = intraRefreshPeriod; 2417 status_t err = mOMX->setConfig( 2418 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2419 if (err == OK) { 2420 return OK; 2421 } 2422 2423 // Only in configure state, a component could invoke setParameter. 2424 if (!inConfigure) { 2425 return INVALID_OPERATION; 2426 } else { 2427 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2428 } 2429 2430 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2431 InitOMXParams(&refreshParams); 2432 refreshParams.nPortIndex = kPortIndexOutput; 2433 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2434 2435 if (intraRefreshPeriod == 0) { 2436 // 0 means disable intra refresh. 
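        // (Otherwise the else branch below spreads the frame's macroblocks evenly over
        // 'intraRefreshPeriod' frames -- the inverse of the getIntraRefreshPeriod()
        // calculation above.)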
2437 refreshParams.nCirMBs = 0; 2438 } else { 2439 // Calculate macroblocks that need to be intra coded base on width and height 2440 uint32_t width, height; 2441 OMX_PARAM_PORTDEFINITIONTYPE def; 2442 InitOMXParams(&def); 2443 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2444 def.nPortIndex = kPortIndexOutput; 2445 err = mOMX->getParameter( 2446 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2447 if (err != OK) { 2448 return err; 2449 } 2450 width = video_def->nFrameWidth; 2451 height = video_def->nFrameHeight; 2452 // Use H.264/AVC MacroBlock size 16x16 2453 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2454 } 2455 2456 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2457 &refreshParams, sizeof(refreshParams)); 2458 if (err != OK) { 2459 return err; 2460 } 2461 2462 return OK; 2463 } 2464 2465 status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2466 OMX_PARAM_PORTDEFINITIONTYPE def; 2467 InitOMXParams(&def); 2468 def.nPortIndex = portIndex; 2469 2470 status_t err = mOMX->getParameter( 2471 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2472 2473 if (err != OK) { 2474 return err; 2475 } 2476 2477 if (def.nBufferSize >= size) { 2478 return OK; 2479 } 2480 2481 def.nBufferSize = size; 2482 2483 err = mOMX->setParameter( 2484 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2485 2486 if (err != OK) { 2487 return err; 2488 } 2489 2490 err = mOMX->getParameter( 2491 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2492 2493 if (err != OK) { 2494 return err; 2495 } 2496 2497 if (def.nBufferSize < size) { 2498 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2499 return FAILED_TRANSACTION; 2500 } 2501 2502 return OK; 2503 } 2504 2505 status_t ACodec::selectAudioPortFormat( 2506 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2507 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2508 InitOMXParams(&format); 2509 2510 format.nPortIndex = portIndex; 2511 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2512 format.nIndex = index; 2513 status_t err = mOMX->getParameter( 2514 mNode, OMX_IndexParamAudioPortFormat, 2515 &format, sizeof(format)); 2516 2517 if (err != OK) { 2518 return err; 2519 } 2520 2521 if (format.eEncoding == desiredFormat) { 2522 break; 2523 } 2524 2525 if (index == kMaxIndicesToCheck) { 2526 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2527 mComponentName.c_str(), index, 2528 asString(format.eEncoding), format.eEncoding); 2529 return ERROR_UNSUPPORTED; 2530 } 2531 } 2532 2533 return mOMX->setParameter( 2534 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2535 } 2536 2537 status_t ACodec::setupAACCodec( 2538 bool encoder, int32_t numChannels, int32_t sampleRate, 2539 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2540 int32_t maxOutputChannelCount, const drcParams_t& drc, 2541 int32_t pcmLimiterEnable) { 2542 if (encoder && isADTS) { 2543 return -EINVAL; 2544 } 2545 2546 status_t err = setupRawAudioFormat( 2547 encoder ? 
kPortIndexInput : kPortIndexOutput, 2548 sampleRate, 2549 numChannels); 2550 2551 if (err != OK) { 2552 return err; 2553 } 2554 2555 if (encoder) { 2556 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2557 2558 if (err != OK) { 2559 return err; 2560 } 2561 2562 OMX_PARAM_PORTDEFINITIONTYPE def; 2563 InitOMXParams(&def); 2564 def.nPortIndex = kPortIndexOutput; 2565 2566 err = mOMX->getParameter( 2567 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2568 2569 if (err != OK) { 2570 return err; 2571 } 2572 2573 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2574 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2575 2576 err = mOMX->setParameter( 2577 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2578 2579 if (err != OK) { 2580 return err; 2581 } 2582 2583 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2584 InitOMXParams(&profile); 2585 profile.nPortIndex = kPortIndexOutput; 2586 2587 err = mOMX->getParameter( 2588 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2589 2590 if (err != OK) { 2591 return err; 2592 } 2593 2594 profile.nChannels = numChannels; 2595 2596 profile.eChannelMode = 2597 (numChannels == 1) 2598 ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2599 2600 profile.nSampleRate = sampleRate; 2601 profile.nBitRate = bitRate; 2602 profile.nAudioBandWidth = 0; 2603 profile.nFrameLength = 0; 2604 profile.nAACtools = OMX_AUDIO_AACToolAll; 2605 profile.nAACERtools = OMX_AUDIO_AACERNone; 2606 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2607 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2608 switch (sbrMode) { 2609 case 0: 2610 // disable sbr 2611 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2612 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2613 break; 2614 case 1: 2615 // enable single-rate sbr 2616 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2617 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2618 break; 2619 case 2: 2620 // enable dual-rate sbr 2621 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2622 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2623 break; 2624 case -1: 2625 // enable both modes -> the codec will decide which mode should be used 2626 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2627 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2628 break; 2629 default: 2630 // unsupported sbr mode 2631 return BAD_VALUE; 2632 } 2633 2634 2635 err = mOMX->setParameter( 2636 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2637 2638 if (err != OK) { 2639 return err; 2640 } 2641 2642 return err; 2643 } 2644 2645 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2646 InitOMXParams(&profile); 2647 profile.nPortIndex = kPortIndexInput; 2648 2649 err = mOMX->getParameter( 2650 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2651 2652 if (err != OK) { 2653 return err; 2654 } 2655 2656 profile.nChannels = numChannels; 2657 profile.nSampleRate = sampleRate; 2658 2659 profile.eAACStreamFormat = 2660 isADTS 2661 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2662 : OMX_AUDIO_AACStreamFormatMP4FF; 2663 2664 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2665 InitOMXParams(&presentation); 2666 presentation.nMaxOutputChannels = maxOutputChannelCount; 2667 presentation.nDrcCut = drc.drcCut; 2668 presentation.nDrcBoost = drc.drcBoost; 2669 presentation.nHeavyCompression = drc.heavyCompression; 2670 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2671 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2672 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2673 2674 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2675 if (res == OK) { 2676 // optional parameters, will not cause configuration failure 2677 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2678 &presentation, sizeof(presentation)); 2679 } else { 2680 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2681 } 2682 return res; 2683 } 2684 2685 status_t ACodec::setupAC3Codec( 2686 bool encoder, int32_t numChannels, int32_t sampleRate) { 2687 status_t err = setupRawAudioFormat( 2688 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2689 2690 if (err != OK) { 2691 return err; 2692 } 2693 2694 if (encoder) { 2695 ALOGW("AC3 encoding is not supported."); 2696 return INVALID_OPERATION; 2697 } 2698 2699 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2700 InitOMXParams(&def); 2701 def.nPortIndex = kPortIndexInput; 2702 2703 err = mOMX->getParameter( 2704 mNode, 2705 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2706 &def, 2707 sizeof(def)); 2708 2709 if (err != OK) { 2710 return err; 2711 } 2712 2713 def.nChannels = numChannels; 2714 def.nSampleRate = sampleRate; 2715 2716 return mOMX->setParameter( 2717 mNode, 2718 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2719 &def, 2720 sizeof(def)); 2721 } 2722 2723 status_t ACodec::setupEAC3Codec( 2724 bool encoder, int32_t numChannels, int32_t sampleRate) { 2725 status_t err = setupRawAudioFormat( 2726 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2727 2728 if (err != OK) { 2729 return err; 2730 } 2731 2732 if (encoder) { 2733 ALOGW("EAC3 encoding is not supported."); 2734 return INVALID_OPERATION; 2735 } 2736 2737 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2738 InitOMXParams(&def); 2739 def.nPortIndex = kPortIndexInput; 2740 2741 err = mOMX->getParameter( 2742 mNode, 2743 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2744 &def, 2745 sizeof(def)); 2746 2747 if (err != OK) { 2748 return err; 2749 } 2750 2751 def.nChannels = numChannels; 2752 def.nSampleRate = sampleRate; 2753 2754 return mOMX->setParameter( 2755 mNode, 2756 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2757 &def, 2758 sizeof(def)); 2759 } 2760 2761 static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2762 bool isAMRWB, int32_t bps) { 2763 if (isAMRWB) { 2764 if (bps <= 6600) { 2765 return OMX_AUDIO_AMRBandModeWB0; 2766 } else if (bps <= 8850) { 2767 return OMX_AUDIO_AMRBandModeWB1; 2768 } else if (bps <= 12650) { 2769 return OMX_AUDIO_AMRBandModeWB2; 2770 } else if (bps <= 14250) { 2771 return OMX_AUDIO_AMRBandModeWB3; 2772 } else if (bps <= 15850) { 2773 return OMX_AUDIO_AMRBandModeWB4; 2774 } else if (bps <= 18250) { 2775 return OMX_AUDIO_AMRBandModeWB5; 2776 } else if (bps <= 19850) { 2777 return OMX_AUDIO_AMRBandModeWB6; 2778 } else if (bps <= 23050) { 2779 return OMX_AUDIO_AMRBandModeWB7; 2780 } 2781 2782 // 23850 bps 2783 return OMX_AUDIO_AMRBandModeWB8; 2784 } else { // AMRNB 2785 if (bps <= 4750) { 2786 return OMX_AUDIO_AMRBandModeNB0; 2787 } else if (bps <= 5150) { 2788 return OMX_AUDIO_AMRBandModeNB1; 2789 } else if (bps <= 5900) { 2790 return OMX_AUDIO_AMRBandModeNB2; 2791 } else if (bps <= 6700) { 2792 return OMX_AUDIO_AMRBandModeNB3; 2793 } else if (bps <= 7400) { 2794 return OMX_AUDIO_AMRBandModeNB4; 2795 } else if (bps <= 7950) { 2796 return OMX_AUDIO_AMRBandModeNB5; 2797 } else if (bps <= 10200) { 2798 return OMX_AUDIO_AMRBandModeNB6; 2799 } 2800 2801 // 12200 bps 2802 return OMX_AUDIO_AMRBandModeNB7; 2803 } 2804 } 2805 2806 status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2807 OMX_AUDIO_PARAM_AMRTYPE def; 2808 InitOMXParams(&def); 2809 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2810 2811 status_t err = 2812 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2813 2814 if (err != OK) { 2815 return err; 2816 } 2817 2818 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2819 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2820 2821 err = mOMX->setParameter( 2822 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2823 2824 if (err != OK) { 2825 return err; 2826 } 2827 2828 return setupRawAudioFormat( 2829 encoder ? kPortIndexInput : kPortIndexOutput, 2830 isWAMR ? 
16000 : 8000 /* sampleRate */, 2831 1 /* numChannels */); 2832 } 2833 2834 status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2835 if (encoder) { 2836 return INVALID_OPERATION; 2837 } 2838 2839 return setupRawAudioFormat( 2840 kPortIndexInput, sampleRate, numChannels); 2841 } 2842 2843 status_t ACodec::setupFlacCodec( 2844 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2845 2846 if (encoder) { 2847 OMX_AUDIO_PARAM_FLACTYPE def; 2848 InitOMXParams(&def); 2849 def.nPortIndex = kPortIndexOutput; 2850 2851 // configure compression level 2852 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2853 if (err != OK) { 2854 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2855 return err; 2856 } 2857 def.nCompressionLevel = compressionLevel; 2858 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2859 if (err != OK) { 2860 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2861 return err; 2862 } 2863 } 2864 2865 return setupRawAudioFormat( 2866 encoder ? kPortIndexInput : kPortIndexOutput, 2867 sampleRate, 2868 numChannels); 2869 } 2870 2871 status_t ACodec::setupRawAudioFormat( 2872 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2873 OMX_PARAM_PORTDEFINITIONTYPE def; 2874 InitOMXParams(&def); 2875 def.nPortIndex = portIndex; 2876 2877 status_t err = mOMX->getParameter( 2878 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2879 2880 if (err != OK) { 2881 return err; 2882 } 2883 2884 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2885 2886 err = mOMX->setParameter( 2887 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2888 2889 if (err != OK) { 2890 return err; 2891 } 2892 2893 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2894 InitOMXParams(&pcmParams); 2895 pcmParams.nPortIndex = portIndex; 2896 2897 err = mOMX->getParameter( 2898 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2899 2900 if (err != OK) { 2901 return err; 2902 } 2903 2904 pcmParams.nChannels = numChannels; 2905 switch (encoding) { 2906 case kAudioEncodingPcm8bit: 2907 pcmParams.eNumData = OMX_NumericalDataUnsigned; 2908 pcmParams.nBitPerSample = 8; 2909 break; 2910 case kAudioEncodingPcmFloat: 2911 pcmParams.eNumData = OMX_NumericalDataFloat; 2912 pcmParams.nBitPerSample = 32; 2913 break; 2914 case kAudioEncodingPcm16bit: 2915 pcmParams.eNumData = OMX_NumericalDataSigned; 2916 pcmParams.nBitPerSample = 16; 2917 break; 2918 default: 2919 return BAD_VALUE; 2920 } 2921 pcmParams.bInterleaved = OMX_TRUE; 2922 pcmParams.nSamplingRate = sampleRate; 2923 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2924 2925 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2926 return OMX_ErrorNone; 2927 } 2928 2929 err = mOMX->setParameter( 2930 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2931 // if we could not set up raw format to non-16-bit, try with 16-bit 2932 // NOTE: we will also verify this via readback, in case codec ignores these fields 2933 if (err != OK && encoding != kAudioEncodingPcm16bit) { 2934 pcmParams.eNumData = OMX_NumericalDataSigned; 2935 pcmParams.nBitPerSample = 16; 2936 err = mOMX->setParameter( 2937 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2938 } 2939 return err; 2940 } 2941 2942 status_t ACodec::configureTunneledVideoPlayback( 2943 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2944 
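    // Tunneled playback: ask the component for a sideband stream tied to the given audio
    // HW sync id, then attach the returned handle to the ANativeWindow. Decoded frames then
    // flow directly to the display path instead of being returned through the normal
    // FillBufferDone / BufferQueue route.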
native_handle_t* sidebandHandle; 2945 2946 status_t err = mOMX->configureVideoTunnelMode( 2947 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2948 if (err != OK) { 2949 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2950 return err; 2951 } 2952 2953 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2954 if (err != OK) { 2955 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 2956 sidebandHandle, err); 2957 return err; 2958 } 2959 2960 return OK; 2961 } 2962 2963 status_t ACodec::setVideoPortFormatType( 2964 OMX_U32 portIndex, 2965 OMX_VIDEO_CODINGTYPE compressionFormat, 2966 OMX_COLOR_FORMATTYPE colorFormat, 2967 bool usingNativeBuffers) { 2968 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 2969 InitOMXParams(&format); 2970 format.nPortIndex = portIndex; 2971 format.nIndex = 0; 2972 bool found = false; 2973 2974 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2975 format.nIndex = index; 2976 status_t err = mOMX->getParameter( 2977 mNode, OMX_IndexParamVideoPortFormat, 2978 &format, sizeof(format)); 2979 2980 if (err != OK) { 2981 return err; 2982 } 2983 2984 // substitute back flexible color format to codec supported format 2985 OMX_U32 flexibleEquivalent; 2986 if (compressionFormat == OMX_VIDEO_CodingUnused 2987 && isFlexibleColorFormat( 2988 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 2989 && colorFormat == flexibleEquivalent) { 2990 ALOGI("[%s] using color format %#x in place of %#x", 2991 mComponentName.c_str(), format.eColorFormat, colorFormat); 2992 colorFormat = format.eColorFormat; 2993 } 2994 2995 // The following assertion is violated by TI's video decoder. 2996 // CHECK_EQ(format.nIndex, index); 2997 2998 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 2999 if (portIndex == kPortIndexInput 3000 && colorFormat == format.eColorFormat) { 3001 // eCompressionFormat does not seem right. 3002 found = true; 3003 break; 3004 } 3005 if (portIndex == kPortIndexOutput 3006 && compressionFormat == format.eCompressionFormat) { 3007 // eColorFormat does not seem right. 3008 found = true; 3009 break; 3010 } 3011 } 3012 3013 if (format.eCompressionFormat == compressionFormat 3014 && format.eColorFormat == colorFormat) { 3015 found = true; 3016 break; 3017 } 3018 3019 if (index == kMaxIndicesToCheck) { 3020 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3021 mComponentName.c_str(), index, 3022 asString(format.eCompressionFormat), format.eCompressionFormat, 3023 asString(format.eColorFormat), format.eColorFormat); 3024 } 3025 } 3026 3027 if (!found) { 3028 return UNKNOWN_ERROR; 3029 } 3030 3031 status_t err = mOMX->setParameter( 3032 mNode, OMX_IndexParamVideoPortFormat, 3033 &format, sizeof(format)); 3034 3035 return err; 3036 } 3037 3038 // Set optimal output format. OMX component lists output formats in the order 3039 // of preference, but this got more complicated since the introduction of flexible 3040 // YUV formats. We support a legacy behavior for applications that do not use 3041 // surface output, do not specify an output format, but expect a "usable" standard 3042 // OMX format. SW readable and standard formats must be flex-YUV. 
3043 // 3044 // Suggested preference order: 3045 // - optimal format for texture rendering (mediaplayer behavior) 3046 // - optimal SW readable & texture renderable format (flex-YUV support) 3047 // - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3048 // - legacy "usable" standard formats 3049 // 3050 // For legacy support, we prefer a standard format, but will settle for a SW readable 3051 // flex-YUV format. 3052 status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3053 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3054 InitOMXParams(&format); 3055 format.nPortIndex = kPortIndexOutput; 3056 3057 InitOMXParams(&legacyFormat); 3058 // this field will change when we find a suitable legacy format 3059 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3060 3061 for (OMX_U32 index = 0; ; ++index) { 3062 format.nIndex = index; 3063 status_t err = mOMX->getParameter( 3064 mNode, OMX_IndexParamVideoPortFormat, 3065 &format, sizeof(format)); 3066 if (err != OK) { 3067 // no more formats, pick legacy format if found 3068 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3069 memcpy(&format, &legacyFormat, sizeof(format)); 3070 break; 3071 } 3072 return err; 3073 } 3074 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3075 return OMX_ErrorBadParameter; 3076 } 3077 if (!getLegacyFlexibleFormat) { 3078 break; 3079 } 3080 // standard formats that were exposed to users before 3081 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3082 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3083 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3084 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3085 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3086 break; 3087 } 3088 // find best legacy non-standard format 3089 OMX_U32 flexibleEquivalent; 3090 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3091 && isFlexibleColorFormat( 3092 mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */, 3093 &flexibleEquivalent) 3094 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3095 memcpy(&legacyFormat, &format, sizeof(format)); 3096 } 3097 } 3098 return mOMX->setParameter( 3099 mNode, OMX_IndexParamVideoPortFormat, 3100 &format, sizeof(format)); 3101 } 3102 3103 static const struct VideoCodingMapEntry { 3104 const char *mMime; 3105 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3106 } kVideoCodingMapEntry[] = { 3107 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3108 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3109 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3110 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3111 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3112 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3113 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3114 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3115 }; 3116 3117 static status_t GetVideoCodingTypeFromMime( 3118 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3119 for (size_t i = 0; 3120 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3121 ++i) { 3122 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3123 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3124 return OK; 3125 } 3126 } 3127 3128 *codingType = OMX_VIDEO_CodingUnused; 3129 3130 return ERROR_UNSUPPORTED; 3131 } 3132 3133 static status_t GetMimeTypeForVideoCoding( 3134 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3135 for 
(size_t i = 0; 3136 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3137 ++i) { 3138 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3139 *mime = kVideoCodingMapEntry[i].mMime; 3140 return OK; 3141 } 3142 } 3143 3144 mime->clear(); 3145 3146 return ERROR_UNSUPPORTED; 3147 } 3148 3149 status_t ACodec::setupVideoDecoder( 3150 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3151 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3152 int32_t width, height; 3153 if (!msg->findInt32("width", &width) 3154 || !msg->findInt32("height", &height)) { 3155 return INVALID_OPERATION; 3156 } 3157 3158 OMX_VIDEO_CODINGTYPE compressionFormat; 3159 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3160 3161 if (err != OK) { 3162 return err; 3163 } 3164 3165 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3166 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3167 InitOMXParams(¶ms); 3168 params.nPortIndex = kPortIndexInput; 3169 // Check if VP9 decoder advertises supported profiles. 3170 params.nProfileIndex = 0; 3171 status_t err = mOMX->getParameter( 3172 mNode, 3173 OMX_IndexParamVideoProfileLevelQuerySupported, 3174 ¶ms, 3175 sizeof(params)); 3176 mIsLegacyVP9Decoder = err != OK; 3177 } 3178 3179 err = setVideoPortFormatType( 3180 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3181 3182 if (err != OK) { 3183 return err; 3184 } 3185 3186 int32_t tmp; 3187 if (msg->findInt32("color-format", &tmp)) { 3188 OMX_COLOR_FORMATTYPE colorFormat = 3189 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3190 err = setVideoPortFormatType( 3191 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3192 if (err != OK) { 3193 ALOGW("[%s] does not support color format %d", 3194 mComponentName.c_str(), colorFormat); 3195 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3196 } 3197 } else { 3198 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3199 } 3200 3201 if (err != OK) { 3202 return err; 3203 } 3204 3205 int32_t frameRateInt; 3206 float frameRateFloat; 3207 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3208 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3209 frameRateInt = -1; 3210 } 3211 frameRateFloat = (float)frameRateInt; 3212 } 3213 3214 err = setVideoFormatOnPort( 3215 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3216 3217 if (err != OK) { 3218 return err; 3219 } 3220 3221 err = setVideoFormatOnPort( 3222 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3223 3224 if (err != OK) { 3225 return err; 3226 } 3227 3228 err = setColorAspectsForVideoDecoder( 3229 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3230 if (err == ERROR_UNSUPPORTED) { // support is optional 3231 err = OK; 3232 } 3233 3234 if (err != OK) { 3235 return err; 3236 } 3237 3238 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3239 if (err == ERROR_UNSUPPORTED) { // support is optional 3240 err = OK; 3241 } 3242 return err; 3243 } 3244 3245 status_t ACodec::initDescribeColorAspectsIndex() { 3246 status_t err = mOMX->getExtensionIndex( 3247 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3248 if (err != OK) { 3249 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3250 } 3251 return err; 3252 } 3253 3254 status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3255 status_t err = ERROR_UNSUPPORTED; 3256 if (mDescribeColorAspectsIndex) { 3257 err = 
mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3258 } 3259 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3260 mComponentName.c_str(), 3261 params.sAspects.mRange, asString(params.sAspects.mRange), 3262 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3263 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3264 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3265 err, asString(err)); 3266 3267 if (verify && err == OK) { 3268 err = getCodecColorAspects(params); 3269 } 3270 3271 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3272 "[%s] setting color aspects failed even though codec advertises support", 3273 mComponentName.c_str()); 3274 return err; 3275 } 3276 3277 status_t ACodec::setColorAspectsForVideoDecoder( 3278 int32_t width, int32_t height, bool usingNativeWindow, 3279 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3280 DescribeColorAspectsParams params; 3281 InitOMXParams(¶ms); 3282 params.nPortIndex = kPortIndexOutput; 3283 3284 getColorAspectsFromFormat(configFormat, params.sAspects); 3285 if (usingNativeWindow) { 3286 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3287 // The default aspects will be set back to the output format during the 3288 // getFormat phase of configure(). Set non-Unspecified values back into the 3289 // format, in case component does not support this enumeration. 3290 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3291 } 3292 3293 (void)initDescribeColorAspectsIndex(); 3294 3295 // communicate color aspects to codec 3296 return setCodecColorAspects(params); 3297 } 3298 3299 status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3300 status_t err = ERROR_UNSUPPORTED; 3301 if (mDescribeColorAspectsIndex) { 3302 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3303 } 3304 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3305 mComponentName.c_str(), 3306 params.sAspects.mRange, asString(params.sAspects.mRange), 3307 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3308 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3309 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3310 err, asString(err)); 3311 if (params.bRequestingDataSpace) { 3312 ALOGV("for dataspace %#x", params.nDataSpace); 3313 } 3314 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3315 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3316 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3317 mComponentName.c_str()); 3318 } 3319 return err; 3320 } 3321 3322 status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3323 DescribeColorAspectsParams params; 3324 InitOMXParams(¶ms); 3325 params.nPortIndex = kPortIndexInput; 3326 status_t err = getCodecColorAspects(params); 3327 if (err == OK) { 3328 // we only set encoder input aspects if codec supports them 3329 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3330 } 3331 return err; 3332 } 3333 3334 status_t ACodec::getDataSpace( 3335 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3336 bool tryCodec) { 3337 status_t err = OK; 3338 if (tryCodec) { 3339 // request dataspace guidance from codec. 
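        // Setting bRequestingDataSpace around the query asks the component to translate its
        // color aspects into an android_dataspace. If the component does not implement this
        // (ERROR_UNSUPPORTED), we fall through and derive the dataspace from the aspects
        // ourselves via getDataSpaceForColorAspects().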
3340 params.bRequestingDataSpace = OMX_TRUE; 3341 err = getCodecColorAspects(params); 3342 params.bRequestingDataSpace = OMX_FALSE; 3343 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3344 *dataSpace = (android_dataspace)params.nDataSpace; 3345 return err; 3346 } else if (err == ERROR_UNSUPPORTED) { 3347 // ignore not-implemented error for dataspace requests 3348 err = OK; 3349 } 3350 } 3351 3352 // this returns legacy versions if available 3353 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3354 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3355 "and dataspace %#x", 3356 mComponentName.c_str(), 3357 params.sAspects.mRange, asString(params.sAspects.mRange), 3358 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3359 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3360 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3361 *dataSpace); 3362 return err; 3363 } 3364 3365 3366 status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3367 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3368 android_dataspace *dataSpace) { 3369 DescribeColorAspectsParams params; 3370 InitOMXParams(¶ms); 3371 params.nPortIndex = kPortIndexOutput; 3372 3373 // reset default format and get resulting format 3374 getColorAspectsFromFormat(configFormat, params.sAspects); 3375 if (dataSpace != NULL) { 3376 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3377 } 3378 status_t err = setCodecColorAspects(params, true /* readBack */); 3379 3380 // we always set specified aspects for decoders 3381 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3382 3383 if (dataSpace != NULL) { 3384 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3385 if (err == OK) { 3386 err = res; 3387 } 3388 } 3389 3390 return err; 3391 } 3392 3393 // initial video encoder setup for bytebuffer mode 3394 status_t ACodec::setColorAspectsForVideoEncoder( 3395 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3396 // copy config to output format as this is not exposed via getFormat 3397 copyColorConfig(configFormat, outputFormat); 3398 3399 DescribeColorAspectsParams params; 3400 InitOMXParams(¶ms); 3401 params.nPortIndex = kPortIndexInput; 3402 getColorAspectsFromFormat(configFormat, params.sAspects); 3403 3404 (void)initDescribeColorAspectsIndex(); 3405 3406 int32_t usingRecorder; 3407 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3408 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3409 int32_t width, height; 3410 if (configFormat->findInt32("width", &width) 3411 && configFormat->findInt32("height", &height)) { 3412 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3413 status_t err = getDataSpace( 3414 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3415 if (err != OK) { 3416 return err; 3417 } 3418 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3419 } 3420 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3421 } 3422 3423 // communicate color aspects to codec, but do not allow change of the platform aspects 3424 ColorAspects origAspects = params.sAspects; 3425 for (int triesLeft = 2; --triesLeft >= 0; ) { 3426 status_t err = setCodecColorAspects(params, true /* readBack */); 3427 if (err != OK 3428 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3429 
params.sAspects, origAspects, true /* usePlatformAspects */)) { 3430 return err; 3431 } 3432 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3433 mComponentName.c_str()); 3434 } 3435 return OK; 3436 } 3437 3438 status_t ACodec::setHDRStaticInfoForVideoCodec( 3439 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3440 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3441 3442 DescribeHDRStaticInfoParams params; 3443 InitOMXParams(¶ms); 3444 params.nPortIndex = portIndex; 3445 3446 HDRStaticInfo *info = ¶ms.sInfo; 3447 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3448 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3449 } 3450 3451 (void)initDescribeHDRStaticInfoIndex(); 3452 3453 // communicate HDR static Info to codec 3454 return setHDRStaticInfo(params); 3455 } 3456 3457 // subsequent initial video encoder setup for surface mode 3458 status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3459 android_dataspace *dataSpace /* nonnull */) { 3460 DescribeColorAspectsParams params; 3461 InitOMXParams(¶ms); 3462 params.nPortIndex = kPortIndexInput; 3463 ColorAspects &aspects = params.sAspects; 3464 3465 // reset default format and store resulting format into both input and output formats 3466 getColorAspectsFromFormat(mConfigFormat, aspects); 3467 int32_t width, height; 3468 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3469 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3470 } 3471 setColorAspectsIntoFormat(aspects, mInputFormat); 3472 setColorAspectsIntoFormat(aspects, mOutputFormat); 3473 3474 // communicate color aspects to codec, but do not allow any change 3475 ColorAspects origAspects = aspects; 3476 status_t err = OK; 3477 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3478 status_t err = setCodecColorAspects(params, true /* readBack */); 3479 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3480 break; 3481 } 3482 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3483 mComponentName.c_str()); 3484 } 3485 3486 *dataSpace = HAL_DATASPACE_BT709; 3487 aspects = origAspects; // restore desired color aspects 3488 status_t res = getDataSpace( 3489 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3490 if (err == OK) { 3491 err = res; 3492 } 3493 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3494 mInputFormat->setBuffer( 3495 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3496 3497 // update input format with codec supported color aspects (basically set unsupported 3498 // aspects to Unspecified) 3499 if (err == OK) { 3500 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3501 } 3502 3503 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3504 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3505 3506 return err; 3507 } 3508 3509 status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3510 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3511 DescribeHDRStaticInfoParams params; 3512 InitOMXParams(¶ms); 3513 params.nPortIndex = portIndex; 3514 3515 status_t err = getHDRStaticInfo(params); 3516 if (err == OK) { 3517 // we only set decodec output HDRStaticInfo if codec supports them 3518 
setHDRStaticInfoIntoFormat(params.sInfo, format); 3519 } 3520 return err; 3521 } 3522 3523 status_t ACodec::initDescribeHDRStaticInfoIndex() { 3524 status_t err = mOMX->getExtensionIndex( 3525 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3526 if (err != OK) { 3527 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3528 } 3529 return err; 3530 } 3531 3532 status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3533 status_t err = ERROR_UNSUPPORTED; 3534 if (mDescribeHDRStaticInfoIndex) { 3535 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3536 } 3537 3538 const HDRStaticInfo *info = ¶ms.sInfo; 3539 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3540 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3541 mComponentName.c_str(), 3542 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y, 3543 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3544 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3545 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3546 3547 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3548 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3549 mComponentName.c_str()); 3550 return err; 3551 } 3552 3553 status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3554 status_t err = ERROR_UNSUPPORTED; 3555 if (mDescribeHDRStaticInfoIndex) { 3556 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3557 } 3558 3559 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3560 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3561 mComponentName.c_str()); 3562 return err; 3563 } 3564 3565 status_t ACodec::setupVideoEncoder( 3566 const char *mime, const sp<AMessage> &msg, 3567 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3568 int32_t tmp; 3569 if (!msg->findInt32("color-format", &tmp)) { 3570 return INVALID_OPERATION; 3571 } 3572 3573 OMX_COLOR_FORMATTYPE colorFormat = 3574 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3575 3576 status_t err = setVideoPortFormatType( 3577 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3578 3579 if (err != OK) { 3580 ALOGE("[%s] does not support color format %d", 3581 mComponentName.c_str(), colorFormat); 3582 3583 return err; 3584 } 3585 3586 /* Input port configuration */ 3587 3588 OMX_PARAM_PORTDEFINITIONTYPE def; 3589 InitOMXParams(&def); 3590 3591 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3592 3593 def.nPortIndex = kPortIndexInput; 3594 3595 err = mOMX->getParameter( 3596 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3597 3598 if (err != OK) { 3599 return err; 3600 } 3601 3602 int32_t width, height, bitrate; 3603 if (!msg->findInt32("width", &width) 3604 || !msg->findInt32("height", &height) 3605 || !msg->findInt32("bitrate", &bitrate)) { 3606 return INVALID_OPERATION; 3607 } 3608 3609 video_def->nFrameWidth = width; 3610 video_def->nFrameHeight = height; 3611 3612 int32_t stride; 3613 if (!msg->findInt32("stride", &stride)) { 3614 stride = width; 3615 } 3616 3617 video_def->nStride = stride; 3618 3619 int32_t sliceHeight; 3620 if (!msg->findInt32("slice-height", &sliceHeight)) { 3621 sliceHeight = height; 3622 } 3623 3624 video_def->nSliceHeight = sliceHeight; 3625 3626 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3627 3628 
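    // (the size above assumes a 4:2:0 input layout, i.e. 12 bits per pixel: one luma plane
    // plus two quarter-size chroma planes; e.g. 1280x720 with matching stride/slice-height
    // needs 1280 * 720 * 3 / 2 = 1,382,400 bytes)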
float frameRate; 3629 if (!msg->findFloat("frame-rate", &frameRate)) { 3630 int32_t tmp; 3631 if (!msg->findInt32("frame-rate", &tmp)) { 3632 return INVALID_OPERATION; 3633 } 3634 frameRate = (float)tmp; 3635 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3636 } 3637 3638 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3639 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3640 // this is redundant as it was already set up in setVideoPortFormatType 3641 // FIXME for now skip this only for flexible YUV formats 3642 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3643 video_def->eColorFormat = colorFormat; 3644 } 3645 3646 err = mOMX->setParameter( 3647 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3648 3649 if (err != OK) { 3650 ALOGE("[%s] failed to set input port definition parameters.", 3651 mComponentName.c_str()); 3652 3653 return err; 3654 } 3655 3656 /* Output port configuration */ 3657 3658 OMX_VIDEO_CODINGTYPE compressionFormat; 3659 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3660 3661 if (err != OK) { 3662 return err; 3663 } 3664 3665 err = setVideoPortFormatType( 3666 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3667 3668 if (err != OK) { 3669 ALOGE("[%s] does not support compression format %d", 3670 mComponentName.c_str(), compressionFormat); 3671 3672 return err; 3673 } 3674 3675 def.nPortIndex = kPortIndexOutput; 3676 3677 err = mOMX->getParameter( 3678 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3679 3680 if (err != OK) { 3681 return err; 3682 } 3683 3684 video_def->nFrameWidth = width; 3685 video_def->nFrameHeight = height; 3686 video_def->xFramerate = 0; 3687 video_def->nBitrate = bitrate; 3688 video_def->eCompressionFormat = compressionFormat; 3689 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3690 3691 err = mOMX->setParameter( 3692 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3693 3694 if (err != OK) { 3695 ALOGE("[%s] failed to set output port definition parameters.", 3696 mComponentName.c_str()); 3697 3698 return err; 3699 } 3700 3701 int32_t intraRefreshPeriod = 0; 3702 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3703 && intraRefreshPeriod >= 0) { 3704 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3705 if (err != OK) { 3706 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3707 mComponentName.c_str()); 3708 err = OK; 3709 } 3710 } 3711 3712 switch (compressionFormat) { 3713 case OMX_VIDEO_CodingMPEG4: 3714 err = setupMPEG4EncoderParameters(msg); 3715 break; 3716 3717 case OMX_VIDEO_CodingH263: 3718 err = setupH263EncoderParameters(msg); 3719 break; 3720 3721 case OMX_VIDEO_CodingAVC: 3722 err = setupAVCEncoderParameters(msg); 3723 break; 3724 3725 case OMX_VIDEO_CodingHEVC: 3726 err = setupHEVCEncoderParameters(msg); 3727 break; 3728 3729 case OMX_VIDEO_CodingVP8: 3730 case OMX_VIDEO_CodingVP9: 3731 err = setupVPXEncoderParameters(msg); 3732 break; 3733 3734 default: 3735 break; 3736 } 3737 3738 // Set up color aspects on input, but propagate them to the output format, as they will 3739 // not be read back from encoder. 3740 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3741 if (err == ERROR_UNSUPPORTED) { 3742 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3743 err = OK; 3744 } 3745 3746 if (err != OK) { 3747 return err; 3748 } 3749 3750 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3751 if (err == ERROR_UNSUPPORTED) { // support is optional 3752 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str()); 3753 err = OK; 3754 } 3755 3756 if (err == OK) { 3757 ALOGI("setupVideoEncoder succeeded"); 3758 } 3759 3760 return err; 3761 } 3762 3763 status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3764 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3765 InitOMXParams(¶ms); 3766 params.nPortIndex = kPortIndexOutput; 3767 3768 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3769 3770 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3771 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3772 int32_t mbs; 3773 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3774 return INVALID_OPERATION; 3775 } 3776 params.nCirMBs = mbs; 3777 } 3778 3779 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3780 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3781 int32_t mbs; 3782 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3783 return INVALID_OPERATION; 3784 } 3785 params.nAirMBs = mbs; 3786 3787 int32_t ref; 3788 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3789 return INVALID_OPERATION; 3790 } 3791 params.nAirRef = ref; 3792 } 3793 3794 status_t err = mOMX->setParameter( 3795 mNode, OMX_IndexParamVideoIntraRefresh, 3796 ¶ms, sizeof(params)); 3797 return err; 3798 } 3799 3800 static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3801 if (iFramesInterval < 0) { 3802 return 0xFFFFFFFF; 3803 } else if (iFramesInterval == 0) { 3804 return 0; 3805 } 3806 OMX_U32 ret = frameRate * iFramesInterval; 3807 return ret; 3808 } 3809 3810 static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3811 int32_t tmp; 3812 if (!msg->findInt32("bitrate-mode", &tmp)) { 3813 return OMX_Video_ControlRateVariable; 3814 } 3815 3816 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3817 } 3818 3819 status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3820 int32_t bitrate, iFrameInterval; 3821 if (!msg->findInt32("bitrate", &bitrate) 3822 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3823 return INVALID_OPERATION; 3824 } 3825 3826 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3827 3828 float frameRate; 3829 if (!msg->findFloat("frame-rate", &frameRate)) { 3830 int32_t tmp; 3831 if (!msg->findInt32("frame-rate", &tmp)) { 3832 return INVALID_OPERATION; 3833 } 3834 frameRate = (float)tmp; 3835 } 3836 3837 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3838 InitOMXParams(&mpeg4type); 3839 mpeg4type.nPortIndex = kPortIndexOutput; 3840 3841 status_t err = mOMX->getParameter( 3842 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3843 3844 if (err != OK) { 3845 return err; 3846 } 3847 3848 mpeg4type.nSliceHeaderSpacing = 0; 3849 mpeg4type.bSVH = OMX_FALSE; 3850 mpeg4type.bGov = OMX_FALSE; 3851 3852 mpeg4type.nAllowedPictureTypes = 3853 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3854 3855 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3856 if (mpeg4type.nPFrames == 0) { 3857 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3858 } 3859 mpeg4type.nBFrames = 0; 3860 mpeg4type.nIDCVLCThreshold = 0; 3861 mpeg4type.bACPred = OMX_TRUE; 3862 mpeg4type.nMaxPacketSize = 256; 3863 
mpeg4type.nTimeIncRes = 1000; 3864 mpeg4type.nHeaderExtension = 0; 3865 mpeg4type.bReversibleVLC = OMX_FALSE; 3866 3867 int32_t profile; 3868 if (msg->findInt32("profile", &profile)) { 3869 int32_t level; 3870 if (!msg->findInt32("level", &level)) { 3871 return INVALID_OPERATION; 3872 } 3873 3874 err = verifySupportForProfileAndLevel(profile, level); 3875 3876 if (err != OK) { 3877 return err; 3878 } 3879 3880 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3881 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3882 } 3883 3884 err = mOMX->setParameter( 3885 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3886 3887 if (err != OK) { 3888 return err; 3889 } 3890 3891 err = configureBitrate(bitrate, bitrateMode); 3892 3893 if (err != OK) { 3894 return err; 3895 } 3896 3897 return setupErrorCorrectionParameters(); 3898 } 3899 3900 status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3901 int32_t bitrate, iFrameInterval; 3902 if (!msg->findInt32("bitrate", &bitrate) 3903 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3904 return INVALID_OPERATION; 3905 } 3906 3907 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3908 3909 float frameRate; 3910 if (!msg->findFloat("frame-rate", &frameRate)) { 3911 int32_t tmp; 3912 if (!msg->findInt32("frame-rate", &tmp)) { 3913 return INVALID_OPERATION; 3914 } 3915 frameRate = (float)tmp; 3916 } 3917 3918 OMX_VIDEO_PARAM_H263TYPE h263type; 3919 InitOMXParams(&h263type); 3920 h263type.nPortIndex = kPortIndexOutput; 3921 3922 status_t err = mOMX->getParameter( 3923 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3924 3925 if (err != OK) { 3926 return err; 3927 } 3928 3929 h263type.nAllowedPictureTypes = 3930 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3931 3932 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3933 if (h263type.nPFrames == 0) { 3934 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3935 } 3936 h263type.nBFrames = 0; 3937 3938 int32_t profile; 3939 if (msg->findInt32("profile", &profile)) { 3940 int32_t level; 3941 if (!msg->findInt32("level", &level)) { 3942 return INVALID_OPERATION; 3943 } 3944 3945 err = verifySupportForProfileAndLevel(profile, level); 3946 3947 if (err != OK) { 3948 return err; 3949 } 3950 3951 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3952 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3953 } 3954 3955 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3956 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3957 h263type.nPictureHeaderRepetition = 0; 3958 h263type.nGOBHeaderInterval = 0; 3959 3960 err = mOMX->setParameter( 3961 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3962 3963 if (err != OK) { 3964 return err; 3965 } 3966 3967 err = configureBitrate(bitrate, bitrateMode); 3968 3969 if (err != OK) { 3970 return err; 3971 } 3972 3973 return setupErrorCorrectionParameters(); 3974 } 3975 3976 // static 3977 int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3978 int width, int height, int rate, int bitrate, 3979 OMX_VIDEO_AVCPROFILETYPE profile) { 3980 // convert bitrate to main/baseline profile kbps equivalent 3981 switch (profile) { 3982 case OMX_VIDEO_AVCProfileHigh10: 3983 bitrate = divUp(bitrate, 3000); break; 3984 case OMX_VIDEO_AVCProfileHigh: 3985 bitrate = divUp(bitrate, 1250); break; 3986 default: 3987 bitrate = divUp(bitrate, 1000); break; 3988 } 3989 3990 // convert size and rate to MBs 3991 width = divUp(width, 16); 3992 
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps     MB  dim  bitrate        level */
        {    1485,    99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
            ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
                    intraRefreshMode, err);
            return err;
        }
    }

    OMX_VIDEO_PARAM_AVCTYPE h264type;
    InitOMXParams(&h264type);
    h264type.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    h264type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
        h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
    } else {
        // Use baseline profile for AVC recording if profile is not specified.
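        // (the baseline settings applied below also keep B-frames and CABAC disabled)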
4088 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4089 } 4090 4091 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4092 asString(h264type.eProfile), asString(h264type.eLevel)); 4093 4094 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4095 h264type.nSliceHeaderSpacing = 0; 4096 h264type.bUseHadamard = OMX_TRUE; 4097 h264type.nRefFrames = 1; 4098 h264type.nBFrames = 0; 4099 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4100 if (h264type.nPFrames == 0) { 4101 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4102 } 4103 h264type.nRefIdx10ActiveMinus1 = 0; 4104 h264type.nRefIdx11ActiveMinus1 = 0; 4105 h264type.bEntropyCodingCABAC = OMX_FALSE; 4106 h264type.bWeightedPPrediction = OMX_FALSE; 4107 h264type.bconstIpred = OMX_FALSE; 4108 h264type.bDirect8x8Inference = OMX_FALSE; 4109 h264type.bDirectSpatialTemporal = OMX_FALSE; 4110 h264type.nCabacInitIdc = 0; 4111 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4112 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4113 h264type.nSliceHeaderSpacing = 0; 4114 h264type.bUseHadamard = OMX_TRUE; 4115 h264type.nRefFrames = 2; 4116 h264type.nBFrames = 1; 4117 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4118 h264type.nAllowedPictureTypes = 4119 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4120 h264type.nRefIdx10ActiveMinus1 = 0; 4121 h264type.nRefIdx11ActiveMinus1 = 0; 4122 h264type.bEntropyCodingCABAC = OMX_TRUE; 4123 h264type.bWeightedPPrediction = OMX_TRUE; 4124 h264type.bconstIpred = OMX_TRUE; 4125 h264type.bDirect8x8Inference = OMX_TRUE; 4126 h264type.bDirectSpatialTemporal = OMX_TRUE; 4127 h264type.nCabacInitIdc = 1; 4128 } 4129 4130 if (h264type.nBFrames != 0) { 4131 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4132 } 4133 4134 h264type.bEnableUEP = OMX_FALSE; 4135 h264type.bEnableFMO = OMX_FALSE; 4136 h264type.bEnableASO = OMX_FALSE; 4137 h264type.bEnableRS = OMX_FALSE; 4138 h264type.bFrameMBsOnly = OMX_TRUE; 4139 h264type.bMBAFF = OMX_FALSE; 4140 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4141 4142 err = mOMX->setParameter( 4143 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4144 4145 if (err != OK) { 4146 return err; 4147 } 4148 4149 return configureBitrate(bitrate, bitrateMode); 4150 } 4151 4152 status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4153 int32_t bitrate, iFrameInterval; 4154 if (!msg->findInt32("bitrate", &bitrate) 4155 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4156 return INVALID_OPERATION; 4157 } 4158 4159 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4160 4161 float frameRate; 4162 if (!msg->findFloat("frame-rate", &frameRate)) { 4163 int32_t tmp; 4164 if (!msg->findInt32("frame-rate", &tmp)) { 4165 return INVALID_OPERATION; 4166 } 4167 frameRate = (float)tmp; 4168 } 4169 4170 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4171 InitOMXParams(&hevcType); 4172 hevcType.nPortIndex = kPortIndexOutput; 4173 4174 status_t err = OK; 4175 err = mOMX->getParameter( 4176 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4177 if (err != OK) { 4178 return err; 4179 } 4180 4181 int32_t profile; 4182 if (msg->findInt32("profile", &profile)) { 4183 int32_t level; 4184 if (!msg->findInt32("level", &level)) { 4185 return INVALID_OPERATION; 4186 } 4187 4188 err = verifySupportForProfileAndLevel(profile, level); 4189 if (err != OK) { 4190 return err; 4191 } 4192 4193 hevcType.eProfile = 
static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4194 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4195 } 4196 // TODO: finer control? 4197 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4198 4199 err = mOMX->setParameter( 4200 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4201 if (err != OK) { 4202 return err; 4203 } 4204 4205 return configureBitrate(bitrate, bitrateMode); 4206 } 4207 4208 status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4209 int32_t bitrate; 4210 int32_t iFrameInterval = 0; 4211 size_t tsLayers = 0; 4212 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4213 OMX_VIDEO_VPXTemporalLayerPatternNone; 4214 static const uint32_t kVp8LayerRateAlloction 4215 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4216 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4217 {100, 100, 100}, // 1 layer 4218 { 60, 100, 100}, // 2 layers {60%, 40%} 4219 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4220 }; 4221 if (!msg->findInt32("bitrate", &bitrate)) { 4222 return INVALID_OPERATION; 4223 } 4224 msg->findInt32("i-frame-interval", &iFrameInterval); 4225 4226 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4227 4228 float frameRate; 4229 if (!msg->findFloat("frame-rate", &frameRate)) { 4230 int32_t tmp; 4231 if (!msg->findInt32("frame-rate", &tmp)) { 4232 return INVALID_OPERATION; 4233 } 4234 frameRate = (float)tmp; 4235 } 4236 4237 AString tsSchema; 4238 if (msg->findString("ts-schema", &tsSchema)) { 4239 if (tsSchema == "webrtc.vp8.1-layer") { 4240 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4241 tsLayers = 1; 4242 } else if (tsSchema == "webrtc.vp8.2-layer") { 4243 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4244 tsLayers = 2; 4245 } else if (tsSchema == "webrtc.vp8.3-layer") { 4246 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4247 tsLayers = 3; 4248 } else { 4249 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4250 } 4251 } 4252 4253 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4254 InitOMXParams(&vp8type); 4255 vp8type.nPortIndex = kPortIndexOutput; 4256 status_t err = mOMX->getParameter( 4257 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4258 &vp8type, sizeof(vp8type)); 4259 4260 if (err == OK) { 4261 if (iFrameInterval > 0) { 4262 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4263 } 4264 vp8type.eTemporalPattern = pattern; 4265 vp8type.nTemporalLayerCount = tsLayers; 4266 if (tsLayers > 0) { 4267 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4268 vp8type.nTemporalLayerBitrateRatio[i] = 4269 kVp8LayerRateAlloction[tsLayers - 1][i]; 4270 } 4271 } 4272 if (bitrateMode == OMX_Video_ControlRateConstant) { 4273 vp8type.nMinQuantizer = 2; 4274 vp8type.nMaxQuantizer = 63; 4275 } 4276 4277 err = mOMX->setParameter( 4278 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4279 &vp8type, sizeof(vp8type)); 4280 if (err != OK) { 4281 ALOGW("Extended VP8 parameters set failed: %d", err); 4282 } 4283 } 4284 4285 return configureBitrate(bitrate, bitrateMode); 4286 } 4287 4288 status_t ACodec::verifySupportForProfileAndLevel( 4289 int32_t profile, int32_t level) { 4290 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4291 InitOMXParams(¶ms); 4292 params.nPortIndex = kPortIndexOutput; 4293 4294 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4295 params.nProfileIndex = index; 4296 status_t err = mOMX->getParameter( 4297 mNode, 4298 OMX_IndexParamVideoProfileLevelQuerySupported, 4299 ¶ms, 4300 
sizeof(params)); 4301 4302 if (err != OK) { 4303 return err; 4304 } 4305 4306 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4307 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4308 4309 if (profile == supportedProfile && level <= supportedLevel) { 4310 return OK; 4311 } 4312 4313 if (index == kMaxIndicesToCheck) { 4314 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4315 mComponentName.c_str(), index, 4316 params.eProfile, params.eLevel); 4317 } 4318 } 4319 return ERROR_UNSUPPORTED; 4320 } 4321 4322 status_t ACodec::configureBitrate( 4323 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4324 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4325 InitOMXParams(&bitrateType); 4326 bitrateType.nPortIndex = kPortIndexOutput; 4327 4328 status_t err = mOMX->getParameter( 4329 mNode, OMX_IndexParamVideoBitrate, 4330 &bitrateType, sizeof(bitrateType)); 4331 4332 if (err != OK) { 4333 return err; 4334 } 4335 4336 bitrateType.eControlRate = bitrateMode; 4337 bitrateType.nTargetBitrate = bitrate; 4338 4339 return mOMX->setParameter( 4340 mNode, OMX_IndexParamVideoBitrate, 4341 &bitrateType, sizeof(bitrateType)); 4342 } 4343 4344 status_t ACodec::setupErrorCorrectionParameters() { 4345 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4346 InitOMXParams(&errorCorrectionType); 4347 errorCorrectionType.nPortIndex = kPortIndexOutput; 4348 4349 status_t err = mOMX->getParameter( 4350 mNode, OMX_IndexParamVideoErrorCorrection, 4351 &errorCorrectionType, sizeof(errorCorrectionType)); 4352 4353 if (err != OK) { 4354 return OK; // Optional feature. Ignore this failure 4355 } 4356 4357 errorCorrectionType.bEnableHEC = OMX_FALSE; 4358 errorCorrectionType.bEnableResync = OMX_TRUE; 4359 errorCorrectionType.nResynchMarkerSpacing = 256; 4360 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4361 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4362 4363 return mOMX->setParameter( 4364 mNode, OMX_IndexParamVideoErrorCorrection, 4365 &errorCorrectionType, sizeof(errorCorrectionType)); 4366 } 4367 4368 status_t ACodec::setVideoFormatOnPort( 4369 OMX_U32 portIndex, 4370 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4371 float frameRate) { 4372 OMX_PARAM_PORTDEFINITIONTYPE def; 4373 InitOMXParams(&def); 4374 def.nPortIndex = portIndex; 4375 4376 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4377 4378 status_t err = mOMX->getParameter( 4379 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4380 if (err != OK) { 4381 return err; 4382 } 4383 4384 if (portIndex == kPortIndexInput) { 4385 // XXX Need a (much) better heuristic to compute input buffer sizes. 
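        // For now just enforce a floor of 64 KiB on whatever the component advertised.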
4386 const size_t X = 64 * 1024; 4387 if (def.nBufferSize < X) { 4388 def.nBufferSize = X; 4389 } 4390 } 4391 4392 if (def.eDomain != OMX_PortDomainVideo) { 4393 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4394 return FAILED_TRANSACTION; 4395 } 4396 4397 video_def->nFrameWidth = width; 4398 video_def->nFrameHeight = height; 4399 4400 if (portIndex == kPortIndexInput) { 4401 video_def->eCompressionFormat = compressionFormat; 4402 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4403 if (frameRate >= 0) { 4404 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4405 } 4406 } 4407 4408 err = mOMX->setParameter( 4409 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4410 4411 return err; 4412 } 4413 4414 status_t ACodec::initNativeWindow() { 4415 if (mNativeWindow != NULL) { 4416 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4417 } 4418 4419 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4420 return OK; 4421 } 4422 4423 size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4424 size_t n = 0; 4425 4426 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4427 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4428 4429 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4430 ++n; 4431 } 4432 } 4433 4434 return n; 4435 } 4436 4437 size_t ACodec::countBuffersOwnedByNativeWindow() const { 4438 size_t n = 0; 4439 4440 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4441 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4442 4443 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4444 ++n; 4445 } 4446 } 4447 4448 return n; 4449 } 4450 4451 void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4452 if (mNativeWindow == NULL) { 4453 return; 4454 } 4455 4456 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4457 && dequeueBufferFromNativeWindow() != NULL) { 4458 // these buffers will be submitted as regular buffers; account for this 4459 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4460 --mMetadataBuffersToSubmit; 4461 } 4462 } 4463 } 4464 4465 bool ACodec::allYourBuffersAreBelongToUs( 4466 OMX_U32 portIndex) { 4467 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4468 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4469 4470 if (info->mStatus != BufferInfo::OWNED_BY_US 4471 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4472 ALOGV("[%s] Buffer %u on port %u still has status %d", 4473 mComponentName.c_str(), 4474 info->mBufferID, portIndex, info->mStatus); 4475 return false; 4476 } 4477 } 4478 4479 return true; 4480 } 4481 4482 bool ACodec::allYourBuffersAreBelongToUs() { 4483 return allYourBuffersAreBelongToUs(kPortIndexInput) 4484 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4485 } 4486 4487 void ACodec::deferMessage(const sp<AMessage> &msg) { 4488 mDeferredQueue.push_back(msg); 4489 } 4490 4491 void ACodec::processDeferredMessages() { 4492 List<sp<AMessage> > queue = mDeferredQueue; 4493 mDeferredQueue.clear(); 4494 4495 List<sp<AMessage> >::iterator it = queue.begin(); 4496 while (it != queue.end()) { 4497 onMessageReceived(*it++); 4498 } 4499 } 4500 4501 // static 4502 bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4503 MediaImage2 &image = params.sMediaImage; 4504 memset(&image, 0, sizeof(image)); 4505 4506 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4507 image.mNumPlanes = 
0; 4508 4509 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4510 image.mWidth = params.nFrameWidth; 4511 image.mHeight = params.nFrameHeight; 4512 4513 // only supporting YUV420 4514 if (fmt != OMX_COLOR_FormatYUV420Planar && 4515 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4516 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4517 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4518 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4519 ALOGW("do not know color format 0x%x = %d", fmt, fmt); 4520 return false; 4521 } 4522 4523 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4524 if (params.nStride != 0 && params.nSliceHeight == 0) { 4525 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4526 params.nFrameHeight); 4527 params.nSliceHeight = params.nFrameHeight; 4528 } 4529 4530 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4531 // prevent integer overflows further down the line, and do not indicate support for 4532 // 32kx32k video. 4533 if (params.nStride == 0 || params.nSliceHeight == 0 4534 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4535 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4536 fmt, fmt, params.nStride, params.nSliceHeight); 4537 return false; 4538 } 4539 4540 // set-up YUV format 4541 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4542 image.mNumPlanes = 3; 4543 image.mBitDepth = 8; 4544 image.mBitDepthAllocated = 8; 4545 image.mPlane[image.Y].mOffset = 0; 4546 image.mPlane[image.Y].mColInc = 1; 4547 image.mPlane[image.Y].mRowInc = params.nStride; 4548 image.mPlane[image.Y].mHorizSubsampling = 1; 4549 image.mPlane[image.Y].mVertSubsampling = 1; 4550 4551 switch ((int)fmt) { 4552 case HAL_PIXEL_FORMAT_YV12: 4553 if (params.bUsingNativeBuffers) { 4554 size_t ystride = align(params.nStride, 16); 4555 size_t cstride = align(params.nStride / 2, 16); 4556 image.mPlane[image.Y].mRowInc = ystride; 4557 4558 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4559 image.mPlane[image.V].mColInc = 1; 4560 image.mPlane[image.V].mRowInc = cstride; 4561 image.mPlane[image.V].mHorizSubsampling = 2; 4562 image.mPlane[image.V].mVertSubsampling = 2; 4563 4564 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4565 + (cstride * params.nSliceHeight / 2); 4566 image.mPlane[image.U].mColInc = 1; 4567 image.mPlane[image.U].mRowInc = cstride; 4568 image.mPlane[image.U].mHorizSubsampling = 2; 4569 image.mPlane[image.U].mVertSubsampling = 2; 4570 break; 4571 } else { 4572 // fall through as YV12 is used for YUV420Planar by some codecs 4573 } 4574 4575 case OMX_COLOR_FormatYUV420Planar: 4576 case OMX_COLOR_FormatYUV420PackedPlanar: 4577 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4578 image.mPlane[image.U].mColInc = 1; 4579 image.mPlane[image.U].mRowInc = params.nStride / 2; 4580 image.mPlane[image.U].mHorizSubsampling = 2; 4581 image.mPlane[image.U].mVertSubsampling = 2; 4582 4583 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4584 + (params.nStride * params.nSliceHeight / 4); 4585 image.mPlane[image.V].mColInc = 1; 4586 image.mPlane[image.V].mRowInc = params.nStride / 2; 4587 image.mPlane[image.V].mHorizSubsampling = 2; 4588 image.mPlane[image.V].mVertSubsampling = 2; 4589 break; 4590 4591 case OMX_COLOR_FormatYUV420SemiPlanar: 4592 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4593 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4594 // NV12 4595 image.mPlane[image.U].mOffset = 
params.nStride * params.nSliceHeight; 4596 image.mPlane[image.U].mColInc = 2; 4597 image.mPlane[image.U].mRowInc = params.nStride; 4598 image.mPlane[image.U].mHorizSubsampling = 2; 4599 image.mPlane[image.U].mVertSubsampling = 2; 4600 4601 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4602 image.mPlane[image.V].mColInc = 2; 4603 image.mPlane[image.V].mRowInc = params.nStride; 4604 image.mPlane[image.V].mHorizSubsampling = 2; 4605 image.mPlane[image.V].mVertSubsampling = 2; 4606 break; 4607 4608 default: 4609 TRESPASS(); 4610 } 4611 return true; 4612 } 4613 4614 // static 4615 bool ACodec::describeColorFormat( 4616 const sp<IOMX> &omx, IOMX::node_id node, 4617 DescribeColorFormat2Params &describeParams) 4618 { 4619 OMX_INDEXTYPE describeColorFormatIndex; 4620 if (omx->getExtensionIndex( 4621 node, "OMX.google.android.index.describeColorFormat", 4622 &describeColorFormatIndex) == OK) { 4623 DescribeColorFormatParams describeParamsV1(describeParams); 4624 if (omx->getParameter( 4625 node, describeColorFormatIndex, 4626 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4627 describeParams.initFromV1(describeParamsV1); 4628 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4629 } 4630 } else if (omx->getExtensionIndex( 4631 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4632 && omx->getParameter( 4633 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4634 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4635 } 4636 4637 return describeDefaultColorFormat(describeParams); 4638 } 4639 4640 // static 4641 bool ACodec::isFlexibleColorFormat( 4642 const sp<IOMX> &omx, IOMX::node_id node, 4643 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4644 DescribeColorFormat2Params describeParams; 4645 InitOMXParams(&describeParams); 4646 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4647 // reasonable dummy values 4648 describeParams.nFrameWidth = 128; 4649 describeParams.nFrameHeight = 128; 4650 describeParams.nStride = 128; 4651 describeParams.nSliceHeight = 128; 4652 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4653 4654 CHECK(flexibleEquivalent != NULL); 4655 4656 if (!describeColorFormat(omx, node, describeParams)) { 4657 return false; 4658 } 4659 4660 const MediaImage2 &img = describeParams.sMediaImage; 4661 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4662 if (img.mNumPlanes != 3 4663 || img.mPlane[img.Y].mHorizSubsampling != 1 4664 || img.mPlane[img.Y].mVertSubsampling != 1) { 4665 return false; 4666 } 4667 4668 // YUV 420 4669 if (img.mPlane[img.U].mHorizSubsampling == 2 4670 && img.mPlane[img.U].mVertSubsampling == 2 4671 && img.mPlane[img.V].mHorizSubsampling == 2 4672 && img.mPlane[img.V].mVertSubsampling == 2) { 4673 // possible flexible YUV420 format 4674 if (img.mBitDepth <= 8) { 4675 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4676 return true; 4677 } 4678 } 4679 } 4680 return false; 4681 } 4682 4683 status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4684 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4685 OMX_PARAM_PORTDEFINITIONTYPE def; 4686 InitOMXParams(&def); 4687 def.nPortIndex = portIndex; 4688 4689 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4690 if (err != OK) { 4691 return err; 4692 } 4693 4694 if (def.eDir != (portIndex == kPortIndexOutput ? 
OMX_DirOutput : OMX_DirInput)) { 4695 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4696 return BAD_VALUE; 4697 } 4698 4699 switch (def.eDomain) { 4700 case OMX_PortDomainVideo: 4701 { 4702 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4703 switch ((int)videoDef->eCompressionFormat) { 4704 case OMX_VIDEO_CodingUnused: 4705 { 4706 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4707 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4708 4709 notify->setInt32("stride", videoDef->nStride); 4710 notify->setInt32("slice-height", videoDef->nSliceHeight); 4711 notify->setInt32("color-format", videoDef->eColorFormat); 4712 4713 if (mNativeWindow == NULL) { 4714 DescribeColorFormat2Params describeParams; 4715 InitOMXParams(&describeParams); 4716 describeParams.eColorFormat = videoDef->eColorFormat; 4717 describeParams.nFrameWidth = videoDef->nFrameWidth; 4718 describeParams.nFrameHeight = videoDef->nFrameHeight; 4719 describeParams.nStride = videoDef->nStride; 4720 describeParams.nSliceHeight = videoDef->nSliceHeight; 4721 describeParams.bUsingNativeBuffers = OMX_FALSE; 4722 4723 if (describeColorFormat(mOMX, mNode, describeParams)) { 4724 notify->setBuffer( 4725 "image-data", 4726 ABuffer::CreateAsCopy( 4727 &describeParams.sMediaImage, 4728 sizeof(describeParams.sMediaImage))); 4729 4730 MediaImage2 &img = describeParams.sMediaImage; 4731 MediaImage2::PlaneInfo *plane = img.mPlane; 4732 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4733 mComponentName.c_str(), img.mWidth, img.mHeight, 4734 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4735 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4736 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4737 } 4738 } 4739 4740 int32_t width = (int32_t)videoDef->nFrameWidth; 4741 int32_t height = (int32_t)videoDef->nFrameHeight; 4742 4743 if (portIndex == kPortIndexOutput) { 4744 OMX_CONFIG_RECTTYPE rect; 4745 InitOMXParams(&rect); 4746 rect.nPortIndex = portIndex; 4747 4748 if (mOMX->getConfig( 4749 mNode, 4750 (portIndex == kPortIndexOutput ? 4751 OMX_IndexConfigCommonOutputCrop : 4752 OMX_IndexConfigCommonInputCrop), 4753 &rect, sizeof(rect)) != OK) { 4754 rect.nLeft = 0; 4755 rect.nTop = 0; 4756 rect.nWidth = videoDef->nFrameWidth; 4757 rect.nHeight = videoDef->nFrameHeight; 4758 } 4759 4760 if (rect.nLeft < 0 || 4761 rect.nTop < 0 || 4762 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4763 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4764 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4765 rect.nLeft, rect.nTop, 4766 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4767 videoDef->nFrameWidth, videoDef->nFrameHeight); 4768 return BAD_VALUE; 4769 } 4770 4771 notify->setRect( 4772 "crop", 4773 rect.nLeft, 4774 rect.nTop, 4775 rect.nLeft + rect.nWidth - 1, 4776 rect.nTop + rect.nHeight - 1); 4777 4778 width = rect.nWidth; 4779 height = rect.nHeight; 4780 4781 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4782 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4783 width, height, mConfigFormat, notify, 4784 mUsingNativeWindow ? 
&dataSpace : NULL); 4785 if (mUsingNativeWindow) { 4786 notify->setInt32("android._dataspace", dataSpace); 4787 } 4788 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4789 } else { 4790 (void)getInputColorAspectsForVideoEncoder(notify); 4791 if (mConfigFormat->contains("hdr-static-info")) { 4792 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4793 } 4794 } 4795 4796 break; 4797 } 4798 4799 case OMX_VIDEO_CodingVP8: 4800 case OMX_VIDEO_CodingVP9: 4801 { 4802 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4803 InitOMXParams(&vp8type); 4804 vp8type.nPortIndex = kPortIndexOutput; 4805 status_t err = mOMX->getParameter( 4806 mNode, 4807 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4808 &vp8type, 4809 sizeof(vp8type)); 4810 4811 if (err == OK) { 4812 AString tsSchema = "none"; 4813 if (vp8type.eTemporalPattern 4814 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4815 switch (vp8type.nTemporalLayerCount) { 4816 case 1: 4817 { 4818 tsSchema = "webrtc.vp8.1-layer"; 4819 break; 4820 } 4821 case 2: 4822 { 4823 tsSchema = "webrtc.vp8.2-layer"; 4824 break; 4825 } 4826 case 3: 4827 { 4828 tsSchema = "webrtc.vp8.3-layer"; 4829 break; 4830 } 4831 default: 4832 { 4833 break; 4834 } 4835 } 4836 } 4837 notify->setString("ts-schema", tsSchema); 4838 } 4839 // Fall through to set up mime. 4840 } 4841 4842 default: 4843 { 4844 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4845 // should be CodingUnused 4846 ALOGE("Raw port video compression format is %s(%d)", 4847 asString(videoDef->eCompressionFormat), 4848 videoDef->eCompressionFormat); 4849 return BAD_VALUE; 4850 } 4851 AString mime; 4852 if (GetMimeTypeForVideoCoding( 4853 videoDef->eCompressionFormat, &mime) != OK) { 4854 notify->setString("mime", "application/octet-stream"); 4855 } else { 4856 notify->setString("mime", mime.c_str()); 4857 } 4858 uint32_t intraRefreshPeriod = 0; 4859 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4860 && intraRefreshPeriod > 0) { 4861 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4862 } 4863 break; 4864 } 4865 } 4866 notify->setInt32("width", videoDef->nFrameWidth); 4867 notify->setInt32("height", videoDef->nFrameHeight); 4868 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4869 portIndex == kPortIndexInput ? "input" : "output", 4870 notify->debugString().c_str()); 4871 4872 break; 4873 } 4874 4875 case OMX_PortDomainAudio: 4876 { 4877 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4878 4879 switch ((int)audioDef->eEncoding) { 4880 case OMX_AUDIO_CodingPCM: 4881 { 4882 OMX_AUDIO_PARAM_PCMMODETYPE params; 4883 InitOMXParams(¶ms); 4884 params.nPortIndex = portIndex; 4885 4886 err = mOMX->getParameter( 4887 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4888 if (err != OK) { 4889 return err; 4890 } 4891 4892 if (params.nChannels <= 0 4893 || (params.nChannels != 1 && !params.bInterleaved) 4894 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4895 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4896 params.nChannels, 4897 params.bInterleaved ? 
" interleaved" : "", 4898 params.nBitPerSample); 4899 return FAILED_TRANSACTION; 4900 } 4901 4902 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4903 notify->setInt32("channel-count", params.nChannels); 4904 notify->setInt32("sample-rate", params.nSamplingRate); 4905 4906 AudioEncoding encoding = kAudioEncodingPcm16bit; 4907 if (params.eNumData == OMX_NumericalDataUnsigned 4908 && params.nBitPerSample == 8u) { 4909 encoding = kAudioEncodingPcm8bit; 4910 } else if (params.eNumData == OMX_NumericalDataFloat 4911 && params.nBitPerSample == 32u) { 4912 encoding = kAudioEncodingPcmFloat; 4913 } else if (params.nBitPerSample != 16u 4914 || params.eNumData != OMX_NumericalDataSigned) { 4915 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4916 asString(params.eNumData), params.eNumData, 4917 asString(params.ePCMMode), params.ePCMMode); 4918 return FAILED_TRANSACTION; 4919 } 4920 notify->setInt32("pcm-encoding", encoding); 4921 4922 if (mChannelMaskPresent) { 4923 notify->setInt32("channel-mask", mChannelMask); 4924 } 4925 break; 4926 } 4927 4928 case OMX_AUDIO_CodingAAC: 4929 { 4930 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4931 InitOMXParams(¶ms); 4932 params.nPortIndex = portIndex; 4933 4934 err = mOMX->getParameter( 4935 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4936 if (err != OK) { 4937 return err; 4938 } 4939 4940 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4941 notify->setInt32("channel-count", params.nChannels); 4942 notify->setInt32("sample-rate", params.nSampleRate); 4943 break; 4944 } 4945 4946 case OMX_AUDIO_CodingAMR: 4947 { 4948 OMX_AUDIO_PARAM_AMRTYPE params; 4949 InitOMXParams(¶ms); 4950 params.nPortIndex = portIndex; 4951 4952 err = mOMX->getParameter( 4953 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4954 if (err != OK) { 4955 return err; 4956 } 4957 4958 notify->setInt32("channel-count", 1); 4959 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4960 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4961 notify->setInt32("sample-rate", 16000); 4962 } else { 4963 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4964 notify->setInt32("sample-rate", 8000); 4965 } 4966 break; 4967 } 4968 4969 case OMX_AUDIO_CodingFLAC: 4970 { 4971 OMX_AUDIO_PARAM_FLACTYPE params; 4972 InitOMXParams(¶ms); 4973 params.nPortIndex = portIndex; 4974 4975 err = mOMX->getParameter( 4976 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4977 if (err != OK) { 4978 return err; 4979 } 4980 4981 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4982 notify->setInt32("channel-count", params.nChannels); 4983 notify->setInt32("sample-rate", params.nSampleRate); 4984 break; 4985 } 4986 4987 case OMX_AUDIO_CodingMP3: 4988 { 4989 OMX_AUDIO_PARAM_MP3TYPE params; 4990 InitOMXParams(¶ms); 4991 params.nPortIndex = portIndex; 4992 4993 err = mOMX->getParameter( 4994 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4995 if (err != OK) { 4996 return err; 4997 } 4998 4999 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 5000 notify->setInt32("channel-count", params.nChannels); 5001 notify->setInt32("sample-rate", params.nSampleRate); 5002 break; 5003 } 5004 5005 case OMX_AUDIO_CodingVORBIS: 5006 { 5007 OMX_AUDIO_PARAM_VORBISTYPE params; 5008 InitOMXParams(¶ms); 5009 params.nPortIndex = portIndex; 5010 5011 err = mOMX->getParameter( 5012 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5013 if (err != OK) { 5014 return err; 5015 } 5016 5017 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5018 notify->setInt32("channel-count", 
params.nChannels); 5019 notify->setInt32("sample-rate", params.nSampleRate); 5020 break; 5021 } 5022 5023 case OMX_AUDIO_CodingAndroidAC3: 5024 { 5025 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5026 InitOMXParams(¶ms); 5027 params.nPortIndex = portIndex; 5028 5029 err = mOMX->getParameter( 5030 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5031 ¶ms, sizeof(params)); 5032 if (err != OK) { 5033 return err; 5034 } 5035 5036 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3); 5037 notify->setInt32("channel-count", params.nChannels); 5038 notify->setInt32("sample-rate", params.nSampleRate); 5039 break; 5040 } 5041 5042 case OMX_AUDIO_CodingAndroidEAC3: 5043 { 5044 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5045 InitOMXParams(¶ms); 5046 params.nPortIndex = portIndex; 5047 5048 err = mOMX->getParameter( 5049 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5050 ¶ms, sizeof(params)); 5051 if (err != OK) { 5052 return err; 5053 } 5054 5055 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5056 notify->setInt32("channel-count", params.nChannels); 5057 notify->setInt32("sample-rate", params.nSampleRate); 5058 break; 5059 } 5060 5061 case OMX_AUDIO_CodingAndroidOPUS: 5062 { 5063 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5064 InitOMXParams(¶ms); 5065 params.nPortIndex = portIndex; 5066 5067 err = mOMX->getParameter( 5068 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5069 ¶ms, sizeof(params)); 5070 if (err != OK) { 5071 return err; 5072 } 5073 5074 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5075 notify->setInt32("channel-count", params.nChannels); 5076 notify->setInt32("sample-rate", params.nSampleRate); 5077 break; 5078 } 5079 5080 case OMX_AUDIO_CodingG711: 5081 { 5082 OMX_AUDIO_PARAM_PCMMODETYPE params; 5083 InitOMXParams(¶ms); 5084 params.nPortIndex = portIndex; 5085 5086 err = mOMX->getParameter( 5087 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5088 if (err != OK) { 5089 return err; 5090 } 5091 5092 const char *mime = NULL; 5093 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5094 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5095 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5096 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5097 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5098 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5099 } 5100 notify->setString("mime", mime); 5101 notify->setInt32("channel-count", params.nChannels); 5102 notify->setInt32("sample-rate", params.nSamplingRate); 5103 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5104 break; 5105 } 5106 5107 case OMX_AUDIO_CodingGSMFR: 5108 { 5109 OMX_AUDIO_PARAM_PCMMODETYPE params; 5110 InitOMXParams(¶ms); 5111 params.nPortIndex = portIndex; 5112 5113 err = mOMX->getParameter( 5114 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5115 if (err != OK) { 5116 return err; 5117 } 5118 5119 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5120 notify->setInt32("channel-count", params.nChannels); 5121 notify->setInt32("sample-rate", params.nSamplingRate); 5122 break; 5123 } 5124 5125 default: 5126 ALOGE("Unsupported audio coding: %s(%d)\n", 5127 asString(audioDef->eEncoding), audioDef->eEncoding); 5128 return BAD_TYPE; 5129 } 5130 break; 5131 } 5132 5133 default: 5134 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5135 return BAD_TYPE; 5136 } 5137 5138 return OK; 5139 } 5140 5141 void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5142 // aspects are normally communicated in ColorAspects 5143 int32_t range, standard, 
            transfer;
    convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);

    // if some aspects are unspecified, use dataspace fields
    if (range == 0) {
        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
    }
    if (standard == 0) {
        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
    }
    if (transfer == 0) {
        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
    }

    mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
    if (range != 0) {
        mOutputFormat->setInt32("color-range", range);
    }
    if (standard != 0) {
        mOutputFormat->setInt32("color-standard", standard);
    }
    if (transfer != 0) {
        mOutputFormat->setInt32("color-transfer", transfer);
    }

    ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "(R:%d(%s), S:%d(%s), T:%d(%s))",
            dataSpace,
            aspects.mRange, asString(aspects.mRange),
            aspects.mPrimaries, asString(aspects.mPrimaries),
            aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
            aspects.mTransfer, asString(aspects.mTransfer),
            range, asString((ColorRange)range),
            standard, asString((ColorStandard)standard),
            transfer, asString((ColorTransfer)transfer));
}

void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) {
    // store new output format, at the same time mark that this is no longer the first frame
    mOutputFormat = mBaseOutputFormat->dup();

    if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) {
        ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
        return;
    }

    if (expectedFormat != NULL) {
        sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat);
        sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat);
        if (changes->countEntries() != 0 || to->countEntries() != 0) {
            ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s",
                    mComponentName.c_str(),
                    changes->debugString(4).c_str(), to->debugString(4).c_str());
        }
    }

    if (!mIsVideo && !mIsEncoder) {
        AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
        (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);

        mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
        if (mConverter[kPortIndexOutput] != NULL) {
            mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
        }
    }

    if (mTunneled) {
        sendFormatChange();
    }
}

void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
5232 } 5233 } 5234 5235 void ACodec::sendFormatChange() { 5236 AString mime; 5237 CHECK(mOutputFormat->findString("mime", &mime)); 5238 5239 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5240 int32_t channelCount; 5241 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5242 if (mSkipCutBuffer != NULL) { 5243 size_t prevbufsize = mSkipCutBuffer->size(); 5244 if (prevbufsize != 0) { 5245 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5246 } 5247 } 5248 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5249 } 5250 5251 sp<AMessage> notify = mNotify->dup(); 5252 notify->setInt32("what", kWhatOutputFormatChanged); 5253 notify->setMessage("format", mOutputFormat); 5254 notify->post(); 5255 5256 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5257 mLastOutputFormat = mOutputFormat; 5258 } 5259 5260 void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5261 sp<AMessage> notify = mNotify->dup(); 5262 notify->setInt32("what", CodecBase::kWhatError); 5263 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5264 5265 if (internalError == UNKNOWN_ERROR) { // find better error code 5266 const status_t omxStatus = statusFromOMXError(error); 5267 if (omxStatus != 0) { 5268 internalError = omxStatus; 5269 } else { 5270 ALOGW("Invalid OMX error %#x", error); 5271 } 5272 } 5273 5274 mFatalError = true; 5275 5276 notify->setInt32("err", internalError); 5277 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5278 notify->post(); 5279 } 5280 5281 //////////////////////////////////////////////////////////////////////////////// 5282 5283 ACodec::PortDescription::PortDescription() { 5284 } 5285 5286 status_t ACodec::requestIDRFrame() { 5287 if (!mIsEncoder) { 5288 return ERROR_UNSUPPORTED; 5289 } 5290 5291 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5292 InitOMXParams(¶ms); 5293 5294 params.nPortIndex = kPortIndexOutput; 5295 params.IntraRefreshVOP = OMX_TRUE; 5296 5297 return mOMX->setConfig( 5298 mNode, 5299 OMX_IndexConfigVideoIntraVOPRefresh, 5300 ¶ms, 5301 sizeof(params)); 5302 } 5303 5304 void ACodec::PortDescription::addBuffer( 5305 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5306 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5307 mBufferIDs.push_back(id); 5308 mBuffers.push_back(buffer); 5309 mHandles.push_back(handle); 5310 mMemRefs.push_back(memRef); 5311 } 5312 5313 size_t ACodec::PortDescription::countBuffers() { 5314 return mBufferIDs.size(); 5315 } 5316 5317 IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5318 return mBufferIDs.itemAt(index); 5319 } 5320 5321 sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5322 return mBuffers.itemAt(index); 5323 } 5324 5325 sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5326 return mHandles.itemAt(index); 5327 } 5328 5329 sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const { 5330 return mMemRefs.itemAt(index); 5331 } 5332 5333 //////////////////////////////////////////////////////////////////////////////// 5334 5335 ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5336 : AState(parentState), 5337 mCodec(codec) { 5338 } 5339 5340 ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5341 OMX_U32 /* portIndex */) { 5342 return KEEP_BUFFERS; 5343 } 5344 5345 bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5346 
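    // Handle messages that are common to all states; returning false below means this
    // base state does not recognize the message and it is left unhandled.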
switch (msg->what()) { 5347 case kWhatInputBufferFilled: 5348 { 5349 onInputBufferFilled(msg); 5350 break; 5351 } 5352 5353 case kWhatOutputBufferDrained: 5354 { 5355 onOutputBufferDrained(msg); 5356 break; 5357 } 5358 5359 case ACodec::kWhatOMXMessageList: 5360 { 5361 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5362 } 5363 5364 case ACodec::kWhatOMXMessageItem: 5365 { 5366 // no need to check as we already did it for kWhatOMXMessageList 5367 return onOMXMessage(msg); 5368 } 5369 5370 case ACodec::kWhatOMXMessage: 5371 { 5372 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5373 } 5374 5375 case ACodec::kWhatSetSurface: 5376 { 5377 sp<AReplyToken> replyID; 5378 CHECK(msg->senderAwaitsResponse(&replyID)); 5379 5380 sp<RefBase> obj; 5381 CHECK(msg->findObject("surface", &obj)); 5382 5383 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5384 5385 sp<AMessage> response = new AMessage; 5386 response->setInt32("err", err); 5387 response->postReply(replyID); 5388 break; 5389 } 5390 5391 case ACodec::kWhatCreateInputSurface: 5392 case ACodec::kWhatSetInputSurface: 5393 case ACodec::kWhatSignalEndOfInputStream: 5394 { 5395 // This may result in an app illegal state exception. 5396 ALOGE("Message 0x%x was not handled", msg->what()); 5397 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5398 return true; 5399 } 5400 5401 case ACodec::kWhatOMXDied: 5402 { 5403 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5404 ALOGE("OMX/mediaserver died, signalling error!"); 5405 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5406 break; 5407 } 5408 5409 case ACodec::kWhatReleaseCodecInstance: 5410 { 5411 ALOGI("[%s] forcing the release of codec", 5412 mCodec->mComponentName.c_str()); 5413 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5414 ALOGE_IF(err != OK, "[%s] failed to release codec instance: err=%d", 5415 mCodec->mComponentName.c_str(), err); 5416 sp<AMessage> notify = mCodec->mNotify->dup(); 5417 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5418 notify->post(); 5419 break; 5420 } 5421 5422 default: 5423 return false; 5424 } 5425 5426 return true; 5427 } 5428 5429 bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5430 // there is a possibility that this is an outstanding message for a 5431 // codec that we have already destroyed 5432 if (mCodec->mNode == 0) { 5433 ALOGI("ignoring message as already freed component: %s", 5434 msg->debugString().c_str()); 5435 return false; 5436 } 5437 5438 IOMX::node_id nodeID; 5439 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5440 if (nodeID != mCodec->mNode) { 5441 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5442 return false; 5443 } 5444 return true; 5445 } 5446 5447 bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5448 sp<RefBase> obj; 5449 CHECK(msg->findObject("messages", &obj)); 5450 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5451 5452 bool receivedRenderedEvents = false; 5453 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5454 it != msgList->getList().cend(); ++it) { 5455 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5456 mCodec->handleMessage(*it); 5457 int32_t type; 5458 CHECK((*it)->findInt32("type", &type)); 5459 if (type == omx_message::FRAME_RENDERED) { 5460 receivedRenderedEvents = true; 5461 } 5462 } 5463 5464 if (receivedRenderedEvents) { 5465 // NOTE: all buffers are rendered in this case 5466
mCodec->notifyOfRenderedFrames(); 5467 } 5468 return true; 5469 } 5470 5471 bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5472 int32_t type; 5473 CHECK(msg->findInt32("type", &type)); 5474 5475 switch (type) { 5476 case omx_message::EVENT: 5477 { 5478 int32_t event, data1, data2; 5479 CHECK(msg->findInt32("event", &event)); 5480 CHECK(msg->findInt32("data1", &data1)); 5481 CHECK(msg->findInt32("data2", &data2)); 5482 5483 if (event == OMX_EventCmdComplete 5484 && data1 == OMX_CommandFlush 5485 && data2 == (int32_t)OMX_ALL) { 5486 // Use of this notification is not consistent across 5487 // implementations. We'll drop this notification and rely 5488 // on flush-complete notifications on the individual port 5489 // indices instead. 5490 5491 return true; 5492 } 5493 5494 return onOMXEvent( 5495 static_cast<OMX_EVENTTYPE>(event), 5496 static_cast<OMX_U32>(data1), 5497 static_cast<OMX_U32>(data2)); 5498 } 5499 5500 case omx_message::EMPTY_BUFFER_DONE: 5501 { 5502 IOMX::buffer_id bufferID; 5503 int32_t fenceFd; 5504 5505 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5506 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5507 5508 return onOMXEmptyBufferDone(bufferID, fenceFd); 5509 } 5510 5511 case omx_message::FILL_BUFFER_DONE: 5512 { 5513 IOMX::buffer_id bufferID; 5514 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5515 5516 int32_t rangeOffset, rangeLength, flags, fenceFd; 5517 int64_t timeUs; 5518 5519 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5520 CHECK(msg->findInt32("range_length", &rangeLength)); 5521 CHECK(msg->findInt32("flags", &flags)); 5522 CHECK(msg->findInt64("timestamp", &timeUs)); 5523 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5524 5525 return onOMXFillBufferDone( 5526 bufferID, 5527 (size_t)rangeOffset, (size_t)rangeLength, 5528 (OMX_U32)flags, 5529 timeUs, 5530 fenceFd); 5531 } 5532 5533 case omx_message::FRAME_RENDERED: 5534 { 5535 int64_t mediaTimeUs, systemNano; 5536 5537 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5538 CHECK(msg->findInt64("system_nano", &systemNano)); 5539 5540 return onOMXFrameRendered( 5541 mediaTimeUs, systemNano); 5542 } 5543 5544 default: 5545 ALOGE("Unexpected message type: %d", type); 5546 return false; 5547 } 5548 } 5549 5550 bool ACodec::BaseState::onOMXFrameRendered( 5551 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5552 // ignore outside of Executing and PortSettingsChanged states 5553 return true; 5554 } 5555 5556 bool ACodec::BaseState::onOMXEvent( 5557 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5558 if (event == OMX_EventDataSpaceChanged) { 5559 ColorAspects aspects; 5560 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5561 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5562 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5563 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5564 5565 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5566 return true; 5567 } 5568 5569 if (event != OMX_EventError) { 5570 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5571 mCodec->mComponentName.c_str(), event, data1, data2); 5572 5573 return false; 5574 } 5575 5576 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5577 5578 // verify OMX component sends back an error we expect. 
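// Error codes outside the recognized OMX range are coerced to OMX_ErrorUndefined below,
// so MediaCodec always receives a well-formed fatal error.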
5579 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5580 if (!isOMXError(omxError)) { 5581 ALOGW("Invalid OMX error %#x", omxError); 5582 omxError = OMX_ErrorUndefined; 5583 } 5584 mCodec->signalError(omxError); 5585 5586 return true; 5587 } 5588 5589 bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5590 ALOGV("[%s] onOMXEmptyBufferDone %u", 5591 mCodec->mComponentName.c_str(), bufferID); 5592 5593 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5594 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5595 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5596 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5597 mCodec->dumpBuffers(kPortIndexInput); 5598 if (fenceFd >= 0) { 5599 ::close(fenceFd); 5600 } 5601 return false; 5602 } 5603 info->mStatus = BufferInfo::OWNED_BY_US; 5604 5605 // input buffers cannot take fences, so wait for any fence now 5606 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5607 fenceFd = -1; 5608 5609 // still save fence for completeness 5610 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5611 5612 // We're in "store-metadata-in-buffers" mode, the underlying 5613 // OMX component had access to data that's implicitly refcounted 5614 // by this "MediaBuffer" object. Now that the OMX component has 5615 // told us that it's done with the input buffer, we can decrement 5616 // the mediaBuffer's reference count. 5617 info->mData->setMediaBufferBase(NULL); 5618 5619 PortMode mode = getPortMode(kPortIndexInput); 5620 5621 switch (mode) { 5622 case KEEP_BUFFERS: 5623 break; 5624 5625 case RESUBMIT_BUFFERS: 5626 postFillThisBuffer(info); 5627 break; 5628 5629 case FREE_BUFFERS: 5630 default: 5631 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5632 return false; 5633 } 5634 5635 return true; 5636 } 5637 5638 void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5639 if (mCodec->mPortEOS[kPortIndexInput]) { 5640 return; 5641 } 5642 5643 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5644 5645 sp<AMessage> notify = mCodec->mNotify->dup(); 5646 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5647 notify->setInt32("buffer-id", info->mBufferID); 5648 5649 info->mData->meta()->clear(); 5650 notify->setBuffer("buffer", info->mData); 5651 5652 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5653 reply->setInt32("buffer-id", info->mBufferID); 5654 5655 notify->setMessage("reply", reply); 5656 5657 notify->post(); 5658 5659 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5660 } 5661 5662 void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5663 IOMX::buffer_id bufferID; 5664 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5665 sp<ABuffer> buffer; 5666 int32_t err = OK; 5667 bool eos = false; 5668 PortMode mode = getPortMode(kPortIndexInput); 5669 5670 if (!msg->findBuffer("buffer", &buffer)) { 5671 /* these are unfilled buffers returned by client */ 5672 CHECK(msg->findInt32("err", &err)); 5673 5674 if (err == OK) { 5675 /* buffers with no errors are returned on MediaCodec.flush */ 5676 mode = KEEP_BUFFERS; 5677 } else { 5678 ALOGV("[%s] saw error %d instead of an input buffer", 5679 mCodec->mComponentName.c_str(), err); 5680 eos = true; 5681 } 5682 5683 buffer.clear(); 5684 } 5685 5686 int32_t tmp; 5687 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5688 eos = true; 5689 err = ERROR_END_OF_STREAM; 5690 } 5691 5692 BufferInfo *info = 
mCodec->findBufferByID(kPortIndexInput, bufferID); 5693 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5694 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5695 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5696 mCodec->dumpBuffers(kPortIndexInput); 5697 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5698 return; 5699 } 5700 5701 info->mStatus = BufferInfo::OWNED_BY_US; 5702 5703 switch (mode) { 5704 case KEEP_BUFFERS: 5705 { 5706 if (eos) { 5707 if (!mCodec->mPortEOS[kPortIndexInput]) { 5708 mCodec->mPortEOS[kPortIndexInput] = true; 5709 mCodec->mInputEOSResult = err; 5710 } 5711 } 5712 break; 5713 } 5714 5715 case RESUBMIT_BUFFERS: 5716 { 5717 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5718 // Do not send empty input buffer w/o EOS to the component. 5719 if (buffer->size() == 0 && !eos) { 5720 postFillThisBuffer(info); 5721 break; 5722 } 5723 5724 int64_t timeUs; 5725 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5726 5727 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5728 5729 MetadataBufferType metaType = mCodec->mInputMetadataType; 5730 int32_t isCSD = 0; 5731 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5732 if (mCodec->mIsLegacyVP9Decoder) { 5733 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5734 mCodec->mComponentName.c_str(), bufferID); 5735 postFillThisBuffer(info); 5736 break; 5737 } 5738 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5739 metaType = kMetadataBufferTypeInvalid; 5740 } 5741 5742 if (eos) { 5743 flags |= OMX_BUFFERFLAG_EOS; 5744 } 5745 5746 if (buffer != info->mCodecData) { 5747 ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)", 5748 mCodec->mComponentName.c_str(), 5749 bufferID, 5750 buffer.get(), info->mCodecData.get()); 5751 5752 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5753 if (converter == NULL || isCSD) { 5754 converter = getCopyConverter(); 5755 } 5756 status_t err = converter->convert(buffer, info->mCodecData); 5757 if (err != OK) { 5758 mCodec->signalError(OMX_ErrorUndefined, err); 5759 return; 5760 } 5761 } 5762 5763 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5764 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5765 mCodec->mComponentName.c_str(), bufferID); 5766 } else if (flags & OMX_BUFFERFLAG_EOS) { 5767 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5768 mCodec->mComponentName.c_str(), bufferID); 5769 } else { 5770 #if TRACK_BUFFER_TIMING 5771 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5772 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5773 #else 5774 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5775 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5776 #endif 5777 } 5778 5779 #if TRACK_BUFFER_TIMING 5780 ACodec::BufferStats stats; 5781 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5782 stats.mFillBufferDoneTimeUs = -1ll; 5783 mCodec->mBufferStats.add(timeUs, stats); 5784 #endif 5785 5786 if (mCodec->storingMetadataInDecodedBuffers()) { 5787 // try to submit an output buffer for each input buffer 5788 PortMode outputMode = getPortMode(kPortIndexOutput); 5789 5790 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5791 mCodec->mMetadataBuffersToSubmit, 5792 (outputMode == FREE_BUFFERS ? "FREE" : 5793 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5794 if (outputMode == RESUBMIT_BUFFERS) { 5795 mCodec->submitOutputMetadataBuffer(); 5796 } 5797 } 5798 info->checkReadFence("onInputBufferFilled"); 5799 5800 status_t err2 = OK; 5801 switch (metaType) { 5802 case kMetadataBufferTypeInvalid: 5803 break; 5804 #ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5805 case kMetadataBufferTypeNativeHandleSource: 5806 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) { 5807 VideoNativeHandleMetadata *vnhmd = 5808 (VideoNativeHandleMetadata*)info->mCodecData->base(); 5809 err2 = mCodec->mOMX->updateNativeHandleInMeta( 5810 mCodec->mNode, kPortIndexInput, 5811 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */), 5812 bufferID); 5813 } 5814 break; 5815 case kMetadataBufferTypeANWBuffer: 5816 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) { 5817 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base(); 5818 err2 = mCodec->mOMX->updateGraphicBufferInMeta( 5819 mCodec->mNode, kPortIndexInput, 5820 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */), 5821 bufferID); 5822 } 5823 break; 5824 #endif 5825 default: 5826 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode", 5827 asString(metaType), info->mCodecData->size(), 5828 sizeof(buffer_handle_t) * 8); 5829 err2 = ERROR_UNSUPPORTED; 5830 break; 5831 } 5832 5833 if (err2 == OK) { 5834 err2 = mCodec->mOMX->emptyBuffer( 5835 mCodec->mNode, 5836 bufferID, 5837 0, 5838 info->mCodecData->size(), 5839 flags, 5840 timeUs, 5841 info->mFenceFd); 5842 } 5843 info->mFenceFd = -1; 5844 if (err2 != OK) { 5845 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5846 return; 5847 } 5848 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5849 5850 if (!eos && err == OK) { 5851 getMoreInputDataIfPossible(); 5852 } else { 5853 ALOGV("[%s] Signalled EOS (%d) on the input port", 5854 mCodec->mComponentName.c_str(), err); 5855 5856 mCodec->mPortEOS[kPortIndexInput] = true; 5857 mCodec->mInputEOSResult = err; 5858 } 5859 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5860 if (err != OK && err != ERROR_END_OF_STREAM) { 5861 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5862 mCodec->mComponentName.c_str(), err); 5863 } else { 5864 ALOGV("[%s] Signalling EOS on the input port", 5865 mCodec->mComponentName.c_str()); 5866 } 5867 5868 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5869 mCodec->mComponentName.c_str(), bufferID); 5870 5871 info->checkReadFence("onInputBufferFilled"); 5872 status_t err2 = mCodec->mOMX->emptyBuffer( 5873 mCodec->mNode, 5874 bufferID, 5875 0, 5876 0, 5877 OMX_BUFFERFLAG_EOS, 5878 0, 5879 info->mFenceFd); 5880 info->mFenceFd = -1; 5881 if (err2 != OK) { 5882 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5883 return; 5884 } 5885 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5886 5887 mCodec->mPortEOS[kPortIndexInput] = true; 5888 mCodec->mInputEOSResult = err; 5889 } 5890 break; 5891 } 5892 5893 case FREE_BUFFERS: 5894 break; 5895 5896 default: 5897 ALOGE("invalid port mode: %d", mode); 5898 break; 5899 } 5900 } 5901 5902 void ACodec::BaseState::getMoreInputDataIfPossible() { 5903 if (mCodec->mPortEOS[kPortIndexInput]) { 5904 return; 5905 } 5906 5907 BufferInfo *eligible = NULL; 5908 5909 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5910 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5911 5912 #if 0 5913 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5914 // There's already a 
"read" pending. 5915 return; 5916 } 5917 #endif 5918 5919 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5920 eligible = info; 5921 } 5922 } 5923 5924 if (eligible == NULL) { 5925 return; 5926 } 5927 5928 postFillThisBuffer(eligible); 5929 } 5930 5931 bool ACodec::BaseState::onOMXFillBufferDone( 5932 IOMX::buffer_id bufferID, 5933 size_t rangeOffset, size_t rangeLength, 5934 OMX_U32 flags, 5935 int64_t timeUs, 5936 int fenceFd) { 5937 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5938 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5939 5940 ssize_t index; 5941 status_t err= OK; 5942 5943 #if TRACK_BUFFER_TIMING 5944 index = mCodec->mBufferStats.indexOfKey(timeUs); 5945 if (index >= 0) { 5946 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5947 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5948 5949 ALOGI("frame PTS %lld: %lld", 5950 timeUs, 5951 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5952 5953 mCodec->mBufferStats.removeItemsAt(index); 5954 stats = NULL; 5955 } 5956 #endif 5957 5958 BufferInfo *info = 5959 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5960 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5961 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5962 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5963 mCodec->dumpBuffers(kPortIndexOutput); 5964 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5965 if (fenceFd >= 0) { 5966 ::close(fenceFd); 5967 } 5968 return true; 5969 } 5970 5971 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5972 info->mStatus = BufferInfo::OWNED_BY_US; 5973 5974 if (info->mRenderInfo != NULL) { 5975 // The fence for an emptied buffer must have signaled, but there still could be queued 5976 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5977 // as we will soon requeue this buffer to the surface. While in theory we could still keep 5978 // track of buffers that are requeued to the surface, it is better to add support to the 5979 // buffer-queue to notify us of released buffers and their fences (in the future). 
5980 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5981 } 5982 5983 // byte buffers cannot take fences, so wait for any fence now 5984 if (mCodec->mNativeWindow == NULL) { 5985 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5986 fenceFd = -1; 5987 } 5988 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5989 5990 PortMode mode = getPortMode(kPortIndexOutput); 5991 5992 switch (mode) { 5993 case KEEP_BUFFERS: 5994 break; 5995 5996 case RESUBMIT_BUFFERS: 5997 { 5998 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5999 || mCodec->mPortEOS[kPortIndexOutput])) { 6000 ALOGV("[%s] calling fillBuffer %u", 6001 mCodec->mComponentName.c_str(), info->mBufferID); 6002 6003 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6004 info->mFenceFd = -1; 6005 if (err != OK) { 6006 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6007 return true; 6008 } 6009 6010 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6011 break; 6012 } 6013 6014 sp<AMessage> reply = 6015 new AMessage(kWhatOutputBufferDrained, mCodec); 6016 6017 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 6018 // pretend that output format has changed on the first frame (we used to do this) 6019 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 6020 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 6021 } 6022 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 6023 mCodec->sendFormatChange(); 6024 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) { 6025 // If potentially rendering onto a surface, always save key format data (crop & 6026 // data space) so that we can set it if and once the buffer is rendered. 6027 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 6028 } 6029 6030 if (mCodec->usingMetadataOnEncoderOutput()) { 6031 native_handle_t *handle = NULL; 6032 VideoNativeHandleMetadata &nativeMeta = 6033 *(VideoNativeHandleMetadata *)info->mData->data(); 6034 if (info->mData->size() >= sizeof(nativeMeta) 6035 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 6036 #ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 6037 // handle is only valid on 32-bit/mediaserver process 6038 handle = NULL; 6039 #else 6040 handle = (native_handle_t *)nativeMeta.pHandle; 6041 #endif 6042 } 6043 info->mData->meta()->setPointer("handle", handle); 6044 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 6045 info->mData->meta()->setInt32("rangeLength", rangeLength); 6046 } else if (info->mData == info->mCodecData) { 6047 info->mData->setRange(rangeOffset, rangeLength); 6048 } else { 6049 info->mCodecData->setRange(rangeOffset, rangeLength); 6050 // in this case we know that mConverter is not null 6051 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 6052 info->mCodecData, info->mData); 6053 if (err != OK) { 6054 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6055 return true; 6056 } 6057 } 6058 #if 0 6059 if (mCodec->mNativeWindow == NULL) { 6060 if (IsIDR(info->mData)) { 6061 ALOGI("IDR frame"); 6062 } 6063 } 6064 #endif 6065 6066 if (mCodec->mSkipCutBuffer != NULL) { 6067 mCodec->mSkipCutBuffer->submit(info->mData); 6068 } 6069 info->mData->meta()->setInt64("timeUs", timeUs); 6070 6071 sp<AMessage> notify = mCodec->mNotify->dup(); 6072 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 6073 notify->setInt32("buffer-id", info->mBufferID); 6074 notify->setBuffer("buffer", info->mData); 6075 notify->setInt32("flags", flags); 6076 6077 
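// The reply message created above (kWhatOutputBufferDrained) is posted back to ACodec
// once the buffer is released downstream; see onOutputBufferDrained().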
reply->setInt32("buffer-id", info->mBufferID); 6078 6079 notify->setMessage("reply", reply); 6080 6081 notify->post(); 6082 6083 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 6084 6085 if (flags & OMX_BUFFERFLAG_EOS) { 6086 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 6087 6088 sp<AMessage> notify = mCodec->mNotify->dup(); 6089 notify->setInt32("what", CodecBase::kWhatEOS); 6090 notify->setInt32("err", mCodec->mInputEOSResult); 6091 notify->post(); 6092 6093 mCodec->mPortEOS[kPortIndexOutput] = true; 6094 } 6095 break; 6096 } 6097 6098 case FREE_BUFFERS: 6099 err = mCodec->freeBuffer(kPortIndexOutput, index); 6100 if (err != OK) { 6101 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6102 return true; 6103 } 6104 break; 6105 6106 default: 6107 ALOGE("Invalid port mode: %d", mode); 6108 return false; 6109 } 6110 6111 return true; 6112 } 6113 6114 void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 6115 IOMX::buffer_id bufferID; 6116 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 6117 ssize_t index; 6118 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6119 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6120 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 6121 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6122 mCodec->dumpBuffers(kPortIndexOutput); 6123 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6124 return; 6125 } 6126 6127 android_native_rect_t crop; 6128 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom) 6129 && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) { 6130 mCodec->mLastNativeWindowCrop = crop; 6131 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 6132 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 6133 } 6134 6135 int32_t dataSpace; 6136 if (msg->findInt32("dataspace", &dataSpace) 6137 && dataSpace != mCodec->mLastNativeWindowDataSpace) { 6138 status_t err = native_window_set_buffers_data_space( 6139 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6140 mCodec->mLastNativeWindowDataSpace = dataSpace; 6141 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6142 } 6143 6144 int32_t render; 6145 if (mCodec->mNativeWindow != NULL 6146 && msg->findInt32("render", &render) && render != 0 6147 && info->mData != NULL && info->mData->size() != 0) { 6148 ATRACE_NAME("render"); 6149 // The client wants this buffer to be rendered. 
6150 6151 // save buffers sent to the surface so we can get render time when they return 6152 int64_t mediaTimeUs = -1; 6153 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6154 if (mediaTimeUs >= 0) { 6155 mCodec->mRenderTracker.onFrameQueued( 6156 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6157 } 6158 6159 int64_t timestampNs = 0; 6160 if (!msg->findInt64("timestampNs", &timestampNs)) { 6161 // use media timestamp if client did not request a specific render timestamp 6162 if (info->mData->meta()->findInt64("timeUs", &timestampNs)) { 6163 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6164 timestampNs *= 1000; 6165 } 6166 } 6167 6168 status_t err; 6169 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6170 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6171 6172 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6173 err = mCodec->mNativeWindow->queueBuffer( 6174 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6175 info->mFenceFd = -1; 6176 if (err == OK) { 6177 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6178 } else { 6179 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6180 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6181 info->mStatus = BufferInfo::OWNED_BY_US; 6182 // keeping read fence as write fence to avoid clobbering 6183 info->mIsReadFence = false; 6184 } 6185 } else { 6186 if (mCodec->mNativeWindow != NULL && 6187 (info->mData == NULL || info->mData->size() != 0)) { 6188 // move read fence into write fence to avoid clobbering 6189 info->mIsReadFence = false; 6190 ATRACE_NAME("frame-drop"); 6191 } 6192 info->mStatus = BufferInfo::OWNED_BY_US; 6193 } 6194 6195 PortMode mode = getPortMode(kPortIndexOutput); 6196 6197 switch (mode) { 6198 case KEEP_BUFFERS: 6199 { 6200 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6201 6202 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6203 // We cannot resubmit the buffer we just rendered, dequeue 6204 // the spare instead. 6205 6206 info = mCodec->dequeueBufferFromNativeWindow(); 6207 } 6208 break; 6209 } 6210 6211 case RESUBMIT_BUFFERS: 6212 { 6213 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6214 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6215 // We cannot resubmit the buffer we just rendered, dequeue 6216 // the spare instead.
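// dequeueBufferFromNativeWindow() may return NULL (e.g. if no free buffer can be obtained
// from the surface); the NULL check below then skips resubmission.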
6217 6218 info = mCodec->dequeueBufferFromNativeWindow(); 6219 } 6220 6221 if (info != NULL) { 6222 ALOGV("[%s] calling fillBuffer %u", 6223 mCodec->mComponentName.c_str(), info->mBufferID); 6224 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6225 status_t err = mCodec->mOMX->fillBuffer( 6226 mCodec->mNode, info->mBufferID, info->mFenceFd); 6227 info->mFenceFd = -1; 6228 if (err == OK) { 6229 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6230 } else { 6231 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6232 } 6233 } 6234 } 6235 break; 6236 } 6237 6238 case FREE_BUFFERS: 6239 { 6240 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6241 if (err != OK) { 6242 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6243 } 6244 break; 6245 } 6246 6247 default: 6248 ALOGE("Invalid port mode: %d", mode); 6249 return; 6250 } 6251 } 6252 6253 //////////////////////////////////////////////////////////////////////////////// 6254 6255 ACodec::UninitializedState::UninitializedState(ACodec *codec) 6256 : BaseState(codec) { 6257 } 6258 6259 void ACodec::UninitializedState::stateEntered() { 6260 ALOGV("Now uninitialized"); 6261 6262 if (mDeathNotifier != NULL) { 6263 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6264 mDeathNotifier.clear(); 6265 } 6266 6267 mCodec->mUsingNativeWindow = false; 6268 mCodec->mNativeWindow.clear(); 6269 mCodec->mNativeWindowUsageBits = 0; 6270 mCodec->mNode = 0; 6271 mCodec->mOMX.clear(); 6272 mCodec->mQuirks = 0; 6273 mCodec->mFlags = 0; 6274 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6275 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6276 mCodec->mConverter[0].clear(); 6277 mCodec->mConverter[1].clear(); 6278 mCodec->mComponentName.clear(); 6279 } 6280 6281 bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6282 bool handled = false; 6283 6284 switch (msg->what()) { 6285 case ACodec::kWhatSetup: 6286 { 6287 onSetup(msg); 6288 6289 handled = true; 6290 break; 6291 } 6292 6293 case ACodec::kWhatAllocateComponent: 6294 { 6295 onAllocateComponent(msg); 6296 handled = true; 6297 break; 6298 } 6299 6300 case ACodec::kWhatShutdown: 6301 { 6302 int32_t keepComponentAllocated; 6303 CHECK(msg->findInt32( 6304 "keepComponentAllocated", &keepComponentAllocated)); 6305 ALOGW_IF(keepComponentAllocated, 6306 "cannot keep component allocated on shutdown in Uninitialized state"); 6307 6308 sp<AMessage> notify = mCodec->mNotify->dup(); 6309 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6310 notify->post(); 6311 6312 handled = true; 6313 break; 6314 } 6315 6316 case ACodec::kWhatFlush: 6317 { 6318 sp<AMessage> notify = mCodec->mNotify->dup(); 6319 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6320 notify->post(); 6321 6322 handled = true; 6323 break; 6324 } 6325 6326 case ACodec::kWhatReleaseCodecInstance: 6327 { 6328 // nothing to do, as we have already signaled shutdown 6329 handled = true; 6330 break; 6331 } 6332 6333 default: 6334 return BaseState::onMessageReceived(msg); 6335 } 6336 6337 return handled; 6338 } 6339 6340 void ACodec::UninitializedState::onSetup( 6341 const sp<AMessage> &msg) { 6342 if (onAllocateComponent(msg) 6343 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6344 mCodec->mLoadedState->onStart(); 6345 } 6346 } 6347 6348 bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6349 ALOGV("onAllocateComponent"); 6350 6351 CHECK(mCodec->mNode == 0); 6352 6353 OMXClient client; 6354 if 
(client.connect() != OK) { 6355 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6356 return false; 6357 } 6358 6359 sp<IOMX> omx = client.interface(); 6360 6361 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6362 6363 Vector<AString> matchingCodecs; 6364 6365 AString mime; 6366 6367 AString componentName; 6368 uint32_t quirks = 0; 6369 int32_t encoder = false; 6370 if (msg->findString("componentName", &componentName)) { 6371 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6372 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6373 matchingCodecs.add(componentName); 6374 } 6375 } else { 6376 CHECK(msg->findString("mime", &mime)); 6377 6378 if (!msg->findInt32("encoder", &encoder)) { 6379 encoder = false; 6380 } 6381 6382 MediaCodecList::findMatchingCodecs( 6383 mime.c_str(), 6384 encoder, // createEncoder 6385 0, // flags 6386 &matchingCodecs); 6387 } 6388 6389 sp<CodecObserver> observer = new CodecObserver; 6390 IOMX::node_id node = 0; 6391 6392 status_t err = NAME_NOT_FOUND; 6393 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6394 ++matchIndex) { 6395 componentName = matchingCodecs[matchIndex]; 6396 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6397 6398 pid_t tid = gettid(); 6399 int prevPriority = androidGetThreadPriority(tid); 6400 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6401 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6402 androidSetThreadPriority(tid, prevPriority); 6403 6404 if (err == OK) { 6405 break; 6406 } else { 6407 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6408 } 6409 6410 node = 0; 6411 } 6412 6413 if (node == 0) { 6414 if (!mime.empty()) { 6415 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6416 encoder ? "en" : "de", mime.c_str(), err); 6417 } else { 6418 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6419 } 6420 6421 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6422 return false; 6423 } 6424 6425 mDeathNotifier = new DeathNotifier(notify); 6426 if (mCodec->mNodeBinder == NULL || 6427 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6428 // This was a local binder, if it dies so do we, we won't care 6429 // about any notifications in the afterlife. 
6430 mDeathNotifier.clear(); 6431 } 6432 6433 notify = new AMessage(kWhatOMXMessageList, mCodec); 6434 observer->setNotificationMessage(notify); 6435 6436 mCodec->mComponentName = componentName; 6437 mCodec->mRenderTracker.setComponentName(componentName); 6438 mCodec->mFlags = 0; 6439 6440 if (componentName.endsWith(".secure")) { 6441 mCodec->mFlags |= kFlagIsSecure; 6442 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6443 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6444 } 6445 6446 mCodec->mQuirks = quirks; 6447 mCodec->mOMX = omx; 6448 mCodec->mNode = node; 6449 6450 { 6451 sp<AMessage> notify = mCodec->mNotify->dup(); 6452 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6453 notify->setString("componentName", mCodec->mComponentName.c_str()); 6454 notify->post(); 6455 } 6456 6457 mCodec->changeState(mCodec->mLoadedState); 6458 6459 return true; 6460 } 6461 6462 //////////////////////////////////////////////////////////////////////////////// 6463 6464 ACodec::LoadedState::LoadedState(ACodec *codec) 6465 : BaseState(codec) { 6466 } 6467 6468 void ACodec::LoadedState::stateEntered() { 6469 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6470 6471 mCodec->mPortEOS[kPortIndexInput] = 6472 mCodec->mPortEOS[kPortIndexOutput] = false; 6473 6474 mCodec->mInputEOSResult = OK; 6475 6476 mCodec->mDequeueCounter = 0; 6477 mCodec->mMetadataBuffersToSubmit = 0; 6478 mCodec->mRepeatFrameDelayUs = -1ll; 6479 mCodec->mInputFormat.clear(); 6480 mCodec->mOutputFormat.clear(); 6481 mCodec->mBaseOutputFormat.clear(); 6482 6483 if (mCodec->mShutdownInProgress) { 6484 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6485 6486 mCodec->mShutdownInProgress = false; 6487 mCodec->mKeepComponentAllocated = false; 6488 6489 onShutdown(keepComponentAllocated); 6490 } 6491 mCodec->mExplicitShutdown = false; 6492 6493 mCodec->processDeferredMessages(); 6494 } 6495 6496 void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6497 if (!keepComponentAllocated) { 6498 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6499 6500 mCodec->changeState(mCodec->mUninitializedState); 6501 } 6502 6503 if (mCodec->mExplicitShutdown) { 6504 sp<AMessage> notify = mCodec->mNotify->dup(); 6505 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6506 notify->post(); 6507 mCodec->mExplicitShutdown = false; 6508 } 6509 } 6510 6511 bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6512 bool handled = false; 6513 6514 switch (msg->what()) { 6515 case ACodec::kWhatConfigureComponent: 6516 { 6517 onConfigureComponent(msg); 6518 handled = true; 6519 break; 6520 } 6521 6522 case ACodec::kWhatCreateInputSurface: 6523 { 6524 onCreateInputSurface(msg); 6525 handled = true; 6526 break; 6527 } 6528 6529 case ACodec::kWhatSetInputSurface: 6530 { 6531 onSetInputSurface(msg); 6532 handled = true; 6533 break; 6534 } 6535 6536 case ACodec::kWhatStart: 6537 { 6538 onStart(); 6539 handled = true; 6540 break; 6541 } 6542 6543 case ACodec::kWhatShutdown: 6544 { 6545 int32_t keepComponentAllocated; 6546 CHECK(msg->findInt32( 6547 "keepComponentAllocated", &keepComponentAllocated)); 6548 6549 mCodec->mExplicitShutdown = true; 6550 onShutdown(keepComponentAllocated); 6551 6552 handled = true; 6553 break; 6554 } 6555 6556 case ACodec::kWhatFlush: 6557 { 6558 sp<AMessage> notify = mCodec->mNotify->dup(); 6559 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6560 notify->post(); 6561 6562 handled = true; 6563 break; 6564 } 6565 6566 default: 6567 return 
BaseState::onMessageReceived(msg); 6568 } 6569 6570 return handled; 6571 } 6572 6573 bool ACodec::LoadedState::onConfigureComponent( 6574 const sp<AMessage> &msg) { 6575 ALOGV("onConfigureComponent"); 6576 6577 CHECK(mCodec->mNode != 0); 6578 6579 status_t err = OK; 6580 AString mime; 6581 if (!msg->findString("mime", &mime)) { 6582 err = BAD_VALUE; 6583 } else { 6584 err = mCodec->configureCodec(mime.c_str(), msg); 6585 } 6586 if (err != OK) { 6587 ALOGE("[%s] configureCodec returning error %d", 6588 mCodec->mComponentName.c_str(), err); 6589 6590 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6591 return false; 6592 } 6593 6594 { 6595 sp<AMessage> notify = mCodec->mNotify->dup(); 6596 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6597 notify->setMessage("input-format", mCodec->mInputFormat); 6598 notify->setMessage("output-format", mCodec->mOutputFormat); 6599 notify->post(); 6600 } 6601 6602 return true; 6603 } 6604 6605 status_t ACodec::LoadedState::setupInputSurface() { 6606 status_t err = OK; 6607 6608 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6609 err = mCodec->mOMX->setInternalOption( 6610 mCodec->mNode, 6611 kPortIndexInput, 6612 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6613 &mCodec->mRepeatFrameDelayUs, 6614 sizeof(mCodec->mRepeatFrameDelayUs)); 6615 6616 if (err != OK) { 6617 ALOGE("[%s] Unable to configure option to repeat previous " 6618 "frames (err %d)", 6619 mCodec->mComponentName.c_str(), 6620 err); 6621 return err; 6622 } 6623 } 6624 6625 if (mCodec->mMaxPtsGapUs > 0ll) { 6626 err = mCodec->mOMX->setInternalOption( 6627 mCodec->mNode, 6628 kPortIndexInput, 6629 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6630 &mCodec->mMaxPtsGapUs, 6631 sizeof(mCodec->mMaxPtsGapUs)); 6632 6633 if (err != OK) { 6634 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6635 mCodec->mComponentName.c_str(), 6636 err); 6637 return err; 6638 } 6639 } 6640 6641 if (mCodec->mMaxFps > 0) { 6642 err = mCodec->mOMX->setInternalOption( 6643 mCodec->mNode, 6644 kPortIndexInput, 6645 IOMX::INTERNAL_OPTION_MAX_FPS, 6646 &mCodec->mMaxFps, 6647 sizeof(mCodec->mMaxFps)); 6648 6649 if (err != OK) { 6650 ALOGE("[%s] Unable to configure max fps (err %d)", 6651 mCodec->mComponentName.c_str(), 6652 err); 6653 return err; 6654 } 6655 } 6656 6657 if (mCodec->mTimePerCaptureUs > 0ll 6658 && mCodec->mTimePerFrameUs > 0ll) { 6659 int64_t timeLapse[2]; 6660 timeLapse[0] = mCodec->mTimePerFrameUs; 6661 timeLapse[1] = mCodec->mTimePerCaptureUs; 6662 err = mCodec->mOMX->setInternalOption( 6663 mCodec->mNode, 6664 kPortIndexInput, 6665 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6666 &timeLapse[0], 6667 sizeof(timeLapse)); 6668 6669 if (err != OK) { 6670 ALOGE("[%s] Unable to configure time lapse (err %d)", 6671 mCodec->mComponentName.c_str(), 6672 err); 6673 return err; 6674 } 6675 } 6676 6677 if (mCodec->mCreateInputBuffersSuspended) { 6678 bool suspend = true; 6679 err = mCodec->mOMX->setInternalOption( 6680 mCodec->mNode, 6681 kPortIndexInput, 6682 IOMX::INTERNAL_OPTION_SUSPEND, 6683 &suspend, 6684 sizeof(suspend)); 6685 6686 if (err != OK) { 6687 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6688 mCodec->mComponentName.c_str(), 6689 err); 6690 return err; 6691 } 6692 } 6693 6694 uint32_t usageBits; 6695 if (mCodec->mOMX->getParameter( 6696 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6697 &usageBits, sizeof(usageBits)) == OK) { 6698 mCodec->mInputFormat->setInt32( 6699 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 
6700 } 6701 6702 sp<ABuffer> colorAspectsBuffer; 6703 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6704 err = mCodec->mOMX->setInternalOption( 6705 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6706 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6707 if (err != OK) { 6708 ALOGE("[%s] Unable to configure color aspects (err %d)", 6709 mCodec->mComponentName.c_str(), err); 6710 return err; 6711 } 6712 } 6713 return OK; 6714 } 6715 6716 void ACodec::LoadedState::onCreateInputSurface( 6717 const sp<AMessage> & /* msg */) { 6718 ALOGV("onCreateInputSurface"); 6719 6720 sp<AMessage> notify = mCodec->mNotify->dup(); 6721 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6722 6723 android_dataspace dataSpace; 6724 status_t err = 6725 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6726 notify->setMessage("input-format", mCodec->mInputFormat); 6727 notify->setMessage("output-format", mCodec->mOutputFormat); 6728 6729 sp<IGraphicBufferProducer> bufferProducer; 6730 if (err == OK) { 6731 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6732 err = mCodec->mOMX->createInputSurface( 6733 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6734 &mCodec->mInputMetadataType); 6735 // framework uses ANW buffers internally instead of gralloc handles 6736 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6737 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6738 } 6739 } 6740 6741 if (err == OK) { 6742 err = setupInputSurface(); 6743 } 6744 6745 if (err == OK) { 6746 notify->setObject("input-surface", 6747 new BufferProducerWrapper(bufferProducer)); 6748 } else { 6749 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6750 // the error through because it's in the "configured" state. We 6751 // send a kWhatInputSurfaceCreated with an error value instead. 
6752 ALOGE("[%s] onCreateInputSurface returning error %d", 6753 mCodec->mComponentName.c_str(), err); 6754 notify->setInt32("err", err); 6755 } 6756 notify->post(); 6757 } 6758 6759 void ACodec::LoadedState::onSetInputSurface( 6760 const sp<AMessage> &msg) { 6761 ALOGV("onSetInputSurface"); 6762 6763 sp<AMessage> notify = mCodec->mNotify->dup(); 6764 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6765 6766 sp<RefBase> obj; 6767 CHECK(msg->findObject("input-surface", &obj)); 6768 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6769 6770 android_dataspace dataSpace; 6771 status_t err = 6772 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6773 notify->setMessage("input-format", mCodec->mInputFormat); 6774 notify->setMessage("output-format", mCodec->mOutputFormat); 6775 6776 if (err == OK) { 6777 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6778 err = mCodec->mOMX->setInputSurface( 6779 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6780 &mCodec->mInputMetadataType); 6781 // framework uses ANW buffers internally instead of gralloc handles 6782 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6783 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6784 } 6785 } 6786 6787 if (err == OK) { 6788 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6789 err = setupInputSurface(); 6790 } 6791 6792 if (err != OK) { 6793 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6794 // the error through because it's in the "configured" state. We 6795 // send a kWhatInputSurfaceAccepted with an error value instead. 6796 ALOGE("[%s] onSetInputSurface returning error %d", 6797 mCodec->mComponentName.c_str(), err); 6798 notify->setInt32("err", err); 6799 } 6800 notify->post(); 6801 } 6802 6803 void ACodec::LoadedState::onStart() { 6804 ALOGV("onStart"); 6805 6806 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6807 if (err != OK) { 6808 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6809 } else { 6810 mCodec->changeState(mCodec->mLoadedToIdleState); 6811 } 6812 } 6813 6814 //////////////////////////////////////////////////////////////////////////////// 6815 6816 ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6817 : BaseState(codec) { 6818 } 6819 6820 void ACodec::LoadedToIdleState::stateEntered() { 6821 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6822 6823 status_t err; 6824 if ((err = allocateBuffers()) != OK) { 6825 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6826 "(error 0x%08x)", 6827 err); 6828 6829 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6830 6831 mCodec->mOMX->sendCommand( 6832 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6833 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6834 mCodec->freeBuffersOnPort(kPortIndexInput); 6835 } 6836 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6837 mCodec->freeBuffersOnPort(kPortIndexOutput); 6838 } 6839 6840 mCodec->changeState(mCodec->mLoadedState); 6841 } 6842 } 6843 6844 status_t ACodec::LoadedToIdleState::allocateBuffers() { 6845 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6846 6847 if (err != OK) { 6848 return err; 6849 } 6850 6851 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6852 } 6853 6854 bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6855 switch 
(msg->what()) { 6856 case kWhatSetParameters: 6857 case kWhatShutdown: 6858 { 6859 mCodec->deferMessage(msg); 6860 return true; 6861 } 6862 6863 case kWhatSignalEndOfInputStream: 6864 { 6865 mCodec->onSignalEndOfInputStream(); 6866 return true; 6867 } 6868 6869 case kWhatResume: 6870 { 6871 // We'll be active soon enough. 6872 return true; 6873 } 6874 6875 case kWhatFlush: 6876 { 6877 // We haven't even started yet, so we're flushed alright... 6878 sp<AMessage> notify = mCodec->mNotify->dup(); 6879 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6880 notify->post(); 6881 return true; 6882 } 6883 6884 default: 6885 return BaseState::onMessageReceived(msg); 6886 } 6887 } 6888 6889 bool ACodec::LoadedToIdleState::onOMXEvent( 6890 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6891 switch (event) { 6892 case OMX_EventCmdComplete: 6893 { 6894 status_t err = OK; 6895 if (data1 != (OMX_U32)OMX_CommandStateSet 6896 || data2 != (OMX_U32)OMX_StateIdle) { 6897 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6898 asString((OMX_COMMANDTYPE)data1), data1, 6899 asString((OMX_STATETYPE)data2), data2); 6900 err = FAILED_TRANSACTION; 6901 } 6902 6903 if (err == OK) { 6904 err = mCodec->mOMX->sendCommand( 6905 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6906 } 6907 6908 if (err != OK) { 6909 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6910 } else { 6911 mCodec->changeState(mCodec->mIdleToExecutingState); 6912 } 6913 6914 return true; 6915 } 6916 6917 default: 6918 return BaseState::onOMXEvent(event, data1, data2); 6919 } 6920 } 6921 6922 //////////////////////////////////////////////////////////////////////////////// 6923 6924 ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6925 : BaseState(codec) { 6926 } 6927 6928 void ACodec::IdleToExecutingState::stateEntered() { 6929 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6930 } 6931 6932 bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6933 switch (msg->what()) { 6934 case kWhatSetParameters: 6935 case kWhatShutdown: 6936 { 6937 mCodec->deferMessage(msg); 6938 return true; 6939 } 6940 6941 case kWhatResume: 6942 { 6943 // We'll be active soon enough. 6944 return true; 6945 } 6946 6947 case kWhatFlush: 6948 { 6949 // We haven't even started yet, so we're flushed alright... 
6950 sp<AMessage> notify = mCodec->mNotify->dup(); 6951 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6952 notify->post(); 6953 6954 return true; 6955 } 6956 6957 case kWhatSignalEndOfInputStream: 6958 { 6959 mCodec->onSignalEndOfInputStream(); 6960 return true; 6961 } 6962 6963 default: 6964 return BaseState::onMessageReceived(msg); 6965 } 6966 } 6967 6968 bool ACodec::IdleToExecutingState::onOMXEvent( 6969 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6970 switch (event) { 6971 case OMX_EventCmdComplete: 6972 { 6973 if (data1 != (OMX_U32)OMX_CommandStateSet 6974 || data2 != (OMX_U32)OMX_StateExecuting) { 6975 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6976 asString((OMX_COMMANDTYPE)data1), data1, 6977 asString((OMX_STATETYPE)data2), data2); 6978 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6979 return true; 6980 } 6981 6982 mCodec->mExecutingState->resume(); 6983 mCodec->changeState(mCodec->mExecutingState); 6984 6985 return true; 6986 } 6987 6988 default: 6989 return BaseState::onOMXEvent(event, data1, data2); 6990 } 6991 } 6992 6993 //////////////////////////////////////////////////////////////////////////////// 6994 6995 ACodec::ExecutingState::ExecutingState(ACodec *codec) 6996 : BaseState(codec), 6997 mActive(false) { 6998 } 6999 7000 ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 7001 OMX_U32 /* portIndex */) { 7002 return RESUBMIT_BUFFERS; 7003 } 7004 7005 void ACodec::ExecutingState::submitOutputMetaBuffers() { 7006 // submit as many buffers as there are input buffers with the codec 7007 // in case we are in port reconfiguring 7008 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 7009 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7010 7011 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 7012 if (mCodec->submitOutputMetadataBuffer() != OK) 7013 break; 7014 } 7015 } 7016 7017 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7018 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7019 } 7020 7021 void ACodec::ExecutingState::submitRegularOutputBuffers() { 7022 bool failed = false; 7023 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 7024 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 7025 7026 if (mCodec->mNativeWindow != NULL) { 7027 if (info->mStatus != BufferInfo::OWNED_BY_US 7028 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7029 ALOGE("buffers should be owned by us or the surface"); 7030 failed = true; 7031 break; 7032 } 7033 7034 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7035 continue; 7036 } 7037 } else { 7038 if (info->mStatus != BufferInfo::OWNED_BY_US) { 7039 ALOGE("buffers should be owned by us"); 7040 failed = true; 7041 break; 7042 } 7043 } 7044 7045 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 7046 7047 info->checkWriteFence("submitRegularOutputBuffers"); 7048 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 7049 info->mFenceFd = -1; 7050 if (err != OK) { 7051 failed = true; 7052 break; 7053 } 7054 7055 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 7056 } 7057 7058 if (failed) { 7059 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7060 } 7061 } 7062 7063 void ACodec::ExecutingState::submitOutputBuffers() { 7064 submitRegularOutputBuffers(); 7065 if (mCodec->storingMetadataInDecodedBuffers()) { 7066 submitOutputMetaBuffers(); 7067 } 7068 } 7069 
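// resume() runs on the Idle->Executing transition and on an explicit kWhatResume: it queues
// every free output buffer to the component and re-advertises every input buffer we own to
// MediaCodec via postFillThisBuffer().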
7070 void ACodec::ExecutingState::resume() { 7071 if (mActive) { 7072 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7073 return; 7074 } 7075 7076 submitOutputBuffers(); 7077 7078 // Post all available input buffers 7079 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 7080 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 7081 } 7082 7083 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 7084 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7085 if (info->mStatus == BufferInfo::OWNED_BY_US) { 7086 postFillThisBuffer(info); 7087 } 7088 } 7089 7090 mActive = true; 7091 } 7092 7093 void ACodec::ExecutingState::stateEntered() { 7094 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 7095 7096 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7097 mCodec->processDeferredMessages(); 7098 } 7099 7100 bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 7101 bool handled = false; 7102 7103 switch (msg->what()) { 7104 case kWhatShutdown: 7105 { 7106 int32_t keepComponentAllocated; 7107 CHECK(msg->findInt32( 7108 "keepComponentAllocated", &keepComponentAllocated)); 7109 7110 mCodec->mShutdownInProgress = true; 7111 mCodec->mExplicitShutdown = true; 7112 mCodec->mKeepComponentAllocated = keepComponentAllocated; 7113 7114 mActive = false; 7115 7116 status_t err = mCodec->mOMX->sendCommand( 7117 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 7118 if (err != OK) { 7119 if (keepComponentAllocated) { 7120 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7121 } 7122 // TODO: do some recovery here. 7123 } else { 7124 mCodec->changeState(mCodec->mExecutingToIdleState); 7125 } 7126 7127 handled = true; 7128 break; 7129 } 7130 7131 case kWhatFlush: 7132 { 7133 ALOGV("[%s] ExecutingState flushing now " 7134 "(codec owns %zu/%zu input, %zu/%zu output).", 7135 mCodec->mComponentName.c_str(), 7136 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7137 mCodec->mBuffers[kPortIndexInput].size(), 7138 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7139 mCodec->mBuffers[kPortIndexOutput].size()); 7140 7141 mActive = false; 7142 7143 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 7144 if (err != OK) { 7145 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7146 } else { 7147 mCodec->changeState(mCodec->mFlushingState); 7148 } 7149 7150 handled = true; 7151 break; 7152 } 7153 7154 case kWhatResume: 7155 { 7156 resume(); 7157 7158 handled = true; 7159 break; 7160 } 7161 7162 case kWhatRequestIDRFrame: 7163 { 7164 status_t err = mCodec->requestIDRFrame(); 7165 if (err != OK) { 7166 ALOGW("Requesting an IDR frame failed."); 7167 } 7168 7169 handled = true; 7170 break; 7171 } 7172 7173 case kWhatSetParameters: 7174 { 7175 sp<AMessage> params; 7176 CHECK(msg->findMessage("params", &params)); 7177 7178 status_t err = mCodec->setParameters(params); 7179 7180 sp<AMessage> reply; 7181 if (msg->findMessage("reply", &reply)) { 7182 reply->setInt32("err", err); 7183 reply->post(); 7184 } 7185 7186 handled = true; 7187 break; 7188 } 7189 7190 case ACodec::kWhatSignalEndOfInputStream: 7191 { 7192 mCodec->onSignalEndOfInputStream(); 7193 handled = true; 7194 break; 7195 } 7196 7197 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7198 case kWhatSubmitOutputMetadataBufferIfEOS: 7199 { 7200 if (mCodec->mPortEOS[kPortIndexInput] && 7201 !mCodec->mPortEOS[kPortIndexOutput]) { 7202 status_t err
= mCodec->submitOutputMetadataBuffer(); 7203 if (err == OK) { 7204 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7205 } 7206 } 7207 return true; 7208 } 7209 7210 default: 7211 handled = BaseState::onMessageReceived(msg); 7212 break; 7213 } 7214 7215 return handled; 7216 } 7217 7218 status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 7219 int32_t videoBitrate; 7220 if (params->findInt32("video-bitrate", &videoBitrate)) { 7221 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 7222 InitOMXParams(&configParams); 7223 configParams.nPortIndex = kPortIndexOutput; 7224 configParams.nEncodeBitrate = videoBitrate; 7225 7226 status_t err = mOMX->setConfig( 7227 mNode, 7228 OMX_IndexConfigVideoBitrate, 7229 &configParams, 7230 sizeof(configParams)); 7231 7232 if (err != OK) { 7233 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 7234 videoBitrate, err); 7235 7236 return err; 7237 } 7238 } 7239 7240 int64_t skipFramesBeforeUs; 7241 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 7242 status_t err = 7243 mOMX->setInternalOption( 7244 mNode, 7245 kPortIndexInput, 7246 IOMX::INTERNAL_OPTION_START_TIME, 7247 &skipFramesBeforeUs, 7248 sizeof(skipFramesBeforeUs)); 7249 7250 if (err != OK) { 7251 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 7252 return err; 7253 } 7254 } 7255 7256 int32_t dropInputFrames; 7257 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 7258 bool suspend = dropInputFrames != 0; 7259 7260 status_t err = 7261 mOMX->setInternalOption( 7262 mNode, 7263 kPortIndexInput, 7264 IOMX::INTERNAL_OPTION_SUSPEND, 7265 &suspend, 7266 sizeof(suspend)); 7267 7268 if (err != OK) { 7269 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7270 return err; 7271 } 7272 } 7273 7274 int32_t dummy; 7275 if (params->findInt32("request-sync", &dummy)) { 7276 status_t err = requestIDRFrame(); 7277 7278 if (err != OK) { 7279 ALOGE("Requesting a sync frame failed w/ err %d", err); 7280 return err; 7281 } 7282 } 7283 7284 float rate; 7285 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7286 status_t err = setOperatingRate(rate, mIsVideo); 7287 if (err != OK) { 7288 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7289 return err; 7290 } 7291 } 7292 7293 int32_t intraRefreshPeriod = 0; 7294 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7295 && intraRefreshPeriod > 0) { 7296 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7297 if (err != OK) { 7298 ALOGI("[%s] failed setIntraRefreshPeriod. 
Failure is fine since this key is optional", 7299 mComponentName.c_str()); 7300 err = OK; 7301 } 7302 } 7303 7304 return OK; 7305 } 7306 7307 void ACodec::onSignalEndOfInputStream() { 7308 sp<AMessage> notify = mNotify->dup(); 7309 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7310 7311 status_t err = mOMX->signalEndOfInputStream(mNode); 7312 if (err != OK) { 7313 notify->setInt32("err", err); 7314 } 7315 notify->post(); 7316 } 7317 7318 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7319 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7320 return true; 7321 } 7322 7323 bool ACodec::ExecutingState::onOMXEvent( 7324 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7325 switch (event) { 7326 case OMX_EventPortSettingsChanged: 7327 { 7328 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7329 7330 mCodec->onOutputFormatChanged(); 7331 7332 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7333 mCodec->mMetadataBuffersToSubmit = 0; 7334 CHECK_EQ(mCodec->mOMX->sendCommand( 7335 mCodec->mNode, 7336 OMX_CommandPortDisable, kPortIndexOutput), 7337 (status_t)OK); 7338 7339 mCodec->freeOutputBuffersNotOwnedByComponent(); 7340 7341 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7342 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7343 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7344 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7345 mCodec->mComponentName.c_str(), data2); 7346 } 7347 7348 return true; 7349 } 7350 7351 case OMX_EventBufferFlag: 7352 { 7353 return true; 7354 } 7355 7356 default: 7357 return BaseState::onOMXEvent(event, data1, data2); 7358 } 7359 } 7360 7361 //////////////////////////////////////////////////////////////////////////////// 7362 7363 ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7364 ACodec *codec) 7365 : BaseState(codec) { 7366 } 7367 7368 ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7369 OMX_U32 portIndex) { 7370 if (portIndex == kPortIndexOutput) { 7371 return FREE_BUFFERS; 7372 } 7373 7374 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7375 7376 return RESUBMIT_BUFFERS; 7377 } 7378 7379 bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7380 const sp<AMessage> &msg) { 7381 bool handled = false; 7382 7383 switch (msg->what()) { 7384 case kWhatFlush: 7385 case kWhatShutdown: 7386 case kWhatResume: 7387 case kWhatSetParameters: 7388 { 7389 if (msg->what() == kWhatResume) { 7390 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7391 } 7392 7393 mCodec->deferMessage(msg); 7394 handled = true; 7395 break; 7396 } 7397 7398 default: 7399 handled = BaseState::onMessageReceived(msg); 7400 break; 7401 } 7402 7403 return handled; 7404 } 7405 7406 void ACodec::OutputPortSettingsChangedState::stateEntered() { 7407 ALOGV("[%s] Now handling output port settings change", 7408 mCodec->mComponentName.c_str()); 7409 } 7410 7411 bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7412 int64_t mediaTimeUs, nsecs_t systemNano) { 7413 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7414 return true; 7415 } 7416 7417 bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7418 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7419 switch (event) { 7420 case OMX_EventCmdComplete: 7421 { 7422 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7423 if (data2 != (OMX_U32)kPortIndexOutput) { 7424 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7425 return false; 7426 } 7427 
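                // The code below finishes the output-port reconfiguration that started with
                // OMX_CommandPortDisable: drop the old allocations, re-enable the port, and
                // allocate a fresh set of output buffers for the new format. Roughly (a sketch
                // of the happy path only; the code below adds error handling at each step):
                //
                //     mDealer[kPortIndexOutput].clear();
                //     mOMX->sendCommand(mNode, OMX_CommandPortEnable, kPortIndexOutput);
                //     allocateBuffersOnPort(kPortIndexOutput);
                //
                // If any step fails we force a shutdown through LoadedState, since the port can
                // be left half-enabled and a normal Executing->Idle->Loaded teardown would stall.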
                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seems impossible, probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...
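            // A repeated shutdown request while the Executing->Idle transition is already in
            // flight is simply acknowledged; the transition that is underway carries on.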
7511 7512 handled = true; 7513 break; 7514 } 7515 7516 default: 7517 handled = BaseState::onMessageReceived(msg); 7518 break; 7519 } 7520 7521 return handled; 7522 } 7523 7524 void ACodec::ExecutingToIdleState::stateEntered() { 7525 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7526 7527 mComponentNowIdle = false; 7528 mCodec->mLastOutputFormat.clear(); 7529 } 7530 7531 bool ACodec::ExecutingToIdleState::onOMXEvent( 7532 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7533 switch (event) { 7534 case OMX_EventCmdComplete: 7535 { 7536 if (data1 != (OMX_U32)OMX_CommandStateSet 7537 || data2 != (OMX_U32)OMX_StateIdle) { 7538 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7539 asString((OMX_COMMANDTYPE)data1), data1, 7540 asString((OMX_STATETYPE)data2), data2); 7541 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7542 return true; 7543 } 7544 7545 mComponentNowIdle = true; 7546 7547 changeStateIfWeOwnAllBuffers(); 7548 7549 return true; 7550 } 7551 7552 case OMX_EventPortSettingsChanged: 7553 case OMX_EventBufferFlag: 7554 { 7555 // We're shutting down and don't care about this anymore. 7556 return true; 7557 } 7558 7559 default: 7560 return BaseState::onOMXEvent(event, data1, data2); 7561 } 7562 } 7563 7564 void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7565 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7566 status_t err = mCodec->mOMX->sendCommand( 7567 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7568 if (err == OK) { 7569 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7570 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7571 if (err == OK) { 7572 err = err2; 7573 } 7574 } 7575 7576 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7577 && mCodec->mNativeWindow != NULL) { 7578 // We push enough 1x1 blank buffers to ensure that one of 7579 // them has made it to the display. This allows the OMX 7580 // component teardown to zero out any protected buffers 7581 // without the risk of scanning out one of those buffers. 7582 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7583 } 7584 7585 if (err != OK) { 7586 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7587 return; 7588 } 7589 7590 mCodec->changeState(mCodec->mIdleToLoadedState); 7591 } 7592 } 7593 7594 void ACodec::ExecutingToIdleState::onInputBufferFilled( 7595 const sp<AMessage> &msg) { 7596 BaseState::onInputBufferFilled(msg); 7597 7598 changeStateIfWeOwnAllBuffers(); 7599 } 7600 7601 void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7602 const sp<AMessage> &msg) { 7603 BaseState::onOutputBufferDrained(msg); 7604 7605 changeStateIfWeOwnAllBuffers(); 7606 } 7607 7608 //////////////////////////////////////////////////////////////////////////////// 7609 7610 ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7611 : BaseState(codec) { 7612 } 7613 7614 bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7615 bool handled = false; 7616 7617 switch (msg->what()) { 7618 case kWhatShutdown: 7619 { 7620 // We're already doing that... 7621 7622 handled = true; 7623 break; 7624 } 7625 7626 case kWhatFlush: 7627 { 7628 // Don't send me a flush request if you previously wanted me 7629 // to shutdown. 
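            // Unlike ExecutingToIdleState above (which merely warns), a flush this late in the
            // teardown is reported as an error and not acted upon.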
7630 ALOGE("Got flush request in IdleToLoadedState"); 7631 break; 7632 } 7633 7634 default: 7635 handled = BaseState::onMessageReceived(msg); 7636 break; 7637 } 7638 7639 return handled; 7640 } 7641 7642 void ACodec::IdleToLoadedState::stateEntered() { 7643 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7644 } 7645 7646 bool ACodec::IdleToLoadedState::onOMXEvent( 7647 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7648 switch (event) { 7649 case OMX_EventCmdComplete: 7650 { 7651 if (data1 != (OMX_U32)OMX_CommandStateSet 7652 || data2 != (OMX_U32)OMX_StateLoaded) { 7653 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7654 asString((OMX_COMMANDTYPE)data1), data1, 7655 asString((OMX_STATETYPE)data2), data2); 7656 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7657 return true; 7658 } 7659 7660 mCodec->changeState(mCodec->mLoadedState); 7661 7662 return true; 7663 } 7664 7665 default: 7666 return BaseState::onOMXEvent(event, data1, data2); 7667 } 7668 } 7669 7670 //////////////////////////////////////////////////////////////////////////////// 7671 7672 ACodec::FlushingState::FlushingState(ACodec *codec) 7673 : BaseState(codec) { 7674 } 7675 7676 void ACodec::FlushingState::stateEntered() { 7677 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7678 7679 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7680 } 7681 7682 bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7683 bool handled = false; 7684 7685 switch (msg->what()) { 7686 case kWhatShutdown: 7687 { 7688 mCodec->deferMessage(msg); 7689 break; 7690 } 7691 7692 case kWhatFlush: 7693 { 7694 // We're already doing this right now. 7695 handled = true; 7696 break; 7697 } 7698 7699 default: 7700 handled = BaseState::onMessageReceived(msg); 7701 break; 7702 } 7703 7704 return handled; 7705 } 7706 7707 bool ACodec::FlushingState::onOMXEvent( 7708 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7709 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7710 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7711 7712 switch (event) { 7713 case OMX_EventCmdComplete: 7714 { 7715 if (data1 != (OMX_U32)OMX_CommandFlush) { 7716 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7717 asString((OMX_COMMANDTYPE)data1), data1, data2); 7718 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7719 return true; 7720 } 7721 7722 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7723 if (mFlushComplete[data2]) { 7724 ALOGW("Flush already completed for %s port", 7725 data2 == kPortIndexInput ? 
"input" : "output"); 7726 return true; 7727 } 7728 mFlushComplete[data2] = true; 7729 7730 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7731 changeStateIfWeOwnAllBuffers(); 7732 } 7733 } else if (data2 == OMX_ALL) { 7734 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7735 ALOGW("received flush complete event for OMX_ALL before ports have been" 7736 "flushed (%d/%d)", 7737 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7738 return false; 7739 } 7740 7741 changeStateIfWeOwnAllBuffers(); 7742 } else { 7743 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7744 } 7745 7746 return true; 7747 } 7748 7749 case OMX_EventPortSettingsChanged: 7750 { 7751 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7752 msg->setInt32("type", omx_message::EVENT); 7753 msg->setInt32("node", mCodec->mNode); 7754 msg->setInt32("event", event); 7755 msg->setInt32("data1", data1); 7756 msg->setInt32("data2", data2); 7757 7758 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7759 mCodec->mComponentName.c_str()); 7760 7761 mCodec->deferMessage(msg); 7762 7763 return true; 7764 } 7765 7766 default: 7767 return BaseState::onOMXEvent(event, data1, data2); 7768 } 7769 7770 return true; 7771 } 7772 7773 void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7774 BaseState::onOutputBufferDrained(msg); 7775 7776 changeStateIfWeOwnAllBuffers(); 7777 } 7778 7779 void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7780 BaseState::onInputBufferFilled(msg); 7781 7782 changeStateIfWeOwnAllBuffers(); 7783 } 7784 7785 void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7786 if (mFlushComplete[kPortIndexInput] 7787 && mFlushComplete[kPortIndexOutput] 7788 && mCodec->allYourBuffersAreBelongToUs()) { 7789 // We now own all buffers except possibly those still queued with 7790 // the native window for rendering. Let's get those back as well. 
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ?
                kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
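        // Intra-refresh support is probed by attempting a getConfig() with the Android
        // intra-refresh extension index; a component that does not recognize the index fails
        // the call, and the capability flag is simply not advertised.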
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
//static
status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) {
    switch (numChannels) {
        case 1:
            map[0] = OMX_AUDIO_ChannelCF;
            break;
        case 2:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            break;
        case 3:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelCF;
            break;
        case 4:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelLR;
            map[3] = OMX_AUDIO_ChannelRR;
            break;
        case 5:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelCF;
            map[3] = OMX_AUDIO_ChannelLR;
            map[4] = OMX_AUDIO_ChannelRR;
            break;
        case 6:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelCF;
            map[3] = OMX_AUDIO_ChannelLFE;
            map[4] = OMX_AUDIO_ChannelLR;
            map[5] = OMX_AUDIO_ChannelRR;
            break;
        case 7:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelCF;
            map[3] = OMX_AUDIO_ChannelLFE;
            map[4] = OMX_AUDIO_ChannelLR;
            map[5] = OMX_AUDIO_ChannelRR;
            map[6] = OMX_AUDIO_ChannelCS;
            break;
        case 8:
            map[0] = OMX_AUDIO_ChannelLF;
            map[1] = OMX_AUDIO_ChannelRF;
            map[2] = OMX_AUDIO_ChannelCF;
            map[3] = OMX_AUDIO_ChannelLFE;
            map[4] = OMX_AUDIO_ChannelLR;
            map[5] = OMX_AUDIO_ChannelRR;
            map[6] = OMX_AUDIO_ChannelLS;
            map[7] = OMX_AUDIO_ChannelRS;
            break;
        default:
            return -EINVAL;
    }

    return OK;
}

} // namespace android