/*
 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderBase.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <va/va_android.h>
#include <va/va_tpi.h>
#ifdef __SSE4_1__
#include "use_util_sse4.h"
#endif

// Sentinel for "no timestamp"; all-ones pattern of a 64-bit value.
#define INVALID_PTS ((uint64_t)-1)
// Largest/smallest values used when scanning picture order counts (POC).
// NOTE: MINIMUM_POC is 0x80000000, i.e. INT32_MIN when stored in an int32_t,
// so every valid POC compares >= mNextOutputPOC after a reset.
#define MAXIMUM_POC 0x7FFFFFFF
#define MINIMUM_POC 0x80000000
// Magic handle value passed to vaGetDisplay() on Android builds.
#define ANDROID_DISPLAY_HANDLE 0x18C34078

// Construct the decoder in a fully idle state. No VA resources and no
// parser library are touched here; they are created lazily in start()
// and setupVA(). Only the mime type string is duplicated (freed in dtor).
VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
    : mInitialized(false),
      mLowDelay(false),
      mStoreMetaData(false),
      mDisplay(NULL),
      mVADisplay(NULL),
      mVAContext(VA_INVALID_ID),
      mVAConfig(VA_INVALID_ID),
      mVAStarted(false),
      mCurrentPTS(INVALID_PTS),
      mAcquiredBuffer(NULL),
      mLastReference(NULL),
      mForwardReference(NULL),
      mDecodingFrame(false),
      mSizeChanged(false),
      mShowFrame(true),
      mOutputWindowSize(OUTPUT_WINDOW_SIZE),
      mRotationDegrees(0),
      mErrReportEnabled(false),
      mWiDiOn(false),
      mRawOutput(false),
      mManageReference(true),
      mOutputMethod(OUTPUT_BY_PCT),
      mNumSurfaces(0),
      mSurfaceBuffers(NULL),
      mOutputHead(NULL),
      mOutputTail(NULL),
      mSurfaces(NULL),
      mVASurfaceAttrib(NULL),
      mSurfaceUserPtr(NULL),
      mSurfaceAcquirePos(0),
      mNextOutputPOC(MINIMUM_POC),
      mParserType(type),
      mParserHandle(NULL),
      mSignalBufferSize(0) {

    memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
    memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
    for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
        mSignalBufferPre[i] = NULL;
    }
    pthread_mutex_init(&mLock, NULL);
    pthread_mutex_init(&mFormatLock, NULL);
    // strdup'ed copy owned by this object; released in the destructor.
    mVideoFormatInfo.mimeType = strdup(mimeType);
    mUseGEN = false;
    mMetaDataBuffersNum = 0;
    // vbp parser library handle and function pointers, resolved in start().
    mLibHandle = NULL;
    mParserOpen = NULL;
    mParserClose = NULL;
    mParserParse = NULL;
    mParserQuery = NULL;
    mParserFlush = NULL;
    mParserUpdate = NULL;
}

// Tear down VA/parser state via stop() and release owned allocations.
// NOTE(review): the mutexes are destroyed *before* stop() runs; stop()
// itself does not lock them in the visible code, but confirm no subclass
// override relies on them being alive during stop().
VideoDecoderBase::~VideoDecoderBase() {
    pthread_mutex_destroy(&mLock);
    pthread_mutex_destroy(&mFormatLock);
    stop();
    free(mVideoFormatInfo.mimeType);
}

// Load the vbp parser library, resolve its entry points, open a parser
// instance for mParserType, and snapshot the caller's config buffer
// (metadata only — data pointer/size are cleared).
// Returns DECODE_SUCCESS, DECODE_INVALID_DATA on NULL buffer, or
// DECODE_NO_PARSER if the library/symbols/open fail.
Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }

    if (mParserHandle != NULL) {
        WTRACE("Decoder has already started.");
        return DECODE_SUCCESS;
    }
    mLibHandle = dlopen("libmixvbp.so", RTLD_NOW);
    if (mLibHandle == NULL) {
        return DECODE_NO_PARSER;
    }
    mParserOpen = (OpenFunc)dlsym(mLibHandle, "vbp_open");
    mParserClose = (CloseFunc)dlsym(mLibHandle, "vbp_close");
    mParserParse = (ParseFunc)dlsym(mLibHandle, "vbp_parse");
    mParserQuery = (QueryFunc)dlsym(mLibHandle, "vbp_query");
    mParserFlush = (FlushFunc)dlsym(mLibHandle, "vbp_flush");
    if (mParserOpen == NULL || mParserClose == NULL || mParserParse == NULL
        || mParserQuery == NULL || mParserFlush == NULL) {
        return DECODE_NO_PARSER;
    }
#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
    // vbp_update is only needed for the short-format/slice-header paths.
    mParserUpdate = (UpdateFunc)dlsym(mLibHandle, "vbp_update");
    if (mParserUpdate == NULL) {
        return DECODE_NO_PARSER;
    }
#endif
    if ((int32_t)mParserType != VBP_INVALID) {
        ITRACE("mParserType = %d", mParserType);
        if (mParserOpen(mParserType, &mParserHandle) != VBP_OK) {
            ETRACE("Failed to open VBP parser.");
            return DECODE_NO_PARSER;
        }
    }
    // keep a copy of configure buffer, meta data only. It can be used to override VA setup parameter.
    mConfigBuffer = *buffer;
    mConfigBuffer.data = NULL;
    mConfigBuffer.size = 0;

    mVideoFormatInfo.width = buffer->width;
    mVideoFormatInfo.height = buffer->height;
    if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
        mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
        mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
    }
    mLowDelay = buffer->flag & WANT_LOW_DELAY;
    mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
    mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
    if (mRawOutput) {
        WTRACE("Output is raw data.");
    }

    return DECODE_SUCCESS;
}


// Re-initialize with a new config buffer after the client reallocated
// graphic buffers: terminates VA (surfaces are stale) and re-captures
// the config metadata exactly as start() does, without reloading the
// parser library.
Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }

    // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
    terminateVA();

    // reset the mconfigBuffer to pass it for startVA.
    mConfigBuffer = *buffer;
    mConfigBuffer.data = NULL;
    mConfigBuffer.size = 0;

    mVideoFormatInfo.width = buffer->width;
    mVideoFormatInfo.height = buffer->height;
    if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
        mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
        mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
    }
    mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    mLowDelay = buffer->flag & WANT_LOW_DELAY;
    mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
    mMetaDataBuffersNum = 0;
    mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
    if (mRawOutput) {
        WTRACE("Output is raw data.");
    }
    return DECODE_SUCCESS;
}



// Full stop: release VA resources, reset all decode/output bookkeeping to
// the constructor defaults, close the vbp parser and unload its library.
void VideoDecoderBase::stop(void) {
    terminateVA();

    mCurrentPTS = INVALID_PTS;
    mAcquiredBuffer = NULL;
    mLastReference = NULL;
    mForwardReference = NULL;
    mDecodingFrame = false;
    mSizeChanged = false;

    // private variables
    mLowDelay = false;
    mStoreMetaData = false;
    mRawOutput = false;
    mNumSurfaces = 0;
    mSurfaceAcquirePos = 0;
    mNextOutputPOC = MINIMUM_POC;
    mVideoFormatInfo.valid = false;
    if (mParserHandle){
        mParserClose(mParserHandle);
        mParserHandle = NULL;
    }
    if (mLibHandle) {
        dlclose(mLibHandle);
        mLibHandle = NULL;
    }
}

// Flush for seek/discontinuity: finish (and drop) any in-flight frame,
// empty the output queue, reset timestamps/POC tracking, and flush the
// bitstream parser. VA itself stays up.
void VideoDecoderBase::flush(void) {
    if (mVAStarted == false) {
        // nothing to flush at this stage
        return;
    }

    endDecodingFrame(true);

    VideoSurfaceBuffer *p = mOutputHead;
    // check if there's buffer with DRC flag in the output queue
    while (p) {
        if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) {
            mSizeChanged = true;
            break;
        }
        p = p->next;
    }
    // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
    // (surface is still being rendered)
    mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
    mNextOutputPOC = MINIMUM_POC;
    mCurrentPTS = INVALID_PTS;
    mAcquiredBuffer = NULL;
    mLastReference = NULL;
    mForwardReference = NULL;
    mOutputHead = NULL;
    mOutputTail = NULL;
    mDecodingFrame = false;

    // flush vbp parser
    if (mParserHandle && (mParserFlush(mParserHandle) != VBP_OK)) {
        WTRACE("Failed to flush parser. Continue");
    }

    // initialize surface buffer without resetting mapped/raw data
    initSurfaceBuffer(false);

}

// Drop all decoding state and VA surfaces under mLock; used when the
// client reallocates its graphic buffers.
void VideoDecoderBase::freeSurfaceBuffers(void) {
    if (mVAStarted == false) {
        // nothing to free surface buffers at this stage
        return;
    }

    pthread_mutex_lock(&mLock);

    endDecodingFrame(true);

    // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
    terminateVA();

    pthread_mutex_unlock(&mLock);
}

// Return the current format info. In metadata/native-buffer mode the
// lock/unlock pair acts purely as a barrier against a concurrent
// updateFormatInfo() holding mFormatLock.
const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        // Do nothing here, just to avoid thread
        // contention in updateFormatInfo()
        pthread_mutex_lock(&mFormatLock);
        pthread_mutex_unlock(&mFormatLock);
    }

    return &mVideoFormatInfo;
}

// Number of buffers currently queued for output (O(n) walk of the list).
int VideoDecoderBase::getOutputQueueLength(void) {
    VideoSurfaceBuffer *p = mOutputHead;

    int i = 0;
    while (p) {
        p = p->next;
        i++;
    }

    return i;
}

// Pop the next displayable buffer from the output queue.
//   draining  - true at EOS: finish the pending frame and allow the
//               reorder logic to emit even if criteria aren't met.
//   outErrBuf - optional sink for accumulated decoding errors.
// Selection is FIFO in low-delay mode, otherwise by PCT or POC depending
// on mOutputMethod. Returns NULL if nothing is ready.
const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
    if (mVAStarted == false) {
        return NULL;
    }
    bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;

    if (draining) {
        // complete decoding the last frame and ignore return
        endDecodingFrame(false);
    }

    if (mOutputHead == NULL) {
        return NULL;
    }

    // output by position (the first buffer)
    VideoSurfaceBuffer *outputByPos = mOutputHead;

    if (mLowDelay) {
        // low delay: no reordering, emit the head immediately
        mOutputHead = mOutputHead->next;
        if (mOutputHead == NULL) {
            mOutputTail = NULL;
        }
        vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
        if (useGraphicBuffer && !mUseGEN) {
            vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
            fillDecodingErrors(&(outputByPos->renderBuffer));
        }
        if (draining && mOutputTail == NULL) {
            outputByPos->renderBuffer.flag |= IS_EOS;
        }
        drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer));

        return &(outputByPos->renderBuffer);
    }

    VideoSurfaceBuffer *output = NULL;
    if (mOutputMethod == OUTPUT_BY_POC) {
        output = findOutputByPoc(draining);
    } else if (mOutputMethod == OUTPUT_BY_PCT) {
        output = findOutputByPct(draining);
    } else {
        ETRACE("Invalid output method.");
        return NULL;
    }

    if (output == NULL) {
        return NULL;
    }

    if (output != outputByPos) {
        // remove this output from middle or end of the list
        VideoSurfaceBuffer *p = outputByPos;
        while (p->next != output) {
            p = p->next;
        }
        p->next = output->next;
        if (mOutputTail == output) {
            mOutputTail = p;
        }
    } else {
        // remove this output from head of the list
        mOutputHead = mOutputHead->next;
        if (mOutputHead == NULL) {
            mOutputTail = NULL;
        }
    }
    //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
    vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp);

    if (useGraphicBuffer && !mUseGEN) {
        vaSyncSurface(mVADisplay, output->renderBuffer.surface);
        fillDecodingErrors(&(output->renderBuffer));
    }

    if (draining && mOutputTail == NULL) {
        output->renderBuffer.flag |= IS_EOS;
    }

    drainDecodingErrors(outErrBuf, &(output->renderBuffer));

    return &(output->renderBuffer);
}

// Select the queued buffer with the smallest presentation timestamp.
// NOTE(review): assumes the queue is non-empty (do/while dereferences
// mOutputHead); callers must check mOutputHead first.
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts() {
    // output by presentation time stamp - buffer with the smallest time stamp is output
    VideoSurfaceBuffer *p = mOutputHead;
    VideoSurfaceBuffer *outputByPts = NULL;
    uint64_t pts = INVALID_PTS;
    do {
        if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
            // find buffer with the smallest PTS
            pts = p->renderBuffer.timeStamp;
            outputByPts = p;
        }
        p = p->next;
    } while (p != NULL);

    return outputByPts;
}

VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) {
    // output by picture coding type (PCT)
    // if there is more than one reference frame, the first reference frame is ouput, otherwise,
    // output non-reference frame if there is any.

    VideoSurfaceBuffer *p = mOutputHead;
    VideoSurfaceBuffer *outputByPct = NULL;
    int32_t reference = 0;
    do {
        if (p->referenceFrame) {
            reference++;
            if (reference > 1) {
                // mOutputHead must be a reference frame
                outputByPct = mOutputHead;
                break;
            }
        } else {
            // first non-reference frame
            outputByPct = p;
            break;
        }
        p = p->next;
    } while (p != NULL);

    if (outputByPct == NULL && draining) {
        // at EOS, fall back to emitting the head
        outputByPct = mOutputHead;
    }
    return outputByPct;
}

#if 0
// Legacy POC-based selection, kept for reference (disabled by #if 0).
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
    // output by picture order count (POC)
    // Output criteria:
    // if there is IDR frame (POC == 0), all the frames before IDR must be output;
    // Otherwise, if draining flag is set or list is full, frame with the least POC is output;
    // Otherwise, NOTHING is output

    int32_t dpbFullness = 0;
    for (int32_t i = 0; i < mNumSurfaces; i++) {
        // count num of reference frames
        if (mSurfaceBuffers[i].asReferernce) {
            dpbFullness++;
        }
    }

    if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) {
        // frame is being decoded and is not ready for output yet
        dpbFullness--;
    }

    VideoSurfaceBuffer *p = mOutputHead;
    while (p != NULL) {
        // count dpbFullness with non-reference frame in the output queue
        if (p->asReferernce == false) {
            dpbFullness++;
        }
        p = p->next;
    }

Retry:
    p = mOutputHead;
    VideoSurfaceBuffer *outputByPoc = NULL;
    int32_t count = 0;
    int32_t poc = MAXIMUM_POC;

    do {
        if (p->pictureOrder == 0) {
            // output picture with the least POC before IDR
            if (outputByPoc != NULL) {
                mNextOutputPOC = outputByPoc->pictureOrder + 1;
                return outputByPoc;
            } else {
                mNextOutputPOC = MINIMUM_POC;
            }
        }

        // POC of the output candidate must not be less than mNextOutputPOC
        if (p->pictureOrder < mNextOutputPOC) {
            break;
        }

        if (p->pictureOrder < poc) {
            // update the least POC.
            poc = p->pictureOrder;
            outputByPoc = p;
        }
        count++;
        p = p->next;
    } while (p != NULL && count < mOutputWindowSize);

    if (draining == false && dpbFullness < mOutputWindowSize) {
        // list is not full and we are not in draining state
        // if DPB is already full, one frame must be output
        return NULL;
    }

    if (outputByPoc == NULL) {
        mNextOutputPOC = MINIMUM_POC;
        goto Retry;
    }

    // for debugging purpose
    if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) {
        ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder);
        //gaps_in_frame_num_value_allowed_flag is not currently supported
    }

    mNextOutputPOC = outputByPoc->pictureOrder + 1;

    return outputByPoc;
}
#else
// Select the next buffer by picture order count (POC).
// Walks the queue looking for the smallest POC that is >= mNextOutputPOC;
// a POC-0 entry (new IDR) forces everything before it out first. If the
// window fills with no candidate, the scan restarts with the criteria
// reset to MINIMUM_POC (INT32_MIN as int32_t, so any POC qualifies).
// When draining, falls back to the least-POC buffer seen.
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
    VideoSurfaceBuffer *output = NULL;
    VideoSurfaceBuffer *p = mOutputHead;
    int32_t count = 0;
    int32_t poc = MAXIMUM_POC;
    VideoSurfaceBuffer *outputleastpoc = mOutputHead;
    do {
        count++;
        if (p->pictureOrder == 0) {
            // any picture before this POC (new IDR) must be output
            if (output == NULL) {
                mNextOutputPOC = MINIMUM_POC;
                // looking for any POC with negative value
            } else {
                mNextOutputPOC = output->pictureOrder + 1;
                break;
            }
        }
        if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
            // this POC meets ouput criteria.
            poc = p->pictureOrder;
            output = p;
            outputleastpoc = p;
        }
        if (poc == mNextOutputPOC || count == mOutputWindowSize) {
            if (output != NULL) {
                // this indicates two cases:
                // 1) the next output POC is found.
                // 2) output queue is full and there is at least one buffer meeting the output criteria.
                mNextOutputPOC = output->pictureOrder + 1;
                break;
            } else {
                // this indicates output queue is full and no buffer in the queue meets the output criteria
                // restart processing as queue is FULL and output criteria is changed. (next output POC is 0)
                mNextOutputPOC = MINIMUM_POC;
                count = 0;
                poc = MAXIMUM_POC;
                p = mOutputHead;
                continue;
            }
        }
        if (p->next == NULL) {
            // reached the tail without satisfying the criteria
            output = NULL;
        }

        p = p->next;
    } while (p != NULL);

    if (draining == true && output == NULL) {
        output = outputleastpoc;
    }

    return output;
}
#endif

// True if at least one surface buffer is available for decoding.
// Before initialization this instead reports whether any graphic buffer
// has been signalled (native-buffer mode) or unconditionally true.
bool VideoDecoderBase::checkBufferAvail(void) {
    if (!mInitialized) {
        if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) {
            return true;
        }
        for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
            if (mSignalBufferPre[i] != NULL) {
                return true;
            }
        }
        return false;
    }
    // check whether there is buffer available for decoding
    // TODO: check frame being referenced for frame skipping
    VideoSurfaceBuffer *buffer = NULL;
    for (int32_t i = 0; i < mNumSurfaces; i++) {
        buffer = mSurfaceBuffers + i;

        if (buffer->asReferernce == false &&
            buffer->renderBuffer.renderDone == true) {
            querySurfaceRenderStatus(buffer);
            if (buffer->renderBuffer.driverRenderDone == true)
                return true;
        }
    }
    return false;
}

// Claim a free surface buffer for the next decode, starting the search at
// mSurfaceAcquirePos and wrapping. A buffer qualifies when it is not held
// as a reference and both the client (renderDone) and the driver
// (driverRenderDone) have released it; additionally no *other* buffer may
// still point at the same VA surface (frame-skip duplication case).
// Returns DECODE_NO_SURFACE when a full cycle finds nothing.
Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
    if (mVAStarted == false) {
        return DECODE_FAIL;
    }

    if (mAcquiredBuffer != NULL) {
        ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
        return DECODE_FAIL;
    }

    int nextAcquire = mSurfaceAcquirePos;
    VideoSurfaceBuffer *acquiredBuffer = NULL;
    bool acquired = false;

    while (acquired == false) {
        acquiredBuffer = mSurfaceBuffers + nextAcquire;

        querySurfaceRenderStatus(acquiredBuffer);

        if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) {
            // this is potential buffer for acquisition. Check if it is referenced by other surface for frame skipping
            VideoSurfaceBuffer *temp;
            acquired = true;
            for (int i = 0; i < mNumSurfaces; i++) {
                if (i == nextAcquire) {
                    continue;
                }
                temp = mSurfaceBuffers + i;
                // use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as its the actual surface to use.
                if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
                    temp->renderBuffer.renderDone == false) {
                    ITRACE("Surface is referenced by other surface buffer.");
                    acquired = false;
                    break;
                }
            }
        }
        if (acquired) {
            break;
        }
        nextAcquire++;
        if (nextAcquire == mNumSurfaces) {
            nextAcquire = 0;
        }
        // wrapped all the way around without success
        if (nextAcquire == mSurfaceAcquirePos) {
            return DECODE_NO_SURFACE;
        }
    }

    if (acquired == false) {
        return DECODE_NO_SURFACE;
    }

    mAcquiredBuffer = acquiredBuffer;
    mSurfaceAcquirePos = nextAcquire;

    // set surface again as surface maybe reset by skipped frame.
    // skipped frame is a "non-coded frame" and decoder needs to duplicate the previous reference frame as the output.
    mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
    if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
        mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
    }
    // reset per-frame fields for the new decode
    mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
    mAcquiredBuffer->renderBuffer.display = mVADisplay;
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.renderDone = false;
    mAcquiredBuffer->asReferernce = false;
    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0;
    mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS;

    return DECODE_SUCCESS;
}

// Commit the acquired buffer after a successful decode: optionally copy
// raw data out, update forward/last reference tracking (for codecs where
// mManageReference is true — MPEG4/H.263/WMV style), and append the
// buffer to the output queue when the frame is showable. Subclasses must
// have set referenceFrame/asReferernce correctly before calling.
Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
    Decode_Status status;
    if (mAcquiredBuffer == NULL) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }

    if (mRawOutput) {
        status = getRawDataFromSurface();
        CHECK_STATUS();
    }

    // frame is successfly decoded to the current surface, it is ready for output
    if (mShowFrame) {
        mAcquiredBuffer->renderBuffer.renderDone = false;
    } else {
        // hidden frame: mark already-rendered so the surface can be reused
        mAcquiredBuffer->renderBuffer.renderDone = true;
    }

    // decoder must set "asReference and referenceFrame" flags properly

    // update reference frames
    if (mAcquiredBuffer->referenceFrame) {
        if (mManageReference) {
            // managing reference for MPEG4/H.263/WMV.
            // AVC should manage reference frame in a different way
            if (mForwardReference != NULL) {
                // this foward reference is no longer needed
                mForwardReference->asReferernce = false;
            }
            // Forware reference for either P or B frame prediction
            mForwardReference = mLastReference;
            mAcquiredBuffer->asReferernce = true;
        }

        // the last reference frame.
        mLastReference = mAcquiredBuffer;
    }
    // add to the output list
    if (mShowFrame) {
        if (mOutputHead == NULL) {
            mOutputHead = mAcquiredBuffer;
        } else {
            mOutputTail->next = mAcquiredBuffer;
        }
        mOutputTail = mAcquiredBuffer;
        mOutputTail->next = NULL;
    }

    //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);

    mAcquiredBuffer = NULL;
    mSurfaceAcquirePos = (mSurfaceAcquirePos + 1 ) % mNumSurfaces;
    return DECODE_SUCCESS;
}

// Abandon the acquired buffer (decode failed or frame dropped); the
// surface is returned to the free pool without entering the output queue.
Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
    if (mAcquiredBuffer == NULL) {
        // this is harmless error
        return DECODE_SUCCESS;
    }

    // frame is not decoded to the acquired buffer, current surface is invalid, and can't be output.
    mAcquiredBuffer->asReferernce = false;
    mAcquiredBuffer->renderBuffer.renderDone = true;
    mAcquiredBuffer = NULL;
    return DECODE_SUCCESS;
}

// Drop everything in the output queue, marking each buffer renderDone so
// its surface becomes reusable; also drops any in-flight frame.
void VideoDecoderBase::flushSurfaceBuffers(void) {
    endDecodingFrame(true);
    VideoSurfaceBuffer *p = NULL;
    while (mOutputHead) {
        mOutputHead->renderBuffer.renderDone = true;
        p = mOutputHead;
        mOutputHead = mOutputHead->next;
        p->next = NULL;
    }
    mOutputHead = NULL;
    mOutputTail = NULL;
}

// Finish the frame currently being decoded: vaEndPicture, then either
// drop it (dropFrame) or hand it to outputSurfaceBuffer(). Always clears
// mDecodingFrame on exit (via the 'exit' label on error paths).
Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
    Decode_Status status = DECODE_SUCCESS;
    VAStatus vaStatus;

    if (mDecodingFrame == false) {
        if (mAcquiredBuffer != NULL) {
            //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
            releaseSurfaceBuffer();
            status = DECODE_FAIL;
        }
        return status;
    }
    // return through exit label to reset mDecodingFrame
    if (mAcquiredBuffer == NULL) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        status = DECODE_FAIL;
        goto exit;
    }

    vaStatus = vaEndPicture(mVADisplay, mVAContext);
    if (vaStatus != VA_STATUS_SUCCESS) {
        releaseSurfaceBuffer();
        ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
        status = DECODE_DRIVER_FAIL;
        goto exit;
    }

    if (dropFrame) {
        // we are asked to drop this decoded picture
        VTRACE("Frame dropped in endDecodingFrame");
        vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
        releaseSurfaceBuffer();
        goto exit;
    }
    status = outputSurfaceBuffer();
    // fall through
exit:
    mDecodingFrame = false;
    return status;
}


// Bring up VA for decoding: choose driver, vaInitialize, create config,
// allocate surfaces (optionally bound to native graphic buffers via
// external-buffer attributes), create the VA context, and allocate the
// surface-buffer bookkeeping array. Idempotent once mVAStarted is set.
// May return DECODE_FORMAT_CHANGE when the client must reallocate
// graphic buffers before VA can be (re)created.
Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface) {
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    Decode_Status status;

    if (mVAStarted) {
        return DECODE_SUCCESS;
    }

    mRotationDegrees = 0;
    if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){
#ifdef TARGET_HAS_ISV
        if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum)
#else
        if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)
#endif
            return DECODE_FORMAT_CHANGE;

        numSurface = mConfigBuffer.surfaceNumber;
        // if format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode,
        // we can not setupVA here when the graphic buffer resolution is smaller than the resolution decoder really needs
        if (mSizeChanged) {
            if (mStoreMetaData || (!mStoreMetaData && (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height))) {
                mSizeChanged = false;
                return DECODE_FORMAT_CHANGE;
            }
        }
    }

    // TODO: validate profile
    if (numSurface == 0) {
        return DECODE_FAIL;
    }

    if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
        if (numSurface < mConfigBuffer.surfaceNumber) {
            WTRACE("surface to allocated %d is less than minimum number required %d",
                    numSurface, mConfigBuffer.surfaceNumber);
            numSurface = mConfigBuffer.surfaceNumber;
        }
    }

    if (mVADisplay != NULL) {
        ETRACE("VA is partially started.");
        return DECODE_FAIL;
    }

    // Display is defined as "unsigned int"
#ifndef USE_HYBRID_DRIVER
    mDisplay = new Display;
    *mDisplay = ANDROID_DISPLAY_HANDLE;
#else
    // hybrid build selects the driver by profile via a magic display string
    if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) {
        ITRACE("Using GEN driver");
        mDisplay = "libva_driver_name=i965";
        mUseGEN = true;
    } else {
        ITRACE("Using PVR driver");
        mDisplay = "libva_driver_name=pvr";
        mUseGEN = false;
    }
#endif
    mVADisplay = vaGetDisplay(mDisplay);
    if (mVADisplay == NULL) {
        ETRACE("vaGetDisplay failed.");
        return DECODE_DRIVER_FAIL;
    }

    int majorVersion, minorVersion;
    vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
    CHECK_VA_STATUS("vaInitialize");

    if ((int32_t)profile != VAProfileSoftwareDecoding) {

        status = checkHardwareCapability();
        CHECK_STATUS("checkHardwareCapability");

#if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
        status = getCodecSpecificConfigs(profile, &mVAConfig);
        CHECK_STATUS("getCodecSpecificAttributes");
#else
        VAConfigAttrib attrib;
        //We are requesting RT attributes
        attrib.type = VAConfigAttribRTFormat;
        attrib.value = VA_RT_FORMAT_YUV420;

        vaStatus = vaCreateConfig(
                mVADisplay,
                profile,
                VAEntrypointVLD,
                &attrib,
                1,
                &mVAConfig);
        CHECK_VA_STATUS("vaCreateConfig");
#endif
    }

    mNumSurfaces = numSurface;
    mNumExtraSurfaces = numExtraSurface;
    // one array holds both decode surfaces and the extra (e.g. VPP) tail
    mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces];
    mExtraSurfaces = mSurfaces + mNumSurfaces;
    for (int i = 0; i < mNumSurfaces + mNumExtraSurfaces; ++i) {
        mSurfaces[i] = VA_INVALID_SURFACE;
    }
    if (mSurfaces == NULL) {
        return DECODE_MEMORY_FAIL;
    }

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);

    int32_t format = VA_RT_FORMAT_YUV420;
    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
#ifndef USE_AVC_SHORT_FORMAT
        format |= VA_RT_FORMAT_PROTECTED;
        WTRACE("Surface is protected.");
#endif
    }
    if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
        if (!mStoreMetaData) {
            // bind surfaces to the client's gralloc buffers via
            // external-buffer descriptor attributes (NV12, 2 planes)
            VASurfaceAttrib attribs[2];
            mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
            if (mVASurfaceAttrib == NULL) {
                return DECODE_MEMORY_FAIL;
            }

            mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
            if (mVASurfaceAttrib->buffers == NULL) {
                return DECODE_MEMORY_FAIL;
            }
            mVASurfaceAttrib->num_buffers = mNumSurfaces;
            mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
            mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
            mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
            // NV12: luma plane + half-size interleaved chroma = 1.5x stride*height
            mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
            mVASurfaceAttrib->num_planes = 2;
            mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
            mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
            mVASurfaceAttrib->pitches[2] = 0;
            mVASurfaceAttrib->pitches[3] = 0;
            mVASurfaceAttrib->offsets[0] = 0;
            mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
            mVASurfaceAttrib->offsets[2] = 0;
            mVASurfaceAttrib->offsets[3] = 0;
            mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
            mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
            if (mConfigBuffer.flag & USE_TILING_MEMORY)
                mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;

            for (int i = 0; i < mNumSurfaces; i++) {
                mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
            }

            attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
            attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
            attribs[0].value.type = VAGenericValueTypeInteger;
            attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;

            attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
            attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
            attribs[1].value.type = VAGenericValueTypePointer;
            attribs[1].value.value.p = (void *)mVASurfaceAttrib;

            vaStatus = vaCreateSurfaces(
                mVADisplay,
                format,
                mVideoFormatInfo.surfaceWidth,
                mVideoFormatInfo.surfaceHeight,
                mSurfaces,
                mNumSurfaces,
                attribs,
                2);
        }
    } else {
        // internally allocated surfaces at the coded resolution
        vaStatus = vaCreateSurfaces(
                mVADisplay,
                format,
                mVideoFormatInfo.width,
                mVideoFormatInfo.height,
                mSurfaces,
                mNumSurfaces,
                NULL,
                0);
        mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
        mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
    }
    CHECK_VA_STATUS("vaCreateSurfaces");

    if (mNumExtraSurfaces != 0) {
        vaStatus = vaCreateSurfaces(
                mVADisplay,
                format,
                mVideoFormatInfo.surfaceWidth,
                mVideoFormatInfo.surfaceHeight,
                mExtraSurfaces,
                mNumExtraSurfaces,
                NULL,
                0);
        CHECK_VA_STATUS("vaCreateSurfaces");
    }

    mVideoFormatInfo.surfaceNumber = mNumSurfaces;
    mVideoFormatInfo.ctxSurfaces = mSurfaces;

    if ((int32_t)profile != VAProfileSoftwareDecoding) {
        if (mStoreMetaData) {
            // metadata mode: surfaces are created later from handles, so the
            // context is created without a render-target list
            if (mUseGEN) {
                vaStatus = vaCreateContext(
                        mVADisplay,
                        mVAConfig,
                        mVideoFormatInfo.surfaceWidth,
                        mVideoFormatInfo.surfaceHeight,
                        0,
                        NULL,
                        0,
                        &mVAContext);
            } else {
                vaStatus = vaCreateContext(
                        mVADisplay,
                        mVAConfig,
                        mVideoFormatInfo.surfaceWidth,
                        mVideoFormatInfo.surfaceHeight,
                        0,
                        NULL,
                        mNumSurfaces + mNumExtraSurfaces,
                        &mVAContext);
            }
        } else {
            vaStatus = vaCreateContext(
                    mVADisplay,
                    mVAConfig,
                    mVideoFormatInfo.surfaceWidth,
                    mVideoFormatInfo.surfaceHeight,
                    0,
                    mSurfaces,
                    mNumSurfaces + mNumExtraSurfaces,
                    &mVAContext);
        }
        CHECK_VA_STATUS("vaCreateContext");
    }

    mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
    if (mSurfaceBuffers == NULL) {
        return DECODE_MEMORY_FAIL;
    }
    initSurfaceBuffer(true);

    if ((int32_t)profile == VAProfileSoftwareDecoding) {
        // derive user pointer from surface for direct access
        status = mapSurface();
        CHECK_STATUS("mapSurface")
    }

    setRotationDegrees(mConfigBuffer.rotationDegrees);

    mVAStarted = true;

    pthread_mutex_lock(&mLock);
    if (mStoreMetaData) {
        // create surfaces for handles already signalled before VA start
        for (uint32_t i = 0; i < mMetaDataBuffersNum; i++) {
            status = createSurfaceFromHandle(i);
            CHECK_STATUS("createSurfaceFromHandle");
            mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
        }
    }
    pthread_mutex_unlock(&mLock);

    return DECODE_SUCCESS;
}

// Tear down everything setupVA() created, in reverse dependency order:
// surface-buffer bookkeeping (incl. raw/mapped data), external-buffer
// attributes, user pointers, VA surfaces, context, config, display.
// Safe to call when VA was never started.
Decode_Status VideoDecoderBase::terminateVA(void) {
    mSignalBufferSize = 0;
    for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
        mSignalBufferPre[i] = NULL;
    }

    if (mVAStarted == false) {
        // VA hasn't been started yet
        return DECODE_SUCCESS;
    }

    if (mSurfaceBuffers) {
        for (int32_t i = 0; i < mNumSurfaces; i++) {
            if (mSurfaceBuffers[i].renderBuffer.rawData) {
                if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
                    delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
                }
                delete mSurfaceBuffers[i].renderBuffer.rawData;
            }
            if (mSurfaceBuffers[i].mappedData) {
                // don't delete data pointer as it is mapped from surface
                delete mSurfaceBuffers[i].mappedData;
            }
        }
        delete [] mSurfaceBuffers;
        mSurfaceBuffers = NULL;
    }

    if (mVASurfaceAttrib) {
        if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
        delete mVASurfaceAttrib;
        mVASurfaceAttrib = NULL;
    }


    if (mSurfaceUserPtr) {
        delete [] mSurfaceUserPtr;
        mSurfaceUserPtr = NULL;
    }

    if (mSurfaces) {
        // in metadata mode only mMetaDataBuffersNum surfaces were created
        vaDestroySurfaces(mVADisplay, mSurfaces, mStoreMetaData ? mMetaDataBuffersNum : (mNumSurfaces + mNumExtraSurfaces));
        delete [] mSurfaces;
        mSurfaces = NULL;
    }

    if (mVAContext != VA_INVALID_ID) {
        vaDestroyContext(mVADisplay, mVAContext);
        mVAContext = VA_INVALID_ID;
    }

    if (mVAConfig != VA_INVALID_ID) {
        vaDestroyConfig(mVADisplay, mVAConfig);
        mVAConfig = VA_INVALID_ID;
    }

    if (mVADisplay) {
        vaTerminate(mVADisplay);
        mVADisplay = NULL;
    }

    if (mDisplay) {
#ifndef USE_HYBRID_DRIVER
        // only owned (new'ed) in the non-hybrid build; hybrid assigns a literal
        delete mDisplay;
#endif
        mDisplay = NULL;
    }

    mVAStarted = false;
    mInitialized = false;
    mErrReportEnabled = false;
    if (mStoreMetaData) {
        mMetaDataBuffersNum = 0;
        mSurfaceAcquirePos = 0;
    }
    return DECODE_SUCCESS;
}

// Feed a buffer through the vbp parser and fetch the parsed result.
//   buffer/size - bitstream data (must be non-NULL / positive)
//   config      - true when this is codec config data (e.g. headers)
//   vbpData     - out: parser's query result
// Works before VA start; only requires the parser to be open.
Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
    // DON'T check if mVAStarted == true
    if (mParserHandle == NULL) {
        return DECODE_NO_PARSER;
    }

    uint32_t vbpStatus;
    if (buffer == NULL || size <= 0) {
        return DECODE_INVALID_DATA;
    }

    uint8_t configFlag = config ? 1 : 0;
    vbpStatus = mParserParse(mParserHandle, buffer, size, configFlag);
    CHECK_VBP_STATUS("vbp_parse");

    vbpStatus = mParserQuery(mParserHandle, vbpData);
    CHECK_VBP_STATUS("vbp_query");

    return DECODE_SUCCESS;
}

// Map each VA surface into CPU-visible memory (software-decoding path):
// derive a VAImage per surface, keep the mapped pointer in
// mSurfaceUserPtr, and describe the layout in each buffer's mappedData.
Decode_Status VideoDecoderBase::mapSurface(void) {
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    VAImage image;
    uint8_t *userPtr;
    mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
    if (mSurfaceUserPtr == NULL) {
        return DECODE_MEMORY_FAIL;
    }

    for (int32_t i = 0; i< mNumSurfaces; i++) {
        vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
        CHECK_VA_STATUS("vaDeriveImage");
        vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
        CHECK_VA_STATUS("vaMapBuffer");
        mSurfaceUserPtr[i] = userPtr;
        mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
        if (mSurfaceBuffers[i].mappedData == NULL) {
            return DECODE_MEMORY_FAIL;
        }
        mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
        mSurfaceBuffers[i].mappedData->data = NULL;  // specified during acquireSurfaceBuffer
        mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
        mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
        mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
        mSurfaceBuffers[i].mappedData->size = image.data_size;
        for (int pi = 0; pi < 3; pi++) {
            mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
            mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
        }
        // debug information
        if (image.pitches[0] != image.pitches[1] ||
            image.width != mVideoFormatInfo.width ||
            image.height != mVideoFormatInfo.height ||
            image.offsets[0] != 0) {
            WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
        }
        // TODO: do we need to unmap buffer?
1188 //vaStatus = vaUnmapBuffer(mVADisplay, image.buf); 1189 //CHECK_VA_STATUS("vaMapBuffer"); 1190 vaStatus = vaDestroyImage(mVADisplay,image.image_id); 1191 CHECK_VA_STATUS("vaDestroyImage"); 1192 1193 } 1194 return DECODE_SUCCESS; 1195 } 1196 1197 Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) { 1198 if (internal) { 1199 if (mAcquiredBuffer == NULL) { 1200 return DECODE_FAIL; 1201 } 1202 renderBuffer = &(mAcquiredBuffer->renderBuffer); 1203 } 1204 1205 VAStatus vaStatus; 1206 VAImage vaImage; 1207 vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface); 1208 CHECK_VA_STATUS("vaSyncSurface"); 1209 1210 vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage); 1211 CHECK_VA_STATUS("vaDeriveImage"); 1212 1213 void *pBuf = NULL; 1214 vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf); 1215 CHECK_VA_STATUS("vaMapBuffer"); 1216 1217 1218 // size in NV12 format 1219 uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight); 1220 uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop); 1221 int32_t size = cropWidth * cropHeight * 3 / 2; 1222 1223 if (internal) { 1224 VideoFrameRawData *rawData = NULL; 1225 if (renderBuffer->rawData == NULL) { 1226 rawData = new VideoFrameRawData; 1227 if (rawData == NULL) { 1228 return DECODE_MEMORY_FAIL; 1229 } 1230 memset(rawData, 0, sizeof(VideoFrameRawData)); 1231 renderBuffer->rawData = rawData; 1232 } else { 1233 rawData = renderBuffer->rawData; 1234 } 1235 1236 if (rawData->data != NULL && rawData->size != size) { 1237 delete [] rawData->data; 1238 rawData->data = NULL; 1239 rawData->size = 0; 1240 } 1241 if (rawData->data == NULL) { 1242 rawData->data = new uint8_t [size]; 1243 if (rawData->data == NULL) { 1244 return DECODE_MEMORY_FAIL; 1245 } 1246 } 1247 1248 rawData->own = true; // allocated 
by this library 1249 rawData->width = cropWidth; 1250 rawData->height = cropHeight; 1251 rawData->pitch[0] = cropWidth; 1252 rawData->pitch[1] = cropWidth; 1253 rawData->pitch[2] = 0; // interleaved U/V, two planes 1254 rawData->offset[0] = 0; 1255 rawData->offset[1] = cropWidth * cropHeight; 1256 rawData->offset[2] = cropWidth * cropHeight * 3 / 2; 1257 rawData->size = size; 1258 rawData->fourcc = 'NV12'; 1259 1260 pRawData = rawData->data; 1261 } else { 1262 *pSize = size; 1263 } 1264 1265 if (size == (int32_t)vaImage.data_size) { 1266 #ifdef __SSE4_1__ 1267 stream_memcpy(pRawData, pBuf, size); 1268 #else 1269 memcpy(pRawData, pBuf, size); 1270 #endif 1271 } else { 1272 // copy Y data 1273 uint8_t *src = (uint8_t*)pBuf; 1274 uint8_t *dst = pRawData; 1275 uint32_t row = 0; 1276 for (row = 0; row < cropHeight; row++) { 1277 #ifdef __SSE4_1__ 1278 stream_memcpy(dst, src, cropWidth); 1279 #else 1280 memcpy(dst, src, cropWidth); 1281 #endif 1282 dst += cropWidth; 1283 src += vaImage.pitches[0]; 1284 } 1285 // copy interleaved V and U data 1286 src = (uint8_t*)pBuf + vaImage.offsets[1]; 1287 for (row = 0; row < cropHeight / 2; row++) { 1288 #ifdef __SSE4_1__ 1289 stream_memcpy(dst, src, cropWidth); 1290 #else 1291 memcpy(dst, src, cropWidth); 1292 #endif 1293 dst += cropWidth; 1294 src += vaImage.pitches[1]; 1295 } 1296 } 1297 1298 vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf); 1299 CHECK_VA_STATUS("vaUnmapBuffer"); 1300 1301 vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id); 1302 CHECK_VA_STATUS("vaDestroyImage"); 1303 1304 return DECODE_SUCCESS; 1305 } 1306 1307 Decode_Status VideoDecoderBase::createSurfaceFromHandle(int index) { 1308 VAStatus vaStatus = VA_STATUS_SUCCESS; 1309 Decode_Status status; 1310 1311 int32_t format = VA_RT_FORMAT_YUV420; 1312 if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) { 1313 #ifndef USE_AVC_SHORT_FORMAT 1314 format |= VA_RT_FORMAT_PROTECTED; 1315 WTRACE("Surface is protected."); 1316 #endif 1317 } 1318 
VASurfaceAttrib attribs[2]; 1319 VASurfaceAttribExternalBuffers surfExtBuf; 1320 surfExtBuf.num_buffers = 1; 1321 surfExtBuf.pixel_format = VA_FOURCC_NV12; 1322 surfExtBuf.width = mVideoFormatInfo.surfaceWidth; 1323 surfExtBuf.height = mVideoFormatInfo.surfaceHeight; 1324 surfExtBuf.data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5; 1325 surfExtBuf.num_planes = 2; 1326 surfExtBuf.pitches[0] = mConfigBuffer.graphicBufferStride; 1327 surfExtBuf.pitches[1] = mConfigBuffer.graphicBufferStride; 1328 surfExtBuf.pitches[2] = 0; 1329 surfExtBuf.pitches[3] = 0; 1330 surfExtBuf.offsets[0] = 0; 1331 surfExtBuf.offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight; 1332 surfExtBuf.offsets[2] = 0; 1333 surfExtBuf.offsets[3] = 0; 1334 surfExtBuf.private_data = (void *)mConfigBuffer.nativeWindow; 1335 surfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; 1336 if (mConfigBuffer.flag & USE_TILING_MEMORY) { 1337 surfExtBuf.flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING; 1338 } 1339 1340 surfExtBuf.buffers = (long unsigned int*)&(mConfigBuffer.graphicBufferHandler[index]); 1341 1342 attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; 1343 attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; 1344 attribs[0].value.type = VAGenericValueTypeInteger; 1345 attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC; 1346 1347 attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; 1348 attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; 1349 attribs[1].value.type = VAGenericValueTypePointer; 1350 attribs[1].value.value.p = (void *)&surfExtBuf; 1351 1352 vaStatus = vaCreateSurfaces( 1353 mVADisplay, 1354 format, 1355 mVideoFormatInfo.surfaceWidth, 1356 mVideoFormatInfo.surfaceHeight, 1357 &(mSurfaces[index]), 1358 1, 1359 attribs, 1360 2); 1361 CHECK_VA_STATUS("vaCreateSurfaces"); 1362 1363 return DECODE_SUCCESS; 1364 } 1365 1366 void VideoDecoderBase::initSurfaceBuffer(bool reset) { 
1367 bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; 1368 if (useGraphicBuffer && reset) { 1369 pthread_mutex_lock(&mLock); 1370 } 1371 for (int32_t i = 0; i < mNumSurfaces; i++) { 1372 mSurfaceBuffers[i].renderBuffer.display = mVADisplay; 1373 mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer 1374 mSurfaceBuffers[i].renderBuffer.flag = 0; 1375 mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE; 1376 mSurfaceBuffers[i].renderBuffer.timeStamp = 0; 1377 mSurfaceBuffers[i].referenceFrame = false; 1378 mSurfaceBuffers[i].asReferernce= false; 1379 mSurfaceBuffers[i].pictureOrder = 0; 1380 mSurfaceBuffers[i].next = NULL; 1381 if (reset == true) { 1382 mSurfaceBuffers[i].renderBuffer.rawData = NULL; 1383 mSurfaceBuffers[i].mappedData = NULL; 1384 } 1385 if (useGraphicBuffer) { 1386 if (reset) { 1387 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i]; 1388 mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false 1389 for (uint32_t j = 0; j < mSignalBufferSize; j++) { 1390 if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) { 1391 mSurfaceBuffers[i].renderBuffer.renderDone = true; 1392 VTRACE("initSurfaceBuffer set renderDone = true index = %d", i); 1393 mSignalBufferPre[j] = NULL; 1394 break; 1395 } 1396 } 1397 } else { 1398 mSurfaceBuffers[i].renderBuffer.renderDone = false; 1399 } 1400 } else { 1401 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL; 1402 mSurfaceBuffers[i].renderBuffer.renderDone = true; 1403 } 1404 mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i; 1405 } 1406 1407 if (useGraphicBuffer && reset) { 1408 mInitialized = true; 1409 mSignalBufferSize = 0; 1410 pthread_mutex_unlock(&mLock); 1411 } 1412 } 1413 1414 Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler, bool isNew) { 1415 Decode_Status status; 1416 if (graphichandler == NULL) { 
1417 return DECODE_SUCCESS; 1418 } 1419 pthread_mutex_lock(&mLock); 1420 bool graphicBufferMode = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER; 1421 if (mStoreMetaData) { 1422 if (!graphicBufferMode) { 1423 pthread_mutex_unlock(&mLock); 1424 return DECODE_SUCCESS; 1425 } 1426 1427 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) { 1428 mConfigBuffer.graphicBufferHandler[mMetaDataBuffersNum] = graphichandler; 1429 if (mInitialized) { 1430 mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferHandle = graphichandler; 1431 mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferIndex = mMetaDataBuffersNum; 1432 } 1433 } 1434 } 1435 int i = 0; 1436 if (!mInitialized) { 1437 if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) { 1438 pthread_mutex_unlock(&mLock); 1439 return DECODE_INVALID_DATA; 1440 } 1441 mSignalBufferPre[mSignalBufferSize++] = graphichandler; 1442 VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize); 1443 } else { 1444 if (!graphicBufferMode) { 1445 pthread_mutex_unlock(&mLock); 1446 return DECODE_SUCCESS; 1447 } 1448 if (mStoreMetaData) { 1449 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) { 1450 if (mVAStarted) { 1451 status = createSurfaceFromHandle(mMetaDataBuffersNum); 1452 CHECK_STATUS("createSurfaceFromHandle") 1453 } 1454 } 1455 } 1456 for (i = 0; i < mNumSurfaces; i++) { 1457 if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) { 1458 mSurfaceBuffers[i].renderBuffer.renderDone = true; 1459 VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i); 1460 break; 1461 } 1462 } 1463 } 1464 1465 if (mStoreMetaData) { 1466 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) { 1467 mMetaDataBuffersNum++; 1468 } 1469 } 1470 1471 pthread_mutex_unlock(&mLock); 1472 1473 return DECODE_SUCCESS; 1474 1475 } 1476 1477 void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* 
surface) { 1478 VASurfaceStatus surfStat = VASurfaceReady; 1479 VAStatus vaStat = VA_STATUS_SUCCESS; 1480 1481 if (!surface) { 1482 LOGW("SurfaceBuffer not ready yet"); 1483 return; 1484 } 1485 surface->renderBuffer.driverRenderDone = true; 1486 1487 #ifndef USE_GEN_HW 1488 if (surface->renderBuffer.surface != VA_INVALID_SURFACE && 1489 (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) { 1490 1491 vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat); 1492 1493 if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady)) 1494 surface->renderBuffer.driverRenderDone = false; 1495 1496 } 1497 #endif 1498 1499 } 1500 1501 // This function should be called before start() to load different type of parsers 1502 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING) 1503 Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) { 1504 if ((int32_t)type != VBP_INVALID) { 1505 ITRACE("Parser Type = %d", (int32_t)type); 1506 mParserType = type; 1507 return DECODE_SUCCESS; 1508 } else { 1509 ETRACE("Invalid parser type = %d", (int32_t)type); 1510 return DECODE_NO_PARSER; 1511 } 1512 } 1513 1514 Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) { 1515 if (mParserHandle == NULL) { 1516 return DECODE_NO_PARSER; 1517 } 1518 1519 uint32_t vbpStatus; 1520 if (buffer == NULL || size <= 0) { 1521 return DECODE_INVALID_DATA; 1522 } 1523 1524 vbpStatus = mParserUpdate(mParserHandle, buffer, size, vbpData); 1525 CHECK_VBP_STATUS("vbp_update"); 1526 1527 return DECODE_SUCCESS; 1528 } 1529 1530 Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) { 1531 if (mParserHandle == NULL) { 1532 return DECODE_NO_PARSER; 1533 } 1534 1535 uint32_t vbpStatus; 1536 vbpStatus = mParserQuery(mParserHandle, vbpData); 1537 CHECK_VBP_STATUS("vbp_query"); 1538 1539 return DECODE_SUCCESS; 1540 } 1541 1542 Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) 
{ 1543 VAStatus vaStatus; 1544 VAConfigAttrib attrib; 1545 attrib.type = VAConfigAttribRTFormat; 1546 attrib.value = VA_RT_FORMAT_YUV420; 1547 1548 if (config == NULL) { 1549 ETRACE("Invalid parameter!"); 1550 return DECODE_FAIL; 1551 } 1552 1553 vaStatus = vaCreateConfig( 1554 mVADisplay, 1555 profile, 1556 VAEntrypointVLD, 1557 &attrib, 1558 1, 1559 config); 1560 1561 CHECK_VA_STATUS("vaCreateConfig"); 1562 1563 return DECODE_SUCCESS; 1564 } 1565 #endif 1566 Decode_Status VideoDecoderBase::checkHardwareCapability() { 1567 return DECODE_SUCCESS; 1568 } 1569 1570 void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) { 1571 if (mErrReportEnabled && outErrBuf && currentSurface) { 1572 memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer)); 1573 1574 currentSurface->errBuf.errorNumber = 0; 1575 currentSurface->errBuf.timeStamp = INVALID_PTS; 1576 } 1577 if (outErrBuf) 1578 VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber); 1579 } 1580 1581 void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) { 1582 VAStatus ret; 1583 1584 if (mErrReportEnabled) { 1585 currentSurface->errBuf.timeStamp = currentSurface->timeStamp; 1586 // TODO: is 10 a suitable number? 
1587 VASurfaceDecodeMBErrors *err_drv_output = NULL; 1588 ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output); 1589 if (ret || !err_drv_output) { 1590 WTRACE("vaQuerySurfaceError failed."); 1591 return; 1592 } 1593 1594 int offset = 0x1 & currentSurface->errBuf.errorNumber;// offset is either 0 or 1 1595 for (int i = 0; i < MAX_ERR_NUM - offset; i++) { 1596 if (err_drv_output[i].status != -1) { 1597 currentSurface->errBuf.errorNumber++; 1598 currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError; 1599 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb; 1600 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb; 1601 currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1; 1602 ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d", 1603 currentSurface->errBuf.errorNumber - 1, 1604 currentSurface->errBuf.errorArray[i + offset].type, 1605 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb, 1606 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb); 1607 } else break; 1608 } 1609 ITRACE("%s: error number of current surface is %d, timestamp @%llu", 1610 __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp); 1611 } 1612 } 1613 1614 void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) { 1615 if (mRotationDegrees == rotationDegrees) { 1616 return; 1617 } 1618 1619 ITRACE("set new rotation degree: %d", rotationDegrees); 1620 VADisplayAttribute rotate; 1621 rotate.type = VADisplayAttribRotation; 1622 rotate.value = VA_ROTATION_NONE; 1623 if (rotationDegrees == 0) 1624 rotate.value = VA_ROTATION_NONE; 1625 else if (rotationDegrees == 90) 1626 rotate.value = VA_ROTATION_90; 1627 else if (rotationDegrees == 180) 1628 rotate.value = VA_ROTATION_180; 1629 else if 
(rotationDegrees == 270) 1630 rotate.value = VA_ROTATION_270; 1631 1632 VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1); 1633 if (ret) { 1634 ETRACE("Failed to set rotation degree."); 1635 } 1636 mRotationDegrees = rotationDegrees; 1637 } 1638 1639 void VideoDecoderBase::setRenderRect() { 1640 1641 if (!mVADisplay) 1642 return; 1643 1644 VAStatus ret; 1645 VARectangle rect; 1646 rect.x = mVideoFormatInfo.cropLeft; 1647 rect.y = mVideoFormatInfo.cropTop; 1648 rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight); 1649 rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop); 1650 1651 VADisplayAttribute render_rect; 1652 render_rect.type = VADisplayAttribRenderRect; 1653 render_rect.attrib_ptr = ▭ 1654 1655 ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1); 1656 if (ret) { 1657 ETRACE("Failed to set rotation degree."); 1658 } 1659 } 1660 1661 void VideoDecoderBase::setColorSpaceInfo(int32_t colorMatrix, int32_t videoRange) { 1662 ITRACE("set colorMatrix: 0x%x ", colorMatrix); 1663 VADisplayAttribute cm; 1664 cm.type = VADisplayAttribCSCMatrix; 1665 if (colorMatrix == VA_SRC_BT601) { 1666 cm.attrib_ptr = &s601; 1667 } else if (colorMatrix == VA_SRC_BT709) { 1668 cm.attrib_ptr = &s709; 1669 } else { 1670 // if we can't get the color matrix or it's not BT601 or BT709 1671 // we decide the color matrix according to clip resolution 1672 if (mVideoFormatInfo.width < 1280 && mVideoFormatInfo.height < 720) 1673 cm.attrib_ptr = &s601; 1674 else 1675 cm.attrib_ptr = &s709; 1676 } 1677 1678 VAStatus ret = vaSetDisplayAttributes(mVADisplay, &cm, 1); 1679 1680 if (ret) { 1681 ETRACE("Failed to set colorMatrix."); 1682 } 1683 1684 // 1: full range, 0: reduced range 1685 ITRACE("set videoRange: %d ", videoRange); 1686 VADisplayAttribute vr; 1687 vr.type = VADisplayAttribColorRange; 1688 vr.value = (videoRange == 1) ? 
VA_SOURCE_RANGE_FULL : VA_SOURCE_RANGE_REDUCED; 1689 1690 ret = vaSetDisplayAttributes(mVADisplay, &vr, 1); 1691 1692 if (ret) { 1693 ETRACE("Failed to set videoRange."); 1694 } 1695 } 1696