/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "StagefrightMetadataRetriever"

#include <inttypes.h>

#include <utils/Log.h>
#include <gui/Surface.h>

#include "include/avc_utils.h"
#include "include/StagefrightMetadataRetriever.h"

#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/MediaCodecBuffer.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include <media/CharacterEncodingDetector.h>

namespace android {

// How long a single dequeue of a codec input/output buffer may block
// before it is treated as a timeout (see extractVideoFrame below).
static const int64_t kBufferTimeOutUs = 30000ll; // 30 msec
// Number of timed-out output dequeues tolerated before giving up.
static const size_t kRetryCount = 20; // must be >0

// Constructor: no extractor/source yet; metadata is parsed lazily.
StagefrightMetadataRetriever::StagefrightMetadataRetriever()
    : mParsedMetaData(false),
      mAlbumArt(NULL) {
    ALOGV("StagefrightMetadataRetriever()");
}

StagefrightMetadataRetriever::~StagefrightMetadataRetriever() {
    ALOGV("~StagefrightMetadataRetriever()");
    clearMetadata();
    // Explicitly release extractor before continuing with the destructor,
    // some extractors might need to callback to close off the DataSource
    // and we need to make sure it's still there.
    if (mExtractor != NULL) {
        mExtractor->release();
    }
    if (mSource != NULL) {
        mSource->close();
    }
}

// Points the retriever at a URI-backed data source. Any previously parsed
// metadata is discarded. Returns UNKNOWN_ERROR if either the DataSource or
// a matching MediaExtractor cannot be created; on failure mSource is cleared
// so the object is left with no half-initialized state.
status_t StagefrightMetadataRetriever::setDataSource(
        const sp<IMediaHTTPService> &httpService,
        const char *uri,
        const KeyedVector<String8, String8> *headers) {
    ALOGV("setDataSource(%s)", uri);

    clearMetadata();
    mSource = DataSource::CreateFromURI(httpService, uri, headers);

    if (mSource == NULL) {
        ALOGE("Unable to create data source for '%s'.", uri);
        return UNKNOWN_ERROR;
    }

    mExtractor = MediaExtractor::Create(mSource);

    if (mExtractor == NULL) {
        ALOGE("Unable to instantiate an extractor for '%s'.", uri);

        mSource.clear();

        return UNKNOWN_ERROR;
    }

    return OK;
}

// Warning caller retains ownership of the filedescriptor! Dup it if necessary.
// File-descriptor variant: the fd is dup()'ed immediately so the caller keeps
// ownership of the original descriptor; FileSource takes over the duplicate.
status_t StagefrightMetadataRetriever::setDataSource(
        int fd, int64_t offset, int64_t length) {
    // NOTE(review): dup() failure (-1) is not checked here; FileSource's
    // initCheck() below is relied on to surface that error.
    fd = dup(fd);

    ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);

    clearMetadata();
    mSource = new FileSource(fd, offset, length);

    status_t err;
    if ((err = mSource->initCheck()) != OK) {
        mSource.clear();

        return err;
    }

    mExtractor = MediaExtractor::Create(mSource);

    if (mExtractor == NULL) {
        mSource.clear();

        return UNKNOWN_ERROR;
    }

    return OK;
}

// Variant taking an already-constructed DataSource, with an optional mime
// hint forwarded to MediaExtractor::Create.
status_t StagefrightMetadataRetriever::setDataSource(
        const sp<DataSource>& source, const char *mime) {
    ALOGV("setDataSource(DataSource)");

    clearMetadata();
    mSource = source;
    mExtractor = MediaExtractor::Create(mSource, mime);

    if (mExtractor == NULL) {
        ALOGE("Failed to instantiate a MediaExtractor.");
        mSource.clear();
        return UNKNOWN_ERROR;
    }

    return OK;
}

// Allocates the VideoFrame that will be returned to the caller.
// Rotation, ICC profile and display dimensions are pulled from the track
// metadata; when metaOnly is true no pixel storage is allocated (the
// VideoFrame ctor's "allocate" argument is !metaOnly).
static VideoFrame *allocVideoFrame(
        const sp<MetaData> &trackMeta, int32_t width, int32_t height, int32_t bpp, bool metaOnly) {
    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    uint32_t type;
    const void *iccData;
    size_t iccSize;
    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
        iccData = NULL;
        iccSize = 0;
    }

    int32_t sarWidth, sarHeight;
    int32_t displayWidth, displayHeight;
    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
            && sarHeight != 0) {
        // Sample-aspect-ratio present: widen (or narrow) the display width.
        displayWidth = (width * sarWidth) / sarHeight;
        displayHeight = height;
    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
            && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
            && displayWidth > 0 && displayHeight > 0
            && width > 0 && height > 0) {
        ALOGV("found display size %dx%d",
                displayWidth, displayHeight);
    } else {
        // No SAR and no usable display size: display equals coded size.
        displayWidth = width;
        displayHeight = height;
    }

    return new VideoFrame(width, height, displayWidth, displayHeight,
            rotationAngle, bpp, !metaOnly, iccData, iccSize);
}

// Maps a requested HAL pixel format to the OMX color format the decoder
// output will be converted into, along with its bytes-per-pixel.
// Returns false (and logs) for unsupported formats.
static bool getDstColorFormat(android_pixel_format_t colorFormat,
        OMX_COLOR_FORMATTYPE *omxColorFormat, int32_t *bpp) {
    switch (colorFormat) {
        case HAL_PIXEL_FORMAT_RGB_565:
        {
            *omxColorFormat = OMX_COLOR_Format16bitRGB565;
            *bpp = 2;
            return true;
        }
        case HAL_PIXEL_FORMAT_RGBA_8888:
        {
            *omxColorFormat = OMX_COLOR_Format32BitRGBA8888;
            *bpp = 4;
            return true;
        }
        case HAL_PIXEL_FORMAT_BGRA_8888:
        {
            *omxColorFormat = OMX_COLOR_Format32bitBGRA8888;
            *bpp = 4;
            return true;
        }
        default:
        {
            ALOGE("Unsupported color format: %d", colorFormat);
            break;
        }
    }
    return false;
}

// Decodes one frame (or one grid of tiles for tiled HEIF) from `source`
// using the named MediaCodec component and converts it to the requested
// color format. frameTimeUs < 0 means "thumbnail" selection. Returns NULL
// on any failure so the caller can try the next matching decoder.
static VideoFrame *extractVideoFrame(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source,
        int64_t frameTimeUs,
        int seekMode,
        int colorFormat,
        bool metaOnly) {
    sp<MetaData> format = source->getFormat();

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    int32_t dstBpp;
    OMX_COLOR_FORMATTYPE dstFormat;
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
        return NULL;
    }

    if (metaOnly) {
        // Caller only wants dimensions/rotation/ICC, no decode.
        int32_t width, height;
        CHECK(trackMeta->findInt32(kKeyWidth, &width));
        CHECK(trackMeta->findInt32(kKeyHeight, &height));
        return allocVideoFrame(trackMeta, width, height, dstBpp, true);
    }

    MediaSource::ReadOptions options;
    sp<MetaData> overrideMeta;
    if
    (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;
        int64_t thumbNailTime;
        int32_t thumbnailWidth, thumbnailHeight;

        // if we have a stand-alone thumbnail, set up the override meta,
        // and set seekTo time to -1.
        if (trackMeta->findInt32(kKeyThumbnailWidth, &thumbnailWidth)
                && trackMeta->findInt32(kKeyThumbnailHeight, &thumbnailHeight)
                && trackMeta->findData(kKeyThumbnailHVCC, &type, &data, &size)){
            overrideMeta = new MetaData(*trackMeta);
            overrideMeta->remove(kKeyDisplayWidth);
            overrideMeta->remove(kKeyDisplayHeight);
            overrideMeta->setInt32(kKeyWidth, thumbnailWidth);
            overrideMeta->setInt32(kKeyHeight, thumbnailHeight);
            // Thumbnail carries its own HEVC config record.
            overrideMeta->setData(kKeyHVCC, type, data, size);
            thumbNailTime = -1ll;
            ALOGV("thumbnail: %dx%d", thumbnailWidth, thumbnailHeight);
        } else if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            // No usable thumbnail time published: fall back to time 0.
            thumbNailTime = 0;
        }

        options.setSeekTo(thumbNailTime, mode);
    } else {
        options.setSeekTo(frameTimeUs, mode);
    }

    int32_t gridRows = 1, gridCols = 1;
    if (overrideMeta == NULL) {
        // check if we're dealing with a tiled heif
        int32_t gridWidth, gridHeight;
        if (trackMeta->findInt32(kKeyGridWidth, &gridWidth) && gridWidth > 0
                && trackMeta->findInt32(kKeyGridHeight, &gridHeight) && gridHeight > 0) {
            int32_t width, height, displayWidth, displayHeight;
            CHECK(trackMeta->findInt32(kKeyWidth, &width));
            CHECK(trackMeta->findInt32(kKeyHeight, &height));
            CHECK(trackMeta->findInt32(kKeyDisplayWidth, &displayWidth));
            CHECK(trackMeta->findInt32(kKeyDisplayHeight, &displayHeight));

            // The full image must be an exact multiple of the tile size.
            if (width >= displayWidth && height >= displayHeight
                    && (width % gridWidth == 0) && (height % gridHeight == 0)) {
                ALOGV("grid config: %dx%d, display %dx%d, grid %dx%d",
                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);

                overrideMeta = new MetaData(*trackMeta);
                // Decode tile-by-tile: the codec is configured with the
                // tile dimensions, and tiles are stitched after conversion.
                overrideMeta->remove(kKeyDisplayWidth);
                overrideMeta->remove(kKeyDisplayHeight);
                overrideMeta->setInt32(kKeyWidth, gridWidth);
                overrideMeta->setInt32(kKeyHeight, gridHeight);
                gridCols = width / gridWidth;
                gridRows = height / gridHeight;
            } else {
                ALOGE("Bad grid config: %dx%d, display %dx%d, grid %dx%d",
                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta;
        }
    }
    int32_t numTiles = gridRows * gridCols;

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // For the thumbnail extraction case, try to allocate single buffer in both
    // input and output ports, if seeking to a sync frame. NOTE: This request may
    // fail if component requires more than that for decoding.
    bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
    bool decodeSingleFrame = !isSeekingClosest && (numTiles == 1);
    if (decodeSingleFrame) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }

    status_t err;
    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
            looper, componentName, &err);

    if (decoder.get() == NULL || err != OK) {
        ALOGW("Failed to instantiate decoder [%s]", componentName.c_str());
        return NULL;
    }

    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        return NULL;
    }

    err = decoder->start();
    if (err != OK) {
        ALOGW("start returned error %d (%s)", err, asString(err));
        decoder->release();
        return NULL;
    }

    err = source->start();
    if (err != OK) {
        ALOGW("source failed to start: %d (%s)", err, asString(err));
        decoder->release();
        return NULL;
    }

    Vector<sp<MediaCodecBuffer> > inputBuffers;
    err = decoder->getInputBuffers(&inputBuffers);
    if (err != OK) {
        ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
        decoder->release();
        source->stop();
        return NULL;
    }

    Vector<sp<MediaCodecBuffer> > outputBuffers;
    err = decoder->getOutputBuffers(&outputBuffers);
    if (err != OK) {
        ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
        decoder->release();
        source->stop();
        return NULL;
    }

    sp<AMessage> outputFormat = NULL;
    bool haveMoreInputs = true;
    size_t index, offset, size;
    int64_t timeUs;
    size_t retriesLeft = kRetryCount;
    bool done = false;
    const char *mime;
    bool success = format->findCString(kKeyMIMEType, &mime);
    if
    (!success) {
        ALOGE("Could not find mime type");
        return NULL;
    }

    bool isAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);

    bool firstSample = true;
    int64_t targetTimeUs = -1ll;

    VideoFrame *frame = NULL;
    int32_t tilesDecoded = 0;

    // Main decode loop: feed one input buffer, then drain outputs, until a
    // suitable frame (or all tiles) has been converted, or an error occurs.
    do {
        size_t inputIndex = -1;
        int64_t ptsUs = 0ll;
        uint32_t flags = 0;
        sp<MediaCodecBuffer> codecBuffer = NULL;

        while (haveMoreInputs) {
            err = decoder->dequeueInputBuffer(&inputIndex, kBufferTimeOutUs);
            if (err != OK) {
                ALOGW("Timed out waiting for input");
                // NOTE(review): the input timeout path clears err but does
                // not decrement retriesLeft; the retry budget is only spent
                // on output dequeues below.
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            codecBuffer = inputBuffers[inputIndex];

            MediaBuffer *mediaBuffer = NULL;

            err = source->read(&mediaBuffer, &options);
            // Only the first read honors the seek request.
            options.clearSeekTo();
            if (err != OK) {
                ALOGW("Input Error or EOS");
                haveMoreInputs = false;
                if (err == ERROR_END_OF_STREAM) {
                    err = OK;
                }
                break;
            }
            if (firstSample && isSeekingClosest) {
                // SEEK_CLOSEST: extractor reports the exact target so we can
                // keep decoding past the sync frame up to it.
                mediaBuffer->meta_data()->findInt64(kKeyTargetTime, &targetTimeUs);
                ALOGV("Seeking closest: targetTimeUs=%lld", (long long)targetTimeUs);
            }
            firstSample = false;

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                haveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data()->findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());
            }

            mediaBuffer->release();
            break;
        }

        if (haveMoreInputs && inputIndex < inputBuffers.size()) {
            if (isAvcOrHevc && IsIDR(codecBuffer) && decodeSingleFrame) {
                // Only need to decode one IDR frame, unless we're
                // seeking with CLOSEST option, in which case we need to
                // actually decode to targetTimeUs.
                haveMoreInputs = false;
                flags |= MediaCodec::BUFFER_FLAG_EOS;
            }

            ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                    codecBuffer->size(), ptsUs, flags);
            err = decoder->queueInputBuffer(
                    inputIndex,
                    codecBuffer->offset(),
                    codecBuffer->size(),
                    ptsUs,
                    flags);

            // we don't expect an output from codec config buffer
            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
                continue;
            }
        }

        while (err == OK) {
            // wait for a decoded buffer
            err = decoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &timeUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                err = decoder->getOutputFormat(&outputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = decoder->getOutputBuffers(&outputBuffers);
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    // Timeout: burn one retry and go feed more input.
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);

                    int32_t width, height;
                    CHECK(outputFormat != NULL);
                    CHECK(outputFormat->findInt32("width", &width));
                    CHECK(outputFormat->findInt32("height", &height));

                    int32_t crop_left, crop_top, crop_right, crop_bottom;
                    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
                        // No crop rect published: use the full coded frame.
                        crop_left = crop_top = 0;
                        crop_right = width - 1;
                        crop_bottom = height - 1;
                    }

                    if (frame == NULL) {
                        // Destination frame covers the full tile grid
                        // (gridCols/gridRows are 1 for a plain video frame).
                        frame = allocVideoFrame(
                                trackMeta,
                                (crop_right - crop_left + 1) * gridCols,
                                (crop_bottom - crop_top + 1) * gridRows,
                                dstBpp,
                                false /*metaOnly*/);
                    }

                    int32_t srcFormat;
                    CHECK(outputFormat->findInt32("color-format", &srcFormat));

                    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat);

                    int32_t dstLeft, dstTop, dstRight, dstBottom;
                    if (numTiles == 1) {
                        dstLeft = crop_left;
                        dstTop = crop_top;
                        dstRight = crop_right;
                        dstBottom = crop_bottom;
                    } else {
                        // Place this tile at its row/column in the grid.
                        dstLeft = tilesDecoded % gridCols * width;
                        dstTop = tilesDecoded / gridCols * height;
                        dstRight = dstLeft + width - 1;
                        dstBottom = dstTop + height - 1;
                    }

                    if (converter.isValid()) {
                        err = converter.convert(
                                (const uint8_t *)videoFrameBuffer->data(),
                                width, height,
                                crop_left, crop_top, crop_right, crop_bottom,
                                frame->mData,
                                frame->mWidth,
                                frame->mHeight,
                                dstLeft, dstTop, dstRight, dstBottom);
                    } else {
                        ALOGE("Unable to convert from format 0x%08x to 0x%08x",
                                srcFormat, dstFormat);

                        err = ERROR_UNSUPPORTED;
                    }

                    // Done once we reached the target time (or had none),
                    // and, for tiled images, once every tile is stitched.
                    done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
                    if (numTiles > 1) {
                        tilesDecoded++;
                        done &= (tilesDecoded >= numTiles);
                    }
                    err = decoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output",
                            err, asString(err));
                    done = true;
                }
                break;
            }
        }
    } while (err == OK && !done);

    source->stop();
    decoder->release();

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
        delete frame;
        frame = NULL;
    }

    return frame;
}

// Public entry point: locates the first video track, then tries each
// matching decoder (software preferred) until one yields a frame.
// Returns NULL if there is no extractor, no video track, DRM forbids
// frame grabs, or every decoder fails.
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
        int64_t timeUs, int option, int colorFormat, bool metaOnly) {

    ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
            timeUs, option, colorFormat, metaOnly);

    if (mExtractor.get() == NULL) {
        ALOGV("no extractor.");
        return NULL;
    }

    sp<MetaData> fileMeta = mExtractor->getMetaData();

    if (fileMeta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return NULL;
    }

    int32_t drm = 0;
    if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
        ALOGE("frame grab not allowed.");
        return NULL;
    }

    // Find the first track whose mime type starts with "video/".
    size_t n = mExtractor->countTracks();
    size_t i;
    for (i = 0; i < n; ++i) {
        sp<MetaData> meta = mExtractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp(mime, "video/", 6)) {
            break;
        }
    }

    if (i == n) {
        ALOGV("no video track found.");
        return NULL;
    }

    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
            i, MediaExtractor::kIncludeExtensiveMetaData);

    sp<IMediaSource> source = mExtractor->getTrack(i);

    if (source.get() == NULL) {
        ALOGV("unable to instantiate video track.");
        return NULL;
    }

    // Opportunistically cache album art while we have the file metadata.
    const void *data;
    uint32_t type;
    size_t dataSize;
    if (fileMeta->findData(kKeyAlbumArt, &type, &data, &dataSize)
            && mAlbumArt == NULL) {
        mAlbumArt = MediaAlbumArt::fromData(dataSize, data);
    }

    const char *mime;
    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            mime,
            false, /* encoder */
            MediaCodecList::kPreferSoftwareCodecs,
            &matchingCodecs);

    // Try each candidate decoder in order until one succeeds.
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        const AString &componentName = matchingCodecs[i];
        VideoFrame *frame = extractVideoFrame(
                componentName, trackMeta, source, timeUs, option, colorFormat, metaOnly);

        if (frame != NULL) {
            return frame;
        }
        ALOGV("%s failed to extract thumbnail, trying next decoder.", componentName.c_str());
    }

    return NULL;
}

// Returns a copy of the (lazily parsed) embedded album art, or NULL.
// Caller owns the returned clone.
MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() {
    ALOGV("extractAlbumArt (extractor: %s)", mExtractor.get() != NULL ? "YES" : "NO");

    if (mExtractor == NULL) {
        return NULL;
    }

    if (!mParsedMetaData) {
        parseMetaData();

        mParsedMetaData = true;
    }

    if (mAlbumArt) {
        return mAlbumArt->clone();
    }

    return NULL;
}

// Returns the metadata value for keyCode, or NULL if absent. The returned
// pointer refers to storage owned by mMetaData (valid until clearMetadata).
const char *StagefrightMetadataRetriever::extractMetadata(int keyCode) {
    if (mExtractor == NULL) {
        return NULL;
    }

    if (!mParsedMetaData) {
        parseMetaData();

        mParsedMetaData = true;
    }

    ssize_t index = mMetaData.indexOfKey(keyCode);

    if (index < 0) {
        return NULL;
    }

    return mMetaData.valueAt(index).string();
}

// Populates mMetaData from the extractor: maps container keys to
// METADATA_KEY_* values, runs textual tags through the character-encoding
// detector, and derives duration/bitrate/dimension entries from the tracks.
void StagefrightMetadataRetriever::parseMetaData() {
    sp<MetaData> meta = mExtractor->getMetaData();

    if (meta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return;
    }

    // Mapping from extractor metadata keys to public METADATA_KEY_* codes.
    // Entries with a non-NULL name go through the encoding detector first.
    struct Map {
        int from;
        int to;
        const char *name;
    };
    static const Map kMap[] = {
        { kKeyMIMEType, METADATA_KEY_MIMETYPE, NULL },
        { kKeyCDTrackNumber, METADATA_KEY_CD_TRACK_NUMBER, "tracknumber" },
        { kKeyDiscNumber, METADATA_KEY_DISC_NUMBER, "discnumber" },
        { kKeyAlbum, METADATA_KEY_ALBUM, "album" },
        { kKeyArtist, METADATA_KEY_ARTIST, "artist" },
        { kKeyAlbumArtist,
                METADATA_KEY_ALBUMARTIST, "albumartist" },
        { kKeyAuthor, METADATA_KEY_AUTHOR, NULL },
        { kKeyComposer, METADATA_KEY_COMPOSER, "composer" },
        { kKeyDate, METADATA_KEY_DATE, NULL },
        { kKeyGenre, METADATA_KEY_GENRE, "genre" },
        { kKeyTitle, METADATA_KEY_TITLE, "title" },
        { kKeyYear, METADATA_KEY_YEAR, "year" },
        { kKeyWriter, METADATA_KEY_WRITER, "writer" },
        { kKeyCompilation, METADATA_KEY_COMPILATION, "compilation" },
        { kKeyLocation, METADATA_KEY_LOCATION, NULL },
    };

    static const size_t kNumMapEntries = sizeof(kMap) / sizeof(kMap[0]);

    CharacterEncodingDetector *detector = new CharacterEncodingDetector();

    for (size_t i = 0; i < kNumMapEntries; ++i) {
        const char *value;
        if (meta->findCString(kMap[i].from, &value)) {
            if (kMap[i].name) {
                // add to charset detector
                detector->addTag(kMap[i].name, value);
            } else {
                // directly add to output list
                mMetaData.add(kMap[i].to, String8(value));
            }
        }
    }

    // Convert detected tags to UTF-8 (or whatever the detector settled on)
    // and copy them into the output list by matching tag names back to kMap.
    detector->detectAndConvert();
    int size = detector->size();
    if (size) {
        for (int i = 0; i < size; i++) {
            const char *name;
            const char *value;
            detector->getTag(i, &name, &value);
            for (size_t j = 0; j < kNumMapEntries; ++j) {
                if (kMap[j].name && !strcmp(kMap[j].name, name)) {
                    mMetaData.add(kMap[j].to, String8(value));
                }
            }
        }
    }
    delete detector;

    const void *data;
    uint32_t type;
    size_t dataSize;
    if (meta->findData(kKeyAlbumArt, &type, &data, &dataSize)
            && mAlbumArt == NULL) {
        mAlbumArt = MediaAlbumArt::fromData(dataSize, data);
    }

    size_t numTracks = mExtractor->countTracks();

    // Scratch buffer for stringifying numeric metadata values below.
    char tmp[32];
    sprintf(tmp, "%zu", numTracks);

    mMetaData.add(METADATA_KEY_NUM_TRACKS, String8(tmp));

    float captureFps;
    if (meta->findFloat(kKeyCaptureFramerate, &captureFps)) {
        sprintf(tmp, "%f", captureFps);
        mMetaData.add(METADATA_KEY_CAPTURE_FRAMERATE, String8(tmp));
    }
    bool hasAudio = false;
    bool hasVideo = false;
    int32_t videoWidth = -1;
    int32_t videoHeight = -1;
    int32_t audioBitrate = -1;
    int32_t rotationAngle = -1;

    // The overall duration is the duration of the longest track.
    int64_t maxDurationUs = 0;
    String8 timedTextLang;
    for (size_t i = 0; i < numTracks; ++i) {
        sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);

        int64_t durationUs;
        if (trackMeta->findInt64(kKeyDuration, &durationUs)) {
            if (durationUs > maxDurationUs) {
                maxDurationUs = durationUs;
            }
        }

        const char *mime;
        if (trackMeta->findCString(kKeyMIMEType, &mime)) {
            // Only the FIRST audio and FIRST video track contribute
            // bitrate/dimension/rotation values.
            if (!hasAudio && !strncasecmp("audio/", mime, 6)) {
                hasAudio = true;

                if (!trackMeta->findInt32(kKeyBitRate, &audioBitrate)) {
                    audioBitrate = -1;
                }
            } else if (!hasVideo && !strncasecmp("video/", mime, 6)) {
                hasVideo = true;

                CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
                CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
                if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
                    rotationAngle = 0;
                }
            } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
                const char *lang;
                if (trackMeta->findCString(kKeyMediaLanguage, &lang)) {
                    timedTextLang.append(String8(lang));
                    timedTextLang.append(String8(":"));
                } else {
                    ALOGE("No language found for timed text");
                }
            }
        }
    }

    // To save the language codes for all timed text tracks
    // If multiple text tracks present, the format will look
    // like "eng:chi"
    if (!timedTextLang.isEmpty()) {
        mMetaData.add(METADATA_KEY_TIMED_TEXT_LANGUAGES, timedTextLang);
    }

    // The duration value is a string representing the duration in ms.
    // Round microseconds to the nearest millisecond.
    sprintf(tmp, "%" PRId64, (maxDurationUs + 500) / 1000);
    mMetaData.add(METADATA_KEY_DURATION, String8(tmp));

    if (hasAudio) {
        mMetaData.add(METADATA_KEY_HAS_AUDIO, String8("yes"));
    }

    if (hasVideo) {
        mMetaData.add(METADATA_KEY_HAS_VIDEO, String8("yes"));

        sprintf(tmp, "%d", videoWidth);
        mMetaData.add(METADATA_KEY_VIDEO_WIDTH, String8(tmp));

        sprintf(tmp, "%d", videoHeight);
        mMetaData.add(METADATA_KEY_VIDEO_HEIGHT, String8(tmp));

        sprintf(tmp, "%d", rotationAngle);
        mMetaData.add(METADATA_KEY_VIDEO_ROTATION, String8(tmp));
    }

    if (numTracks == 1 && hasAudio && audioBitrate >= 0) {
        sprintf(tmp, "%d", audioBitrate);
        mMetaData.add(METADATA_KEY_BITRATE, String8(tmp));
    } else {
        // Fall back to an average bitrate computed from file size and
        // overall duration (bits = bytes * 8, duration in microseconds).
        off64_t sourceSize;
        if (mSource != NULL && mSource->getSize(&sourceSize) == OK) {
            int64_t avgBitRate = (int64_t)(sourceSize * 8E6 / maxDurationUs);

            sprintf(tmp, "%" PRId64, avgBitRate);
            mMetaData.add(METADATA_KEY_BITRATE, String8(tmp));
        }
    }

    if (numTracks == 1) {
        const char *fileMIME;

        if (meta->findCString(kKeyMIMEType, &fileMIME) &&
                !strcasecmp(fileMIME, "video/x-matroska")) {
            sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
            const char *trackMIME;
            CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));

            if (!strncasecmp("audio/", trackMIME, 6)) {
                // The matroska file only contains a single audio track,
                // rewrite its mime type.
                mMetaData.add(
                        METADATA_KEY_MIMETYPE, String8("audio/x-matroska"));
            }
        }
    }
}

// Drops all cached metadata and album art; called from the destructor and
// from every setDataSource overload before installing a new source.
void StagefrightMetadataRetriever::clearMetadata() {
    mParsedMetaData = false;
    mMetaData.clear();
    delete mAlbumArt;
    mAlbumArt = NULL;
}

}  // namespace android