/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftVideoEncoderOMXComponent"
#include <utils/Log.h>
#include <utils/misc.h>

#include "include/SoftVideoEncoderOMXComponent.h"

#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>

#include <ui/Fence.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>

#include <hardware/gralloc.h>

#include <OMX_IndexExt.h>

namespace android {

// Raw color formats accepted on the input port, listed in order of
// preference (this order is reported to clients via
// OMX_IndexParamVideoPortFormat enumeration in internalGetParameter).
const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
    OMX_COLOR_FormatYUV420Planar,
    OMX_COLOR_FormatYUV420SemiPlanar,
    OMX_COLOR_FormatAndroidOpaque
};

// Fills in the common OMX struct header: struct size and spec version
// 1.0.0.0, as required on every OMX parameter structure.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

// Base class constructor for software video encoder OMX components.
// Stores the codec identity (role, coding type, supported profile/levels)
// and seeds the encoder configuration with defaults; ports themselves are
// created later via initPorts().
SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
        const char *name,
        const char *componentRole,
        OMX_VIDEO_CODINGTYPE codingType,
        const CodecProfileLevel *profileLevels,
        size_t numProfileLevels,
        int32_t width,
        int32_t height,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mInputDataIsMeta(false),
      mWidth(width),
      mHeight(height),
      mBitrate(192000),
      mFramerate(30 << 16), // Q16 format
      mColorFormat(OMX_COLOR_FormatYUV420Planar),
      mMinOutputBufferSize(384), // arbitrary, using one uncompressed macroblock
      mMinCompressionRatio(1), // max output size is normally the input size
      mComponentRole(componentRole),
      mCodingType(codingType),
      mProfileLevels(profileLevels),
      mNumProfileLevels(numProfileLevels) {
}

// Creates the two ports: input (raw "video/raw" frames) and output
// (compressed |mime| bitstream). |outputBufferSize| and
// |minCompressionRatio| seed the output buffer sizing performed in
// updatePortParams().
void SoftVideoEncoderOMXComponent::initPorts(
        OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
        const char *mime, OMX_U32 minCompressionRatio) {
    OMX_PARAM_PORTDEFINITIONTYPE def;

    mMinOutputBufferSize = outputBufferSize;
    mMinCompressionRatio = minCompressionRatio;

    InitOMXParams(&def);

    def.nPortIndex = kInputPortIndex;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = numInputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    // frameRate is in Q16 format.
    def.format.video.xFramerate = mFramerate;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.nBufferAlignment = kInputBufferAlignment;
    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = mColorFormat;
    def.format.video.pNativeWindow = NULL;
    // buffersize set in updatePortParams

    addPort(def);

    InitOMXParams(&def);

    def.nPortIndex = kOutputPortIndex;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = numOutputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = 0;
    def.format.video.nSliceHeight = 0;
    def.format.video.nBitrate = mBitrate;
    def.format.video.xFramerate = 0 << 16;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.nBufferAlignment = kOutputBufferAlignment;
    def.format.video.cMIMEType = const_cast<char *>(mime);
    def.format.video.eCompressionFormat = mCodingType;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.pNativeWindow = NULL;
    // buffersize set in updatePortParams

    addPort(def);

    updatePortParams();
}

// Recomputes the derived port-definition fields (frame geometry, color
// format, buffer sizes) from the current mWidth/mHeight/mFramerate/
// mColorFormat/mBitrate. Called after any setting that affects them.
void SoftVideoEncoderOMXComponent::updatePortParams() {
    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
    inDef->format.video.nFrameWidth = mWidth;
    inDef->format.video.nFrameHeight = mHeight;
    inDef->format.video.nStride = inDef->format.video.nFrameWidth;
    inDef->format.video.nSliceHeight = inDef->format.video.nFrameHeight;
    inDef->format.video.xFramerate = mFramerate;
    inDef->format.video.eColorFormat = mColorFormat;
    // YUV 4:2:0 frame size: Y plane plus two quarter-size chroma planes.
    uint32_t rawBufferSize =
        inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
    if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
        // Opaque input carries metadata (a buffer reference), not pixels, so
        // the buffer only needs to hold the larger of the two metadata structs.
        inDef->nBufferSize = max(sizeof(VideoNativeMetadata), sizeof(VideoGrallocMetadata));
    } else {
        inDef->nBufferSize = rawBufferSize;
    }

    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
    outDef->format.video.nFrameWidth = mWidth;
    outDef->format.video.nFrameHeight = mHeight;
    outDef->format.video.nBitrate = mBitrate;

    // Worst-case compressed size, bounded below by the codec's minimum.
    outDef->nBufferSize = max(mMinOutputBufferSize, rawBufferSize / mMinCompressionRatio);
}

// Validates and applies an OMX_IndexParamPortDefinition request: frame
// size/framerate/color format on the input port, bitrate on the output
// port. Returns an OMX error if the requested formats are unsupported.
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
        const OMX_PARAM_PORTDEFINITIONTYPE *port) {

    if (!isValidOMXParam(port)) {
        return OMX_ErrorBadParameter;
    }

    if (port->nPortIndex == kInputPortIndex) {
        mWidth = port->format.video.nFrameWidth;
        mHeight = port->format.video.nFrameHeight;

        // xFramerate comes in Q16 format, in frames per second unit
        mFramerate = port->format.video.xFramerate;

        if (port->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused
                || (port->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar
                        && port->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar
                        && port->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
            return OMX_ErrorUnsupportedSetting;
        }

        mColorFormat = port->format.video.eColorFormat;
    } else if (port->nPortIndex == kOutputPortIndex) {
        if (port->format.video.eCompressionFormat != mCodingType
                || port->format.video.eColorFormat != OMX_COLOR_FormatUnused) {
            return OMX_ErrorUnsupportedSetting;
        }

        mBitrate = port->format.video.nBitrate;
    } else {
        return OMX_ErrorBadPortIndex;
    }

    updatePortParams();
    return OMX_ErrorNone;
}

// Handles the encoder-generic OMX SetParameter indices (role, port
// definition, port format, store-metadata extension); everything else is
// delegated to SimpleSoftOMXComponent.
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR param) {
    // can include extension index OMX_INDEXEXTTYPE
    const int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)param;

            if (!isValidOMXParam(roleParams)) {
                return OMX_ErrorBadParameter;
            }

            // Only the single role supplied at construction is accepted.
            if (strncmp((const char *)roleParams->cRole,
                        mComponentRole,
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUnsupportedSetting;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_ERRORTYPE err = internalSetPortParams((const OMX_PARAM_PORTDEFINITIONTYPE *)param);

            if (err != OMX_ErrorNone) {
                return err;
            }

            // Let the base class update the remaining port-definition fields.
            return SimpleSoftOMXComponent::internalSetParameter(index, param);
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;

            if (!isValidOMXParam(format)) {
                return OMX_ErrorBadParameter;
            }

            if (format->nPortIndex == kInputPortIndex) {
                if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
                    format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
                    format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
                    mColorFormat = format->eColorFormat;

                    updatePortParams();
                    return OMX_ErrorNone;
                } else {
                    ALOGE("Unsupported color format %i", format->eColorFormat);
                    return OMX_ErrorUnsupportedSetting;
                }
            } else if (format->nPortIndex == kOutputPortIndex) {
                if (format->eCompressionFormat == mCodingType) {
                    return OMX_ErrorNone;
                } else {
                    return OMX_ErrorUnsupportedSetting;
                }
            } else {
                return OMX_ErrorBadPortIndex;
            }
        }

        case kStoreMetaDataExtensionIndex:
        {
            // storeMetaDataInBuffers
            const StoreMetaDataInBuffersParams *storeParam =
                (const StoreMetaDataInBuffersParams *)param;

            if (!isValidOMXParam(storeParam)) {
                return OMX_ErrorBadParameter;
            }

            // Metadata mode is only supported on the input port; requesting it
            // on the output port is rejected (disabling it there is a no-op).
            if (storeParam->nPortIndex == kOutputPortIndex) {
                return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
            } else if (storeParam->nPortIndex != kInputPortIndex) {
                return OMX_ErrorBadPortIndex;
            }

            mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
            if (mInputDataIsMeta) {
                // Metadata input implies opaque color format.
                mColorFormat = OMX_COLOR_FormatAndroidOpaque;
            } else if (mColorFormat == OMX_COLOR_FormatAndroidOpaque) {
                // Leaving metadata mode: fall back to the default raw format.
                mColorFormat = OMX_COLOR_FormatYUV420Planar;
            }
            updatePortParams();
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, param);
    }
}

// Handles the encoder-generic OMX GetParameter indices (port format
// enumeration, profile/level enumeration, consumer usage bits); everything
// else is delegated to SimpleSoftOMXComponent.
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR param) {
    switch ((int)index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;

            if (!isValidOMXParam(formatParams)) {
                return OMX_ErrorBadParameter;
            }

            if (formatParams->nPortIndex == kInputPortIndex) {
                // nIndex enumerates the supported formats; out-of-range means
                // the client has seen them all.
                if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
                    return OMX_ErrorNoMore;
                }

                // Color formats, in order of preference
                formatParams->eColorFormat = kSupportedColorFormats[formatParams->nIndex];
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                formatParams->xFramerate = mFramerate;
                return OMX_ErrorNone;
            } else if (formatParams->nPortIndex == kOutputPortIndex) {
                formatParams->eCompressionFormat = mCodingType;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
                formatParams->xFramerate = 0;
                return OMX_ErrorNone;
            } else {
                return OMX_ErrorBadPortIndex;
            }
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                  (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;

            if (!isValidOMXParam(profileLevel)) {
                return OMX_ErrorBadParameter;
            }

            if (profileLevel->nPortIndex != kOutputPortIndex) {
                ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
                return OMX_ErrorUnsupportedIndex;
            }

            // nProfileIndex enumerates the table supplied at construction.
            if (profileLevel->nProfileIndex >= mNumProfileLevels) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
            profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamConsumerUsageBits:
        {
            // Input surface buffers only need to be CPU-readable here.
            OMX_U32 *usageBits = (OMX_U32 *)param;
            *usageBits = GRALLOC_USAGE_SW_READ_OFTEN;
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, param);
    }
}

// Converts a flexible-YUV image described by |ycbcr| (arbitrary strides and
// chroma step) into tightly packed planar YUV420 at |dst|. A fast memcpy
// path is used when the source chroma is itself planar; otherwise chroma
// samples are copied one by one.
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertFlexYUVToPlanar(
        uint8_t *dst, size_t dstStride, size_t dstVStride,
        struct android_ycbcr *ycbcr, int32_t width, int32_t height) {
    const uint8_t *src = (const uint8_t *)ycbcr->y;
    const uint8_t *srcU = (const uint8_t *)ycbcr->cb;
    const uint8_t *srcV = (const uint8_t *)ycbcr->cr;
    uint8_t *dstU = dst + dstVStride * dstStride;
    uint8_t *dstV = dstU + (dstVStride >> 1) * (dstStride >> 1);

    for (size_t y = height; y > 0; --y) {
        memcpy(dst, src, width);
        dst += dstStride;
        src += ycbcr->ystride;
    }
    if (ycbcr->cstride == ycbcr->ystride >> 1 && ycbcr->chroma_step == 1) {
        // planar
        for (size_t y = height >> 1; y > 0; --y) {
            memcpy(dstU, srcU, width >> 1);
            dstU += dstStride >> 1;
            srcU += ycbcr->cstride;
            memcpy(dstV, srcV, width >> 1);
            dstV += dstStride >> 1;
            srcV += ycbcr->cstride;
        }
    } else {
        // arbitrary
        for (size_t y = height >> 1; y > 0; --y) {
            for (size_t x = width >> 1; x > 0; --x) {
                *dstU++ = *srcU;
                *dstV++ = *srcV;
                srcU += ycbcr->chroma_step;
                srcV += ycbcr->chroma_step;
            }
            // Advance to the next row: skip destination padding and the
            // remainder of the source chroma row.
            dstU += (dstStride >> 1) - (width >> 1);
            dstV += (dstStride >> 1) - (width >> 1);
            srcU += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
            srcV += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
        }
    }
}

// Converts a YUV420 semiplanar image (interleaved chroma after the Y plane)
// to planar YUV420. The Y plane is copied wholesale; chroma is deinterleaved
// four source bytes at a time, sending even-offset bytes to the Cb plane and
// odd-offset bytes to the Cr plane.
// NOTE(review): the 32-bit load/shift deinterleave below assumes a
// little-endian CPU, and both planes are assumed to be contiguous.
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertYUV420SemiPlanarToYUV420Planar(
        const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height) {
    // TODO: add support for stride
    int32_t outYsize = width * height;
    uint32_t *outY = (uint32_t *) outYUV;
    uint16_t *outCb = (uint16_t *) (outYUV + outYsize);
    uint16_t *outCr = (uint16_t *) (outYUV + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outY, inYVU, outYsize);

    /* U & V copying */
    // FIXME this only works if width is multiple of 4
    uint32_t *inYVU_4 = (uint32_t *) (inYVU + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inYVU_4++;
            // Bytes 0 and 2 of the word -> Cb plane (packed as one uint16_t).
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            // Bytes 1 and 3 of the word -> Cr plane.
            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            *outCb++ = tempU;
            *outCr++ = tempV;
        }
    }
}

// Converts a 32-bit RGB(A/X) image to planar YUV420 using BT.601 full->
// limited range coefficients. |bgr| selects the channel order in memory;
// chroma is subsampled by taking the top-left pixel of each 2x2 block.
// Width and height must be even.
// static
__attribute__((no_sanitize("integer")))
void SoftVideoEncoderOMXComponent::ConvertRGB32ToPlanar(
        uint8_t *dstY, size_t dstStride, size_t dstVStride,
        const uint8_t *src, size_t width, size_t height, size_t srcStride,
        bool bgr) {
    CHECK((width & 1) == 0);
    CHECK((height & 1) == 0);

    uint8_t *dstU = dstY + dstStride * dstVStride;
    uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);

#ifdef SURFACE_IS_BGR32
    bgr = !bgr;
#endif

    const size_t redOffset = bgr ? 2 : 0;
    const size_t greenOffset = 1;
    const size_t blueOffset = bgr ? 0 : 2;

    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            unsigned red = src[redOffset];
            unsigned green = src[greenOffset];
            unsigned blue = src[blueOffset];

            // Using ITU-R BT.601-7 (03/2011)
            // 2.5.1: Ey'  = ( 0.299*R + 0.587*G + 0.114*B)
            // 2.5.2: ECr' = ( 0.701*R - 0.587*G - 0.114*B) / 1.402
            //        ECb' = (-0.299*R - 0.587*G + 0.886*B) / 1.772
            // 2.5.3: Y  = 219 * Ey'  +  16
            //        Cr = 224 * ECr' + 128
            //        Cb = 224 * ECb' + 128

            unsigned luma =
                ((red * 65 + green * 129 + blue * 25 + 128) >> 8) + 16;

            dstY[x] = luma;

            if ((x & 1) == 0 && (y & 1) == 0) {
                unsigned U =
                    ((-red * 38 - green * 74 + blue * 112 + 128) >> 8) + 128;

                unsigned V =
                    ((red * 112 - green * 94 - blue * 18 + 128) >> 8) + 128;

                dstU[x >> 1] = U;
                dstV[x >> 1] = V;
            }
            src += 4;
        }

        if ((y & 1) == 0) {
            dstU += dstStride >> 1;
            dstV += dstStride >> 1;
        }

        src += srcStride - 4 * width;
        dstY += dstStride;
    }
}

// Extracts the pixel data of the graphic buffer referenced by the metadata
// in |src| (either VideoNativeMetadata or VideoGrallocMetadata) into |dst|
// as planar YUV420 of the given dimensions. Waits on the buffer's release
// fence (ANW case), locks the buffer for CPU read, converts according to its
// pixel format, and unlocks it. Returns |dst| on success, NULL on failure.
const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(
        uint8_t *dst, size_t dstSize,
        const uint8_t *src, size_t srcSize,
        size_t width, size_t height) const {
    size_t dstStride = width;
    size_t dstVStride = height;

    MetadataBufferType bufferType = *(MetadataBufferType *)src;
    bool usingANWBuffer = bufferType == kMetadataBufferTypeANWBuffer;
    if (!usingANWBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
        ALOGE("Unsupported metadata type (%d)", bufferType);
        return NULL;
    }

    buffer_handle_t handle;
    int format;
    size_t srcStride;
    size_t srcVStride;
    if (usingANWBuffer) {
        if (srcSize < sizeof(VideoNativeMetadata)) {
            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoNativeMetadata));
            return NULL;
        }

        VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)src;
        ANativeWindowBuffer *buffer = nativeMeta.pBuffer;
        handle = buffer->handle;
        format = buffer->format;
        srcStride = buffer->stride;
        srcVStride = buffer->height;
        // convert stride from pixels to bytes
        if (format != HAL_PIXEL_FORMAT_YV12 &&
            format != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
            format != HAL_PIXEL_FORMAT_YCbCr_420_888) {
            // TODO do we need to support other formats?
            srcStride *= 4;
        }

        if (nativeMeta.nFenceFd >= 0) {
            // The fence fd is consumed here; clear it so it is not waited on
            // (or closed) again.
            sp<Fence> fence = new Fence(nativeMeta.nFenceFd);
            nativeMeta.nFenceFd = -1;
            status_t err = fence->wait(IOMX::kFenceTimeoutMs);
            if (err != OK) {
                ALOGE("Timed out waiting on input fence");
                return NULL;
            }
        }
    } else {
        // TODO: remove this part.  Check if anyone uses this.

        if (srcSize < sizeof(VideoGrallocMetadata)) {
            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoGrallocMetadata));
            return NULL;
        }

        VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)(src);
        handle = grallocMeta.pHandle;
        // assume HAL_PIXEL_FORMAT_RGBA_8888
        // there is no way to get the src stride without the graphic buffer
        format = HAL_PIXEL_FORMAT_RGBA_8888;
        srcStride = width * 4;
        srcVStride = height;
    }

    // Smallest destination that can hold the planar YUV420 result: a full Y
    // plane plus both chroma planes, where the very last chroma row only
    // needs width/2 bytes (not a full stride).
    size_t neededSize =
        dstStride * dstVStride + (width >> 1)
                + (dstStride >> 1) * ((dstVStride >> 1) + (height >> 1) - 1);
    if (dstSize < neededSize) {
        ALOGE("destination buffer is too small (%zu vs %zu)", dstSize, neededSize);
        return NULL;
    }

    auto& mapper = GraphicBufferMapper::get();

    void *bits = NULL;
    struct android_ycbcr ycbcr;
    status_t res;
    if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
        // Flexible YUV: the mapper reports per-plane pointers and strides.
        res = mapper.lockYCbCr(
                handle,
                GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
                Rect(width, height), &ycbcr);
    } else {
        res = mapper.lock(
                handle,
                GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
                Rect(width, height), &bits);
    }
    if (res != OK) {
        ALOGE("Unable to lock image buffer %p for access", handle);
        return NULL;
    }

    switch (format) {
        case HAL_PIXEL_FORMAT_YV12:  // YCrCb / YVU planar
            // Build an android_ycbcr description of the YV12 layout so the
            // flexible converter can be reused.
            ycbcr.y = bits;
            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
            ycbcr.cb = (uint8_t *)ycbcr.cr + (srcStride >> 1) * (srcVStride >> 1);
            ycbcr.chroma_step = 1;
            ycbcr.cstride = srcStride >> 1;
            ycbcr.ystride = srcStride;
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:  // YCrCb / YVU semiplanar, NV21
            ycbcr.y = bits;
            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
            ycbcr.cb = (uint8_t *)ycbcr.cr + 1;
            ycbcr.chroma_step = 2;
            ycbcr.cstride = srcStride;
            ycbcr.ystride = srcStride;
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:  // YCbCr / YUV planar
            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
            break;
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            ConvertRGB32ToPlanar(
                    dst, dstStride, dstVStride,
                    (const uint8_t *)bits, width, height, srcStride,
                    format == HAL_PIXEL_FORMAT_BGRA_8888);
            break;
        default:
            ALOGE("Unsupported pixel format %#x", format);
            dst = NULL;
            break;
    }

    // Always unlock, even if the conversion failed above.
    if (mapper.unlock(handle) != OK) {
        ALOGE("Unable to unlock image buffer %p for access", handle);
    }

    return dst;
}

// Maps the store-metadata extension names to kStoreMetaDataExtensionIndex;
// all other extension names are delegated to the base class.
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
        !strcmp(name, "OMX.google.android.index.storeANWBufferInMetadata")) {
        *(int32_t*)index = kStoreMetaDataExtensionIndex;
        return OMX_ErrorNone;
    }
    return SimpleSoftOMXComponent::getExtensionIndex(name, index);
}

}  // namespace android