Home | History | Annotate | Download | only in omx
      1 /*
      2  * Copyright (C) 2016 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 //#define LOG_NDEBUG 0
     18 #define LOG_TAG "OMXUtils"
     19 
     20 #include <string.h>
     21 
     22 #include <media/stagefright/omx/OMXUtils.h>
     23 #include <media/stagefright/foundation/ADebug.h>
     24 #include <media/stagefright/foundation/AUtils.h>
     25 #include <media/stagefright/foundation/MediaDefs.h>
     26 #include <media/stagefright/MediaErrors.h>
     27 #include <media/hardware/HardwareAPI.h>
     28 #include <system/graphics-base.h>
     29 
     30 namespace android {
     31 
     32 status_t StatusFromOMXError(OMX_ERRORTYPE err) {
     33     switch (err) {
     34         case OMX_ErrorNone:
     35             return OK;
     36         case OMX_ErrorNoMore:
     37             return NOT_ENOUGH_DATA;
     38         case OMX_ErrorUnsupportedSetting:
     39         case OMX_ErrorUnsupportedIndex:
     40             return ERROR_UNSUPPORTED; // this is a media specific error
     41         case OMX_ErrorBadParameter:
     42             return BAD_VALUE;
     43         case OMX_ErrorInsufficientResources:
     44             return NO_MEMORY;
     45         case OMX_ErrorInvalidComponentName:
     46         case OMX_ErrorComponentNotFound:
     47             return NAME_NOT_FOUND;
     48         default:
     49             return UNKNOWN_ERROR;
     50     }
     51 }
     52 
     53 /**************************************************************************************************/
     54 
     55 DescribeColorFormatParams::DescribeColorFormatParams(const DescribeColorFormat2Params &params) {
     56     InitOMXParams(this);
     57 
     58     eColorFormat = params.eColorFormat;
     59     nFrameWidth = params.nFrameWidth;
     60     nFrameHeight = params.nFrameHeight;
     61     nStride = params.nStride;
     62     nSliceHeight = params.nSliceHeight;
     63     bUsingNativeBuffers = params.bUsingNativeBuffers;
     64     // we don't copy media images as this conversion is only used pre-query
     65 };
     66 
     67 void DescribeColorFormat2Params::initFromV1(const DescribeColorFormatParams &params) {
     68     InitOMXParams(this);
     69 
     70     eColorFormat = params.eColorFormat;
     71     nFrameWidth = params.nFrameWidth;
     72     nFrameHeight = params.nFrameHeight;
     73     nStride = params.nStride;
     74     nSliceHeight = params.nSliceHeight;
     75     bUsingNativeBuffers = params.bUsingNativeBuffers;
     76     sMediaImage.initFromV1(params.sMediaImage);
     77 };
     78 
// Converts a legacy (v1) MediaImage description into this MediaImage2.
// Only YUV images are converted; any other type — or any subsampling factor
// too large for the narrowing cast below — degrades the result to
// MEDIA_IMAGE_TYPE_UNKNOWN with all remaining fields zeroed.
void MediaImage2::initFromV1(const MediaImage &image) {
    // Zero everything first so every field is defined even on the
    // early-return (unknown-type) paths.
    memset(this, 0, sizeof(*this));

    if (image.mType != MediaImage::MEDIA_IMAGE_TYPE_YUV) {
        mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        return;
    }

    // Validate before converting: the per-plane subsampling factors are
    // cast to int32_t below, so reject values that would not fit.
    for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
        if (image.mPlane[ix].mHorizSubsampling > INT32_MAX
                || image.mPlane[ix].mVertSubsampling > INT32_MAX) {
            mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
            return;
        }
    }

    mType = (MediaImage2::Type)image.mType;
    mNumPlanes = image.mNumPlanes;
    mWidth = image.mWidth;
    mHeight = image.mHeight;
    mBitDepth = image.mBitDepth;
    // v1 carried no allocated-bit-depth field; 8 bits is assumed here —
    // NOTE(review): presumably true for all v1 producers; confirm.
    mBitDepthAllocated = 8;
    for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
        mPlane[ix].mOffset = image.mPlane[ix].mOffset;
        mPlane[ix].mColInc = image.mPlane[ix].mColInc;
        mPlane[ix].mRowInc = image.mPlane[ix].mRowInc;
        mPlane[ix].mHorizSubsampling = (int32_t)image.mPlane[ix].mHorizSubsampling;
        mPlane[ix].mVertSubsampling = (int32_t)image.mPlane[ix].mVertSubsampling;
    }
}
    109 
    110 /**************************************************************************************************/
    111 
    112 const char *GetComponentRole(bool isEncoder, const char *mime) {
    113     struct MimeToRole {
    114         const char *mime;
    115         const char *decoderRole;
    116         const char *encoderRole;
    117     };
    118 
    119     static const MimeToRole kMimeToRole[] = {
    120         { MEDIA_MIMETYPE_AUDIO_MPEG,
    121             "audio_decoder.mp3", "audio_encoder.mp3" },
    122         { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
    123             "audio_decoder.mp1", "audio_encoder.mp1" },
    124         { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
    125             "audio_decoder.mp2", "audio_encoder.mp2" },
    126         { MEDIA_MIMETYPE_AUDIO_AMR_NB,
    127             "audio_decoder.amrnb", "audio_encoder.amrnb" },
    128         { MEDIA_MIMETYPE_AUDIO_AMR_WB,
    129             "audio_decoder.amrwb", "audio_encoder.amrwb" },
    130         { MEDIA_MIMETYPE_AUDIO_AAC,
    131             "audio_decoder.aac", "audio_encoder.aac" },
    132         { MEDIA_MIMETYPE_AUDIO_VORBIS,
    133             "audio_decoder.vorbis", "audio_encoder.vorbis" },
    134         { MEDIA_MIMETYPE_AUDIO_OPUS,
    135             "audio_decoder.opus", "audio_encoder.opus" },
    136         { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
    137             "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
    138         { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
    139             "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
    140         { MEDIA_MIMETYPE_VIDEO_AVC,
    141             "video_decoder.avc", "video_encoder.avc" },
    142         { MEDIA_MIMETYPE_VIDEO_HEVC,
    143             "video_decoder.hevc", "video_encoder.hevc" },
    144         { MEDIA_MIMETYPE_VIDEO_MPEG4,
    145             "video_decoder.mpeg4", "video_encoder.mpeg4" },
    146         { MEDIA_MIMETYPE_VIDEO_H263,
    147             "video_decoder.h263", "video_encoder.h263" },
    148         { MEDIA_MIMETYPE_VIDEO_VP8,
    149             "video_decoder.vp8", "video_encoder.vp8" },
    150         { MEDIA_MIMETYPE_VIDEO_VP9,
    151             "video_decoder.vp9", "video_encoder.vp9" },
    152         { MEDIA_MIMETYPE_AUDIO_RAW,
    153             "audio_decoder.raw", "audio_encoder.raw" },
    154         { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
    155             "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
    156         { MEDIA_MIMETYPE_AUDIO_FLAC,
    157             "audio_decoder.flac", "audio_encoder.flac" },
    158         { MEDIA_MIMETYPE_AUDIO_MSGSM,
    159             "audio_decoder.gsm", "audio_encoder.gsm" },
    160         { MEDIA_MIMETYPE_VIDEO_MPEG2,
    161             "video_decoder.mpeg2", "video_encoder.mpeg2" },
    162         { MEDIA_MIMETYPE_AUDIO_AC3,
    163             "audio_decoder.ac3", "audio_encoder.ac3" },
    164         { MEDIA_MIMETYPE_AUDIO_EAC3,
    165             "audio_decoder.eac3", "audio_encoder.eac3" },
    166         { MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
    167             "image_decoder.heic", "image_encoder.heic" },
    168     };
    169 
    170     static const size_t kNumMimeToRole =
    171         sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
    172 
    173     size_t i;
    174     for (i = 0; i < kNumMimeToRole; ++i) {
    175         if (!strcasecmp(mime, kMimeToRole[i].mime)) {
    176             break;
    177         }
    178     }
    179 
    180     if (i == kNumMimeToRole) {
    181         return NULL;
    182     }
    183 
    184     return isEncoder ? kMimeToRole[i].encoderRole
    185                   : kMimeToRole[i].decoderRole;
    186 }
    187 
    188 status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role) {
    189     OMX_PARAM_COMPONENTROLETYPE roleParams;
    190     InitOMXParams(&roleParams);
    191 
    192     strncpy((char *)roleParams.cRole,
    193             role, OMX_MAX_STRINGNAME_SIZE - 1);
    194 
    195     roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
    196 
    197     return omxNode->setParameter(
    198             OMX_IndexParamStandardComponentRole,
    199             &roleParams, sizeof(roleParams));
    200 }
    201 
// Fills in params.sMediaImage with a default plane layout for the color
// format described by params, used when the component does not implement
// the describeColorFormat extension. Only the YUV 4:2:0 family is handled;
// returns false (leaving the image type UNKNOWN) for anything else or for
// unusable stride/slice-height values.
bool DescribeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        if (fmt == OMX_COLOR_FormatYUV420Planar16) {
            ALOGW("Cannot describe color format OMX_COLOR_FormatYUV420Planar16");
        }
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format. The Y plane layout is the same for all handled
    // formats; the chroma planes are filled in per-format below.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    // cast to int because HAL_PIXEL_FORMAT_YV12 is not an OMX enumerator
    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-byte-aligned strides, V plane before U
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // planar I420: U plane follows Y, V follows U, half-stride chroma
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved UV plane after Y, described as two planes
            // with column increment 2 and V one byte after U
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // unreachable: the format was validated above
            TRESPASS();
    }
    return true;
}
    316 
// Queries the component for a description of the color format in
// describeParams, preferring the v1 describeColorFormat extension, then the
// v2 extension, and finally falling back to the locally computed default
// layout (DescribeDefaultColorFormat) when neither query yields a usable
// image. Note the fallback ordering: if the v1 extension index exists but
// its getParameter fails, the v2 extension is not attempted.
bool DescribeColorFormat(
        const sp<IOMXNode> &omxNode,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        // v1 query: down-convert the params, query, then up-convert the reply
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omxNode->getParameter(
                describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
               && omxNode->getParameter(
                       describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    // neither extension produced a description; compute a default layout
    return DescribeDefaultColorFormat(describeParams);
}
    341 
    342 // static
    343 bool IsFlexibleColorFormat(
    344          const sp<IOMXNode> &omxNode,
    345          uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    346     DescribeColorFormat2Params describeParams;
    347     InitOMXParams(&describeParams);
    348     describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    349     // reasonable dummy values
    350     describeParams.nFrameWidth = 128;
    351     describeParams.nFrameHeight = 128;
    352     describeParams.nStride = 128;
    353     describeParams.nSliceHeight = 128;
    354     describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
    355 
    356     CHECK(flexibleEquivalent != NULL);
    357 
    358     if (!DescribeColorFormat(omxNode, describeParams)) {
    359         return false;
    360     }
    361 
    362     const MediaImage2 &img = describeParams.sMediaImage;
    363     if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
    364         if (img.mNumPlanes != 3
    365                 || img.mPlane[img.Y].mHorizSubsampling != 1
    366                 || img.mPlane[img.Y].mVertSubsampling != 1) {
    367             return false;
    368         }
    369 
    370         // YUV 420
    371         if (img.mPlane[img.U].mHorizSubsampling == 2
    372                 && img.mPlane[img.U].mVertSubsampling == 2
    373                 && img.mPlane[img.V].mHorizSubsampling == 2
    374                 && img.mPlane[img.V].mVertSubsampling == 2) {
    375             // possible flexible YUV420 format
    376             if (img.mBitDepth <= 8) {
    377                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
    378                return true;
    379             }
    380         }
    381     }
    382     return false;
    383 }
    384 
    385 }  // namespace android
    386 
    387