/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef METADATA_BUFFER_TYPE_H
#define METADATA_BUFFER_TYPE_H

#ifdef __cplusplus
extern "C" {
namespace android {
#endif

/*
 * MetadataBufferType defines the type of the metadata buffers that
 * can be passed to a video encoder component for encoding, via the
 * Stagefright media recording framework. To see how to work with
 * metadata buffers in the media recording framework, please consult
 * HardwareAPI.h
 *
 * The creator of the metadata buffers and the video encoder share
 * common knowledge of what is actually stored in these metadata
 * buffers, and of how the video encoder component can use that
 * information to locate the actual pixel data as its source input,
 * plus whatever other information is necessary. The Stagefright
 * media recording framework does not need to know anything specific
 * about the metadata buffers, except for receiving each individual
 * metadata buffer as the source input, making a copy of it, and
 * passing the copy via the OpenMAX API to the video encoder component.
 *
 * The creator of the metadata buffers must ensure that the first
 * 4 bytes in every metadata buffer indicate its buffer type, and
 * that the rest of the metadata buffer contains the actual metadata
 * information. When a video encoder component receives a metadata
 * buffer, it uses the first 4 bytes in that buffer to find out the
 * type of the metadata buffer, and takes the action appropriate to
 * that type (for instance, locating the actual pixel data input and
 * then encoding the input data to produce a compressed output buffer).
 *
 * The following shows the layout of a metadata buffer, where the
 * buffer type is a 4-byte field of MetadataBufferType, and the
 * payload is the metadata information.
 *
 * --------------------------------------------------------------
 * |  buffer type  |          payload                           |
 * --------------------------------------------------------------
 *
 */
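/*
 * Illustrative only (not part of this header): a minimal sketch of how a
 * component might read the 4-byte type field at the start of a metadata
 * buffer before interpreting the payload. The helper name and the error
 * handling are hypothetical; real components work with the structures
 * declared in HardwareAPI.h.
 *
 *   #include <stdint.h>
 *   #include <string.h>
 *
 *   static MetadataBufferType getMetadataBufferType(
 *           const void *data, size_t size) {
 *       int32_t type = kMetadataBufferTypeInvalid;
 *       if (data != NULL && size >= sizeof(type)) {
 *           // The first 4 bytes of every metadata buffer hold the type.
 *           memcpy(&type, data, sizeof(type));
 *       }
 *       return (MetadataBufferType)type;
 *   }
 */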
typedef enum {

    /*
     * kMetadataBufferTypeCameraSource is used to indicate that
     * the source of the metadata buffer is the camera component.
     */
    kMetadataBufferTypeCameraSource  = 0,

    /*
     * kMetadataBufferTypeGrallocSource is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * a buffer_handle_t.
     * So in this case, the metadata that the encoder receives
     * will have a byte stream that consists of two parts:
     * 1. First, there is an integer indicating that it is a gralloc
     * source (kMetadataBufferTypeGrallocSource).
     * 2. This is followed by the buffer_handle_t, which is a handle to
     * the gralloc buffer. The encoder needs to interpret this gralloc
     * handle and encode the frames.
     * --------------------------------------------------------------
     * |  kMetadataBufferTypeGrallocSource | buffer_handle_t buffer |
     * --------------------------------------------------------------
     *
     * See the VideoGrallocMetadata structure.
     */
    kMetadataBufferTypeGrallocSource = 1,
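
    /*
     * Illustrative only: the two-part layout above can be thought of as
     * the struct sketched below. The struct and field names here are
     * hypothetical; the authoritative definition is VideoGrallocMetadata
     * in HardwareAPI.h.
     *
     *   typedef struct {
     *       MetadataBufferType eType;  // kMetadataBufferTypeGrallocSource
     *       buffer_handle_t pHandle;   // handle to the gralloc buffer
     *   } GrallocMetadataSketch;
     */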

    /*
     * kMetadataBufferTypeANWBuffer is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * an ANativeWindowBuffer, and that a fence is provided.
     *
     * In this case, the metadata will have a byte stream that consists
     * of three parts:
     * 1. First, there is an integer indicating that the metadata
     * contains an ANativeWindowBuffer (kMetadataBufferTypeANWBuffer).
     * 2. This is followed by the pointer to the ANativeWindowBuffer.
     * The codec must not free this buffer, as it does not actually own it.
     * 3. Finally, there is an integer containing a fence file descriptor.
     * The codec must wait on the fence before encoding or decoding into
     * this buffer. When the buffer is returned, the codec must replace
     * this file descriptor with a new fence that will be waited on
     * before the buffer is replaced (encoder) or read (decoder).
     * ---------------------------------
     * |  kMetadataBufferTypeANWBuffer |
     * ---------------------------------
     * |  ANativeWindowBuffer *buffer  |
     * ---------------------------------
     * |  int fenceFd                  |
     * ---------------------------------
     *
     * See the VideoNativeMetadata structure.
     */
    kMetadataBufferTypeANWBuffer = 2,
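
    /*
     * Illustrative only: the three-part layout above can be thought of as
     * the struct sketched below. The struct and field names here are
     * hypothetical; the authoritative definition is VideoNativeMetadata
     * in HardwareAPI.h.
     *
     *   typedef struct {
     *       MetadataBufferType eType;             // kMetadataBufferTypeANWBuffer
     *       struct ANativeWindowBuffer *pBuffer;  // not owned by the codec
     *       int nFenceFd;                         // fence to wait on before using the buffer
     *   } ANWBufferMetadataSketch;
     */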

    /*
     * kMetadataBufferTypeNativeHandleSource is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * a native_handle_t.
     *
     * In this case, the metadata that the encoder receives
     * will have a byte stream that consists of two parts:
     * 1. First, there is an integer indicating that the metadata contains a
     * native handle (kMetadataBufferTypeNativeHandleSource).
     * 2. This is followed by a pointer to native_handle_t. The encoder needs
     * to interpret this native handle and encode the frame. The encoder must
     * not free this native handle, as it does not actually own it. The handle
     * will be freed after the encoder releases the buffer back to the camera.
     * ----------------------------------------------------------------
     * |  kMetadataBufferTypeNativeHandleSource | native_handle_t* nh |
     * ----------------------------------------------------------------
     *
     * See the VideoNativeHandleMetadata structure.
     */
    kMetadataBufferTypeNativeHandleSource = 3,
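
    /*
     * Illustrative only: the two-part layout above can be thought of as
     * the struct sketched below. The struct and field names here are
     * hypothetical; the authoritative definition is
     * VideoNativeHandleMetadata in HardwareAPI.h.
     *
     *   typedef struct {
     *       MetadataBufferType eType;  // kMetadataBufferTypeNativeHandleSource
     *       native_handle_t *pHandle;  // not owned by the encoder
     *   } NativeHandleMetadataSketch;
     */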

    /* This value is used by the framework, but is never used inside a metadata buffer. */
    kMetadataBufferTypeInvalid = -1,


    // Add more here...

} MetadataBufferType;

#ifdef __cplusplus
}  // namespace android
}
#endif

#endif  // METADATA_BUFFER_TYPE_H