      1 /*
      2  * Copyright (c) 2007-2011 Intel Corporation. All Rights Reserved.
      3  *
      4  * Permission is hereby granted, free of charge, to any person obtaining a
      5  * copy of this software and associated documentation files (the
      6  * "Software"), to deal in the Software without restriction, including
      7  * without limitation the rights to use, copy, modify, merge, publish,
      8  * distribute, sub license, and/or sell copies of the Software, and to
      9  * permit persons to whom the Software is furnished to do so, subject to
     10  * the following conditions:
     11  *
     12  * The above copyright notice and this permission notice (including the
     13  * next paragraph) shall be included in all copies or substantial portions
     14  * of the Software.
     15  *
     16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
     17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
     18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
     19  * IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
     20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
     21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
     22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     23  */
     24 
     25 /**
     26  * \file va_vpp.h
     27  * \brief The video processing API
     28  *
     29  * This file contains the \ref api_vpp "Video processing API".
     30  */
     31 
     32 #ifndef VA_VPP_H
     33 #define VA_VPP_H
     34 
     35 #ifdef __cplusplus
     36 extern "C" {
     37 #endif
     38 
     39 /**
     40  * \defgroup api_vpp Video processing API
     41  *
     42  * @{
     43  *
     44  * The video processing API uses the same paradigm as for decoding:
     45  * - Query for supported filters;
     46  * - Set up a video processing pipeline;
     47  * - Send video processing parameters through VA buffers.
     48  *
     49  * \section api_vpp_caps Query for supported filters
     50  *
     51  * Checking whether video processing is supported can be performed
     52  * with vaQueryConfigEntrypoints() and the profile argument set to
     53  * #VAProfileNone. If video processing is supported, then the list of
     54  * returned entry-points will include #VAEntrypointVideoProc.
     55  *
     56  * \code
     57  * VAEntrypoint *entrypoints;
     58  * int i, num_entrypoints, supportsVideoProcessing = 0;
     59  *
     60  * num_entrypoints = vaMaxNumEntrypoints();
 * entrypoints = malloc(num_entrypoints * sizeof(entrypoints[0]));
     62  * vaQueryConfigEntrypoints(va_dpy, VAProfileNone,
     63  *     entrypoints, &num_entrypoints);
     64  *
     65  * for (i = 0; !supportsVideoProcessing && i < num_entrypoints; i++) {
     66  *     if (entrypoints[i] == VAEntrypointVideoProc)
     67  *         supportsVideoProcessing = 1;
     68  * }
     69  * \endcode
     70  *
     71  * Then, the vaQueryVideoProcFilters() function is used to query the
     72  * list of video processing filters.
     73  *
     74  * \code
     75  * VAProcFilterType filters[VAProcFilterCount];
     76  * unsigned int num_filters = VAProcFilterCount;
     77  *
     78  * // num_filters shall be initialized to the length of the array
 * vaQueryVideoProcFilters(va_dpy, vpp_ctx, filters, &num_filters);
     80  * \endcode
     81  *
     82  * Finally, individual filter capabilities can be checked with
     83  * vaQueryVideoProcFilterCaps().
     84  *
     85  * \code
     86  * VAProcFilterCap denoise_caps;
     87  * unsigned int num_denoise_caps = 1;
     88  * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
     89  *     VAProcFilterNoiseReduction,
     90  *     &denoise_caps, &num_denoise_caps
     91  * );
     92  *
     93  * VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
     94  * unsigned int num_deinterlacing_caps = VAProcDeinterlacingCount;
     95  * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
     96  *     VAProcFilterDeinterlacing,
 *     deinterlacing_caps, &num_deinterlacing_caps
     98  * );
     99  * \endcode
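 *
 * For illustration only, the reported capabilities can then be used to
 * clamp an application-chosen value to the range advertised by the
 * driver before the corresponding filter parameter buffer is created.
 * The \c requested_strength value below is a hypothetical application
 * setting, not part of the API.
 *
 * \code
 * // Clamp a requested noise reduction strength to the advertised range
 * float requested_strength = 0.8f; // hypothetical application setting
 * const VAProcFilterValueRange * const range = &denoise_caps.range;
 *
 * if (requested_strength < range->min_value)
 *     requested_strength = range->min_value;
 * else if (requested_strength > range->max_value)
 *     requested_strength = range->max_value;
 * // requested_strength can now be stored into
 * // VAProcFilterParameterBuffer::value when creating the filter buffer.
 * \endcode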
    100  *
    101  * \section api_vpp_setup Set up a video processing pipeline
    102  *
 * A video processing pipeline buffer is created for each source
 * surface to be processed. However, buffers holding filter
 * parameters can be created once and for all. The rationale is to
 * avoid repeated creation/destruction of filter buffers, and also
 * that filter parameters generally do not change from frame to
 * frame. For example, this makes it possible to implement a
 * checkerboard of videos where the same filters are applied to each
 * video source.
    110  *
    111  * The general control flow is demonstrated by the following pseudo-code:
    112  * \code
    113  * // Create filters
    114  * VABufferID denoise_filter, deint_filter;
    115  * VABufferID filter_bufs[VAProcFilterCount];
 * unsigned int num_filter_bufs = 0;
    117  *
    118  * for (i = 0; i < num_filters; i++) {
    119  *     switch (filters[i]) {
    120  *     case VAProcFilterNoiseReduction: {       // Noise reduction filter
    121  *         VAProcFilterParameterBuffer denoise;
    122  *         denoise.type  = VAProcFilterNoiseReduction;
    123  *         denoise.value = 0.5;
    124  *         vaCreateBuffer(va_dpy, vpp_ctx,
    125  *             VAProcFilterParameterBufferType, sizeof(denoise), 1,
    126  *             &denoise, &denoise_filter
    127  *         );
    128  *         filter_bufs[num_filter_bufs++] = denoise_filter;
    129  *         break;
    130  *     }
    131  *
    132  *     case VAProcFilterDeinterlacing:          // Motion-adaptive deinterlacing
    133  *         for (j = 0; j < num_deinterlacing_caps; j++) {
    134  *             VAProcFilterCapDeinterlacing * const cap = &deinterlacing_caps[j];
    135  *             if (cap->type != VAProcDeinterlacingMotionAdaptive)
    136  *                 continue;
    137  *
    138  *             VAProcFilterParameterBufferDeinterlacing deint;
    139  *             deint.type                   = VAProcFilterDeinterlacing;
    140  *             deint.algorithm              = VAProcDeinterlacingMotionAdaptive;
    141  *             vaCreateBuffer(va_dpy, vpp_ctx,
    142  *                 VAProcFilterParameterBufferType, sizeof(deint), 1,
    143  *                 &deint, &deint_filter
    144  *             );
    145  *             filter_bufs[num_filter_bufs++] = deint_filter;
    146  *         }
    147  *     }
    148  * }
    149  * \endcode
    150  *
    151  * Once the video processing pipeline is set up, the caller shall check the
    152  * implied capabilities and requirements with vaQueryVideoProcPipelineCaps().
 * This function can be used to determine the number of reference frames
    154  * needed by the specified deinterlacing algorithm, the supported color
    155  * primaries, etc.
    156  * \code
 * // Query pipeline capabilities
    158  * VAProcPipelineCaps pipeline_caps;
    159  * VASurfaceID *forward_references;
    160  * unsigned int num_forward_references;
    161  * VASurfaceID *backward_references;
    162  * unsigned int num_backward_references;
    163  * VAProcColorStandardType in_color_standards[VAProcColorStandardCount];
    164  * VAProcColorStandardType out_color_standards[VAProcColorStandardCount];
    165  *
 * pipeline_caps.input_color_standards      = in_color_standards;
 * pipeline_caps.num_input_color_standards  = ARRAY_ELEMS(in_color_standards);
 * pipeline_caps.output_color_standards     = out_color_standards;
 * pipeline_caps.num_output_color_standards = ARRAY_ELEMS(out_color_standards);
    170  * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
    171  *     filter_bufs, num_filter_bufs,
    172  *     &pipeline_caps
    173  * );
    174  *
    175  * num_forward_references  = pipeline_caps.num_forward_references;
    176  * forward_references      =
 *     malloc(num_forward_references * sizeof(VASurfaceID));
    178  * num_backward_references = pipeline_caps.num_backward_references;
    179  * backward_references     =
    180  *     malloc(num_backward_references * sizeof(VASurfaceID));
    181  * \endcode
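 *
 * As a sketch only, the returned color standards can then be scanned to
 * check whether a particular input color standard is supported (BT.601
 * is picked here purely as an example requirement):
 *
 * \code
 * unsigned int n;
 * int supports_bt601 = 0;
 *
 * for (n = 0; n < pipeline_caps.num_input_color_standards; n++) {
 *     if (pipeline_caps.input_color_standards[n] == VAProcColorStandardBT601)
 *         supports_bt601 = 1;
 * }
 * \endcode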
    182  *
    183  * \section api_vpp_submit Send video processing parameters through VA buffers
    184  *
    185  * Video processing pipeline parameters are submitted for each source
 * surface to process. Video filter parameters can also change per surface,
 * e.g. the list of reference frames used for deinterlacing.
    188  *
    189  * \code
    190  * foreach (iteration) {
    191  *     vaBeginPicture(va_dpy, vpp_ctx, vpp_surface);
    192  *     foreach (surface) {
    193  *         VARectangle output_region;
    194  *         VABufferID pipeline_buf;
    195  *         VAProcPipelineParameterBuffer *pipeline_param;
    196  *
    197  *         vaCreateBuffer(va_dpy, vpp_ctx,
 *             VAProcPipelineParameterBufferType, sizeof(*pipeline_param), 1,
    199  *             NULL, &pipeline_buf
    200  *         );
    201  *
    202  *         // Setup output region for this surface
    203  *         // e.g. upper left corner for the first surface
    204  *         output_region.x     = BORDER;
    205  *         output_region.y     = BORDER;
    206  *         output_region.width =
    207  *             (vpp_surface_width - (Nx_surfaces + 1) * BORDER) / Nx_surfaces;
    208  *         output_region.height =
    209  *             (vpp_surface_height - (Ny_surfaces + 1) * BORDER) / Ny_surfaces;
    210  *
 *         vaMapBuffer(va_dpy, pipeline_buf, (void **)&pipeline_param);
 *         pipeline_param->surface              = surface;
 *         pipeline_param->surface_region       = NULL;
 *         pipeline_param->output_region        = &output_region;
 *         pipeline_param->output_background_color = 0;
 *         if (first surface to render)
 *             pipeline_param->output_background_color = 0xff000000; // black
 *         pipeline_param->filter_flags         = VA_FILTER_SCALING_HQ;
 *         pipeline_param->filters              = filter_bufs;
 *         pipeline_param->num_filters          = num_filter_bufs;
 *
 *         // Update reference frames for deinterlacing, if necessary
 *         pipeline_param->forward_references      = forward_references;
 *         pipeline_param->num_forward_references  = num_forward_references_used;
 *         pipeline_param->backward_references     = backward_references;
 *         pipeline_param->num_backward_references = num_backward_references_used;
 *         vaUnmapBuffer(va_dpy, pipeline_buf);
    228  *
    229  *         // Apply filters
    230  *         vaRenderPicture(va_dpy, vpp_ctx, &pipeline_buf, 1);
    231  *     }
    232  *     vaEndPicture(va_dpy, vpp_ctx);
    233  * }
    234  * \endcode
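 *
 * Once all surfaces have been processed, the filter parameter buffers
 * that were created up-front can be released. This is only a sketch of
 * the cleanup path, assuming the \c filter_bufs array from the setup
 * section; buffer destruction itself uses the core vaDestroyBuffer() call.
 *
 * \code
 * for (i = 0; i < num_filter_bufs; i++)
 *     vaDestroyBuffer(va_dpy, filter_bufs[i]);
 * \endcode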
    235  */
    236 
    237 /** \brief Video filter types. */
    238 typedef enum _VAProcFilterType {
    239     VAProcFilterNone = 0,
    240     /** \brief Noise reduction filter. */
    241     VAProcFilterNoiseReduction,
    242     /** \brief Deinterlacing filter. */
    243     VAProcFilterDeinterlacing,
    244     /** \brief Sharpening filter. */
    245     VAProcFilterSharpening,
    246     /** \brief Color balance parameters. */
    247     VAProcFilterColorBalance,
    248     /** \brief Skin Tone Enhancement. */
    249     VAProcFilterSkinToneEnhancement,
    250     VAProcFilterCount
    251 } VAProcFilterType;
    252 
    253 /** \brief Deinterlacing types. */
    254 typedef enum _VAProcDeinterlacingType {
    255     VAProcDeinterlacingNone = 0,
    256     /** \brief Bob deinterlacing algorithm. */
    257     VAProcDeinterlacingBob,
    258     /** \brief Weave deinterlacing algorithm. */
    259     VAProcDeinterlacingWeave,
    260     /** \brief Motion adaptive deinterlacing algorithm. */
    261     VAProcDeinterlacingMotionAdaptive,
    262     /** \brief Motion compensated deinterlacing algorithm. */
    263     VAProcDeinterlacingMotionCompensated,
    264     /** \brief Number of deinterlacing algorithms. */
    265     VAProcDeinterlacingCount
    266 } VAProcDeinterlacingType;
    267 
    268 /** \brief Color balance types. */
    269 typedef enum _VAProcColorBalanceType {
    270     VAProcColorBalanceNone = 0,
    271     /** \brief Hue. */
    272     VAProcColorBalanceHue,
    273     /** \brief Saturation. */
    274     VAProcColorBalanceSaturation,
    275     /** \brief Brightness. */
    276     VAProcColorBalanceBrightness,
    277     /** \brief Contrast. */
    278     VAProcColorBalanceContrast,
    279     /** \brief Automatically adjusted saturation. */
    280     VAProcColorBalanceAutoSaturation,
    281     /** \brief Automatically adjusted brightness. */
    282     VAProcColorBalanceAutoBrightness,
    283     /** \brief Automatically adjusted contrast. */
    284     VAProcColorBalanceAutoContrast,
    285     /** \brief Number of color balance attributes. */
    286     VAProcColorBalanceCount
    287 } VAProcColorBalanceType;
    288 
    289 /** \brief Color standard types. */
    290 typedef enum _VAProcColorStandardType {
    291     VAProcColorStandardNone = 0,
    292     /** \brief ITU-R BT.601. */
    293     VAProcColorStandardBT601,
    294     /** \brief ITU-R BT.709. */
    295     VAProcColorStandardBT709,
    296     /** \brief ITU-R BT.470-2 System M. */
    297     VAProcColorStandardBT470M,
    298     /** \brief ITU-R BT.470-2 System B, G. */
    299     VAProcColorStandardBT470BG,
    300     /** \brief SMPTE-170M. */
    301     VAProcColorStandardSMPTE170M,
    302     /** \brief SMPTE-240M. */
    303     VAProcColorStandardSMPTE240M,
    304     /** \brief Generic film. */
    305     VAProcColorStandardGenericFilm,
    306     /** \brief Number of color standards. */
    307     VAProcColorStandardCount
    308 } VAProcColorStandardType;
    309 
    310 /** @name Video pipeline flags */
    311 /**@{*/
    312 /** \brief Specifies whether to apply subpictures when processing a surface. */
    313 #define VA_PROC_PIPELINE_SUBPICTURES    0x00000001
    314 /**
    315  * \brief Specifies whether to apply power or performance
    316  * optimizations to a pipeline.
    317  *
    318  * When processing several surfaces, it may be necessary to prioritize
 * certain pipelines over others. This flag is only a hint to the
 * video processor so that it can, for example, omit certain filters to
 * save power. Typically, this flag could be used with video surfaces
    322  * decoded from a secondary bitstream.
    323  */
    324 #define VA_PROC_PIPELINE_FAST           0x00000002
    325 /**@}*/
    326 
    327 /** @name Video filter flags */
    328 /**@{*/
    329 /** \brief Specifies whether the filter shall be present in the pipeline. */
    330 #define VA_PROC_FILTER_MANDATORY        0x00000001
    331 /**@}*/
    332 
    333 /** @name Pipeline end flags */
    334 /**@{*/
/** \brief Specifies that this pipeline is the last one. */
    336 #define VA_PIPELINE_FLAG_END		0x00000004
    337 /**@}*/
    338 
    339 /** \brief Video processing pipeline capabilities. */
    340 typedef struct _VAProcPipelineCaps {
    341     /** \brief Pipeline flags. See VAProcPipelineParameterBuffer::pipeline_flags. */
    342     unsigned int        pipeline_flags;
    343     /** \brief Extra filter flags. See VAProcPipelineParameterBuffer::filter_flags. */
    344     unsigned int        filter_flags;
    345     /** \brief Number of forward reference frames that are needed. */
    346     unsigned int        num_forward_references;
    347     /** \brief Number of backward reference frames that are needed. */
    348     unsigned int        num_backward_references;
    349     /** \brief List of color standards supported on input. */
    350     VAProcColorStandardType *input_color_standards;
    351     /** \brief Number of elements in \ref input_color_standards array. */
    352     unsigned int        num_input_color_standards;
    353     /** \brief List of color standards supported on output. */
    354     VAProcColorStandardType *output_color_standards;
    355     /** \brief Number of elements in \ref output_color_standards array. */
    356     unsigned int        num_output_color_standards;
    357 } VAProcPipelineCaps;
    358 
    359 /** \brief Specification of values supported by the filter. */
    360 typedef struct _VAProcFilterValueRange {
    361     /** \brief Minimum value supported, inclusive. */
    362     float               min_value;
    363     /** \brief Maximum value supported, inclusive. */
    364     float               max_value;
    365     /** \brief Default value. */
    366     float               default_value;
    367     /** \brief Step value that alters the filter behaviour in a sensible way. */
    368     float               step;
    369 } VAProcFilterValueRange;
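
/**
 * The following is a minimal sketch, not part of the API, of how a range
 * reported by the driver might back a user-visible control: a hypothetical
 * \c slider_pos value in [0, 100] is converted to a filter value and
 * snapped to the advertised \c step.
 *
 * \code
 * static float
 * map_slider_to_filter_value(const VAProcFilterValueRange *range,
 *                            unsigned int slider_pos) // 0..100
 * {
 *     float value = range->min_value +
 *         (range->max_value - range->min_value) * (slider_pos / 100.0f);
 *     if (range->step > 0.0f) {
 *         float steps = (value - range->min_value) / range->step;
 *         value = range->min_value + (float)(int)(steps + 0.5f) * range->step;
 *     }
 *     return value;
 * }
 * \endcode
 */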
    370 
    371 /**
    372  * \brief Video processing pipeline configuration.
    373  *
    374  * This buffer defines a video processing pipeline. As for any buffer
    375  * passed to \c vaRenderPicture(), this is a one-time usage model.
    376  * However, the actual filters to be applied are provided in the
    377  * \c filters field, so they can be re-used in other processing
    378  * pipelines.
    379  *
    380  * The target surface is specified by the \c render_target argument of
    381  * \c vaBeginPicture(). The general usage model is described as follows:
    382  * - \c vaBeginPicture(): specify the target surface that receives the
    383  *   processed output;
    384  * - \c vaRenderPicture(): specify a surface to be processed and composed
    385  *   into the \c render_target. Use as many \c vaRenderPicture() calls as
 *   there are surfaces to compose;
    387  * - \c vaEndPicture(): tell the driver to start processing the surfaces
    388  *   with the requested filters.
    389  *
    390  * If a filter (e.g. noise reduction) needs to be applied with different
    391  * values for multiple surfaces, the application needs to create as many
 * filter parameter buffers as necessary, i.e. the filter parameters shall
    393  * not change between two calls to \c vaRenderPicture().
    394  *
    395  * For composition usage models, the first surface to process will generally
    396  * use an opaque background color, i.e. \c output_background_color set with
    397  * the most significant byte set to \c 0xff. For instance, \c 0xff000000 for
    398  * a black background. Then, subsequent surfaces would use a transparent
    399  * background color.
    400  */
    401 typedef struct _VAProcPipelineParameterBuffer {
    402     /**
    403      * \brief Source surface ID.
    404      *
     * ID of the source surface to process. If subpictures are associated
     * with the video surface and the #VA_PROC_PIPELINE_SUBPICTURES pipeline
     * flag is set, then they shall be rendered to the target surface.
    408      */
    409     VASurfaceID         surface;
    410     /**
    411      * \brief Region within the source surface to be processed.
    412      *
    413      * Pointer to a #VARectangle defining the region within the source
    414      * surface to be processed. If NULL, \c surface_region implies the
    415      * whole surface.
    416      */
    417     const VARectangle  *surface_region;
    418     /**
    419      * \brief Requested input color primaries.
    420      *
    421      * Color primaries are implicitly converted throughout the processing
    422      * pipeline. The video processor chooses the best moment to apply
     * this conversion. The set of supported color primaries for input
     * shall be queried with vaQueryVideoProcPipelineCaps().
    425      */
    426     VAProcColorStandardType surface_color_standard;
    427     /**
    428      * \brief Region within the output surface.
    429      *
    430      * Pointer to a #VARectangle defining the region within the output
    431      * surface that receives the processed pixels. If NULL, \c output_region
    432      * implies the whole surface.
    433      *
    434      * Note that any pixels residing outside the specified region will
    435      * be filled in with the \ref output_background_color.
    436      */
    437     const VARectangle  *output_region;
    438     /**
    439      * \brief Background color.
    440      *
    441      * Background color used to fill in pixels that reside outside of the
    442      * specified \ref output_region. The color is specified in ARGB format:
    443      * [31:24] alpha, [23:16] red, [15:8] green, [7:0] blue.
    444      *
    445      * Unless the alpha value is zero or the \ref output_region represents
    446      * the whole target surface size, implementations shall not render the
    447      * source surface to the target surface directly. Rather, in order to
    448      * maintain the exact semantics of \ref output_background_color, the
    449      * driver shall use a temporary surface and fill it in with the
    450      * appropriate background color. Next, the driver will blend this
    451      * temporary surface into the target surface.
    452      */
    453     unsigned int        output_background_color;
    454     /**
    455      * \brief Requested output color primaries.
    456      */
    457     VAProcColorStandardType output_color_standard;
    458     /**
     * \brief Pipeline flags. See video pipeline flags.
     *
     * Flags to control the pipeline, e.g. whether to apply subpictures
     * or to notify the driver that it may opt for power optimizations,
     * should those be needed.
    464      */
    465     unsigned int        pipeline_flags;
    466     /**
    467      * \brief Extra filter flags. See vaPutSurface() flags.
    468      *
    469      * Filter flags are used as a fast path, wherever possible, to use
    470      * vaPutSurface() flags instead of explicit filter parameter buffers.
    471      *
     * The following filter flags are allowed API-wise; use
     * vaQueryVideoProcPipelineCaps() to check what the implementation
     * actually supports:
    474      * - Bob-deinterlacing: \c VA_FRAME_PICTURE, \c VA_TOP_FIELD,
    475      *   \c VA_BOTTOM_FIELD. Note that any deinterlacing filter
    476      *   (#VAProcFilterDeinterlacing) will override those flags.
    477      * - Color space conversion: \c VA_SRC_BT601, \c VA_SRC_BT709,
    478      *   \c VA_SRC_SMPTE_240.
    479      * - Scaling: \c VA_FILTER_SCALING_DEFAULT, \c VA_FILTER_SCALING_FAST,
    480      *   \c VA_FILTER_SCALING_HQ, \c VA_FILTER_SCALING_NL_ANAMORPHIC.
    481      */
    482     unsigned int        filter_flags;
    483     /**
    484      * \brief Array of filters to apply to the surface.
    485      *
    486      * The list of filters shall be ordered in the same way the driver expects
     * them, i.e. in the order returned from vaQueryVideoProcFilters().
    488      * Otherwise, a #VA_STATUS_ERROR_INVALID_FILTER_CHAIN is returned
    489      * from vaRenderPicture() with this buffer.
    490      *
    491      * #VA_STATUS_ERROR_UNSUPPORTED_FILTER is returned if the list
    492      * contains an unsupported filter.
    493      *
    494      * Note: no filter buffer is destroyed after a call to vaRenderPicture(),
    495      * only this pipeline buffer will be destroyed as per the core API
    496      * specification. This allows for flexibility in re-using the filter for
    497      * other surfaces to be processed.
    498      */
    499     VABufferID         *filters;
    500     /** \brief Actual number of filters. */
    501     unsigned int        num_filters;
    502     /** \brief Array of forward reference frames. */
    503     VASurfaceID        *forward_references;
    504     /** \brief Number of forward reference frames that were supplied. */
    505     unsigned int        num_forward_references;
    506     /** \brief Array of backward reference frames. */
    507     VASurfaceID        *backward_references;
    508     /** \brief Number of backward reference frames that were supplied. */
    509     unsigned int        num_backward_references;
    510 } VAProcPipelineParameterBuffer;
    511 
    512 /**
    513  * \brief Filter parameter buffer base.
    514  *
    515  * This is a helper structure used by driver implementations only.
    516  * Users are not supposed to allocate filter parameter buffers of this
    517  * type.
    518  */
    519 typedef struct _VAProcFilterParameterBufferBase {
    520     /** \brief Filter type. */
    521     VAProcFilterType    type;
    522 } VAProcFilterParameterBufferBase;
    523 
    524 /**
    525  * \brief Default filter parametrization.
    526  *
    527  * Unless there is a filter-specific parameter buffer,
    528  * #VAProcFilterParameterBuffer is the default type to use.
    529  */
    530 typedef struct _VAProcFilterParameterBuffer {
    531     /** \brief Filter type. */
    532     VAProcFilterType    type;
    533     /** \brief Value. */
    534     float               value;
    535 } VAProcFilterParameterBuffer;
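
/**
 * As a hedged example of the default parametrization, the following
 * sketch creates a sharpening filter buffer; the 0.5 strength and the
 * \c va_dpy / \c vpp_ctx handles are assumptions carried over from the
 * examples above.
 *
 * \code
 * VAProcFilterParameterBuffer sharpen;
 * VABufferID sharpen_filter;
 *
 * sharpen.type  = VAProcFilterSharpening;
 * sharpen.value = 0.5; // assumed to lie within the queried cap range
 * vaCreateBuffer(va_dpy, vpp_ctx,
 *     VAProcFilterParameterBufferType, sizeof(sharpen), 1,
 *     &sharpen, &sharpen_filter
 * );
 * \endcode
 */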
    536 
    537 /** @name De-interlacing flags */
    538 /**@{*/
    539 /**
    540  * \brief Bottom field first in the input frame.
 * If this flag is not set, top field first is assumed.
    542  */
    543 #define VA_DEINTERLACING_BOTTOM_FIELD_FIRST	0x0001
    544 /**
    545  * \brief Bottom field used in deinterlacing.
 * If this flag is not set, the top field is used.
    547  */
    548 #define VA_DEINTERLACING_BOTTOM_FIELD		0x0002
    549 /**
    550  * \brief A single field is stored in the input frame.
 * If this flag is not set, the frame is assumed to contain two interleaved fields.
    552  */
    553 #define VA_DEINTERLACING_ONE_FIELD		0x0004
    554 /**@}*/
    555 
    556 /** \brief Deinterlacing filter parametrization. */
    557 typedef struct _VAProcFilterParameterBufferDeinterlacing {
    558     /** \brief Filter type. Shall be set to #VAProcFilterDeinterlacing. */
    559     VAProcFilterType            type;
    560     /** \brief Deinterlacing algorithm. */
    561     VAProcDeinterlacingType     algorithm;
    562     /** \brief Deinterlacing flags. */
    unsigned int                flags;
    564 } VAProcFilterParameterBufferDeinterlacing;
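
/**
 * A minimal sketch, assuming a bottom-field-first interlaced source, of
 * how the deinterlacing parameters and the flags above might be filled
 * in; the bob algorithm is picked arbitrarily for the example.
 *
 * \code
 * VAProcFilterParameterBufferDeinterlacing deint;
 *
 * deint.type      = VAProcFilterDeinterlacing;
 * deint.algorithm = VAProcDeinterlacingBob;
 * deint.flags     = VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
 * // Additionally set VA_DEINTERLACING_BOTTOM_FIELD when the bottom
 * // field of the current frame is the one being deinterlaced.
 * \endcode
 */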
    565 
    566 /**
    567  * \brief Color balance filter parametrization.
    568  *
 * This buffer defines color balance attributes. A single VA buffer can
 * hold several color balance attributes by creating it with the desired
 * number of elements, as shown in the following pseudo-code:
    572  *
    573  * \code
    574  * enum { kHue, kSaturation, kBrightness, kContrast };
    575  *
    576  * // Initial color balance parameters
    577  * static const VAProcFilterParameterBufferColorBalance colorBalanceParams[4] =
    578  * {
    579  *     [kHue] =
    580  *         { VAProcFilterColorBalance, VAProcColorBalanceHue, 0.5 },
    581  *     [kSaturation] =
    582  *         { VAProcFilterColorBalance, VAProcColorBalanceSaturation, 0.5 },
    583  *     [kBrightness] =
    584  *         { VAProcFilterColorBalance, VAProcColorBalanceBrightness, 0.5 },
 *     [kContrast] =
 *         { VAProcFilterColorBalance, VAProcColorBalanceContrast, 0.5 }
    587  * };
    588  *
    589  * // Create buffer
    590  * VABufferID colorBalanceBuffer;
    591  * vaCreateBuffer(va_dpy, vpp_ctx,
 *     VAProcFilterParameterBufferType, sizeof(colorBalanceParams[0]), 4,
    593  *     colorBalanceParams,
    594  *     &colorBalanceBuffer
    595  * );
    596  *
    597  * VAProcFilterParameterBufferColorBalance *pColorBalanceParam;
 * vaMapBuffer(va_dpy, colorBalanceBuffer, (void **)&pColorBalanceParam);
    599  * {
    600  *     // Change brightness only
 *     pColorBalanceParam[kBrightness].value = 0.75;
    602  * }
    603  * vaUnmapBuffer(va_dpy, colorBalanceBuffer);
    604  * \endcode
    605  */
    606 typedef struct _VAProcFilterParameterBufferColorBalance {
    607     /** \brief Filter type. Shall be set to #VAProcFilterColorBalance. */
    608     VAProcFilterType            type;
    609     /** \brief Color balance attribute. */
    610     VAProcColorBalanceType      attrib;
    611     /**
    612      * \brief Color balance value.
    613      *
     * Special case for automatically adjusted attributes, e.g.
    615      * #VAProcColorBalanceAutoSaturation,
    616      * #VAProcColorBalanceAutoBrightness,
    617      * #VAProcColorBalanceAutoContrast.
    618      * - If \ref value is \c 1.0 +/- \c FLT_EPSILON, the attribute is
    619      *   automatically adjusted and overrides any other attribute of
    620      *   the same type that would have been set explicitly;
    621      * - If \ref value is \c 0.0 +/- \c FLT_EPSILON, the attribute is
     *   disabled and any other attribute of the same type is used instead.
    623      */
    624     float                       value;
    625 } VAProcFilterParameterBufferColorBalance;
    626 
    627 /**
    628  * \brief Default filter cap specification (single range value).
    629  *
    630  * Unless there is a filter-specific cap structure, #VAProcFilterCap is the
    631  * default type to use for output caps from vaQueryVideoProcFilterCaps().
    632  */
    633 typedef struct _VAProcFilterCap {
    634     /** \brief Range of supported values for the filter. */
    635     VAProcFilterValueRange      range;
    636 } VAProcFilterCap;
    637 
    638 /** \brief Capabilities specification for the deinterlacing filter. */
    639 typedef struct _VAProcFilterCapDeinterlacing {
    640     /** \brief Deinterlacing algorithm. */
    641     VAProcDeinterlacingType     type;
    642 } VAProcFilterCapDeinterlacing;
    643 
    644 /** \brief Capabilities specification for the color balance filter. */
    645 typedef struct _VAProcFilterCapColorBalance {
    646     /** \brief Color balance operation. */
    647     VAProcColorBalanceType      type;
    648     /** \brief Range of supported values for the specified operation. */
    649     VAProcFilterValueRange      range;
    650 } VAProcFilterCapColorBalance;
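
/**
 * A hedged sketch of querying the color balance capabilities and locating
 * the brightness range; \c va_dpy and \c vpp_ctx are assumed to exist as
 * in the examples above.
 *
 * \code
 * VAProcFilterCapColorBalance cb_caps[VAProcColorBalanceCount];
 * unsigned int i, num_cb_caps = VAProcColorBalanceCount;
 * const VAProcFilterValueRange *brightness_range = NULL;
 *
 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
 *     VAProcFilterColorBalance,
 *     cb_caps, &num_cb_caps
 * );
 *
 * for (i = 0; i < num_cb_caps; i++) {
 *     if (cb_caps[i].type == VAProcColorBalanceBrightness)
 *         brightness_range = &cb_caps[i].range;
 * }
 * \endcode
 */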
    651 
    652 /**
    653  * \brief Queries video processing filters.
    654  *
    655  * This function returns the list of video processing filters supported
    656  * by the driver. The \c filters array is allocated by the user and
    657  * \c num_filters shall be initialized to the number of allocated
    658  * elements in that array. Upon successful return, the actual number
    659  * of filters will be overwritten into \c num_filters. Otherwise,
    660  * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and \c num_filters
    661  * is adjusted to the number of elements that would be returned if enough
    662  * space was available.
    663  *
    664  * The list of video processing filters supported by the driver shall
    665  * be ordered in the way they can be iteratively applied. This is needed
 * both for correctness, since some filters would not mean anything if
 * applied at the beginning of the pipeline, and for performance, since
 * some filters can be applied in a single pass (e.g. noise
    669  * reduction + deinterlacing).
    670  *
    671  * @param[in] dpy               the VA display
    672  * @param[in] context           the video processing context
    673  * @param[out] filters          the output array of #VAProcFilterType elements
    674  * @param[in,out] num_filters the number of elements allocated on input,
    675  *      the number of elements actually filled in on output
    676  */
    677 VAStatus
    678 vaQueryVideoProcFilters(
    679     VADisplay           dpy,
    680     VAContextID         context,
    681     VAProcFilterType   *filters,
    682     unsigned int       *num_filters
    683 );
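
/**
 * The following sketch, based on the error behaviour documented above,
 * shows one way an application might handle
 * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED by retrying with a heap-allocated
 * array; malloc() error checking is omitted for brevity.
 *
 * \code
 * VAProcFilterType small_list[2], *list = small_list;
 * unsigned int num = 2;
 * VAStatus status;
 *
 * status = vaQueryVideoProcFilters(va_dpy, vpp_ctx, list, &num);
 * if (status == VA_STATUS_ERROR_MAX_NUM_EXCEEDED) {
 *     // num now holds the number of filters actually available
 *     list = malloc(num * sizeof(*list));
 *     status = vaQueryVideoProcFilters(va_dpy, vpp_ctx, list, &num);
 * }
 * \endcode
 */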
    684 
    685 /**
    686  * \brief Queries video filter capabilities.
    687  *
    688  * This function returns the list of capabilities supported by the driver
    689  * for a specific video filter. The \c filter_caps array is allocated by
    690  * the user and \c num_filter_caps shall be initialized to the number
    691  * of allocated elements in that array. Upon successful return, the
    692  * actual number of filters will be overwritten into \c num_filter_caps.
    693  * Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and
    694  * \c num_filter_caps is adjusted to the number of elements that would be
    695  * returned if enough space was available.
    696  *
    697  * @param[in] dpy               the VA display
    698  * @param[in] context           the video processing context
    699  * @param[in] type              the video filter type
    700  * @param[out] filter_caps      the output array of #VAProcFilterCap elements
    701  * @param[in,out] num_filter_caps the number of elements allocated on input,
 *      the number of elements actually filled in on output
    703  */
    704 VAStatus
    705 vaQueryVideoProcFilterCaps(
    706     VADisplay           dpy,
    707     VAContextID         context,
    708     VAProcFilterType    type,
    709     void               *filter_caps,
    710     unsigned int       *num_filter_caps
    711 );
    712 
    713 /**
    714  * \brief Queries video processing pipeline capabilities.
    715  *
    716  * This function returns the video processing pipeline capabilities. The
    717  * \c filters array defines the video processing pipeline and is an array
    718  * of buffers holding filter parameters.
    719  *
    720  * Note: the #VAProcPipelineCaps structure contains user-provided arrays.
    721  * If non-NULL, the corresponding \c num_* fields shall be filled in on
    722  * input with the number of elements allocated. Upon successful return,
    723  * the actual number of elements will be overwritten into the \c num_*
    724  * fields. Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned
    725  * and \c num_* fields are adjusted to the number of elements that would
    726  * be returned if enough space was available.
    727  *
    728  * @param[in] dpy               the VA display
    729  * @param[in] context           the video processing context
    730  * @param[in] filters           the array of VA buffers defining the video
    731  *      processing pipeline
    732  * @param[in] num_filters       the number of elements in filters
    733  * @param[in,out] pipeline_caps the video processing pipeline capabilities
    734  */
    735 VAStatus
    736 vaQueryVideoProcPipelineCaps(
    737     VADisplay           dpy,
    738     VAContextID         context,
    739     VABufferID         *filters,
    740     unsigned int        num_filters,
    741     VAProcPipelineCaps *pipeline_caps
    742 );
    743 
    744 /**@}*/
    745 
    746 #ifdef __cplusplus
    747 }
    748 #endif
    749 
    750 #endif /* VA_VPP_H */
    751