/*
 * Copyright (c) 2007-2011 Intel Corporation. All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/**
 * \file va_vpp.h
 * \brief The video processing API
 *
 * This file contains the \ref api_vpp "Video processing API".
 */

#ifndef VA_VPP_H
#define VA_VPP_H

#ifdef __cplusplus
extern "C" {
#endif

/**
 * \defgroup api_vpp Video processing API
 *
 * @{
 *
 * The video processing API uses the same paradigm as for decoding:
 * - Query for supported filters;
 * - Set up a video processing pipeline;
 * - Send video processing parameters through VA buffers.
 *
 * \section api_vpp_caps Query for supported filters
 *
 * Checking whether video processing is supported can be performed
 * with vaQueryConfigEntrypoints() and the profile argument set to
 * #VAProfileNone. If video processing is supported, then the list of
 * returned entry-points will include #VAEntrypointVideoProc.
 *
 * \code
 * VAEntrypoint *entrypoints;
 * int i, num_entrypoints, supportsVideoProcessing = 0;
 *
 * num_entrypoints = vaMaxNumEntrypoints(va_dpy);
 * entrypoints = malloc(num_entrypoints * sizeof(entrypoints[0]));
 * vaQueryConfigEntrypoints(va_dpy, VAProfileNone,
 *     entrypoints, &num_entrypoints);
 *
 * for (i = 0; !supportsVideoProcessing && i < num_entrypoints; i++) {
 *     if (entrypoints[i] == VAEntrypointVideoProc)
 *         supportsVideoProcessing = 1;
 * }
 * \endcode
 *
 * Then, the vaQueryVideoProcFilters() function is used to query the
 * list of video processing filters.
 *
 * \code
 * VAProcFilterType filters[VAProcFilterCount];
 * unsigned int num_filters = VAProcFilterCount;
 *
 * // num_filters shall be initialized to the length of the array
 * vaQueryVideoProcFilters(va_dpy, vpp_ctx, filters, &num_filters);
 * \endcode
 *
 * Finally, individual filter capabilities can be checked with
 * vaQueryVideoProcFilterCaps().
 *
 * \code
 * VAProcFilterCap denoise_caps;
 * unsigned int num_denoise_caps = 1;
 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
 *     VAProcFilterNoiseReduction,
 *     &denoise_caps, &num_denoise_caps
 * );
 *
 * VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
 * unsigned int num_deinterlacing_caps = VAProcDeinterlacingCount;
 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
 *     VAProcFilterDeinterlacing,
 *     &deinterlacing_caps, &num_deinterlacing_caps
 * );
 * \endcode
 *
 * \section api_vpp_setup Set up a video processing pipeline
 *
 * A video processing pipeline buffer is created for each source
 * surface we want to process. However, buffers holding filter
 * parameters can be created once and reused. The rationale is to avoid
 * repeated creation/destruction of filter buffers, and filter parameters
 * generally do not change from frame to frame. For example, this makes it
 * possible to implement a checkerboard of videos where the same filters
 * are applied to each video source.
 *
 * The general control flow is demonstrated by the following pseudo-code:
 * \code
 * // Create filters
 * VABufferID denoise_filter, deint_filter;
 * VABufferID filter_bufs[VAProcFilterCount];
 * unsigned int num_filter_bufs = 0;
 * unsigned int i, j;
 *
 * for (i = 0; i < num_filters; i++) {
 *     switch (filters[i]) {
 *     case VAProcFilterNoiseReduction: {       // Noise reduction filter
 *         VAProcFilterParameterBuffer denoise;
 *         denoise.type  = VAProcFilterNoiseReduction;
 *         denoise.value = 0.5;
 *         vaCreateBuffer(va_dpy, vpp_ctx,
 *             VAProcFilterParameterBufferType, sizeof(denoise), 1,
 *             &denoise, &denoise_filter
 *         );
 *         filter_bufs[num_filter_bufs++] = denoise_filter;
 *         break;
 *     }
 *
 *     case VAProcFilterDeinterlacing:          // Motion-adaptive deinterlacing
 *         for (j = 0; j < num_deinterlacing_caps; j++) {
 *             VAProcFilterCapDeinterlacing * const cap = &deinterlacing_caps[j];
 *             if (cap->type != VAProcDeinterlacingMotionAdaptive)
 *                 continue;
 *
 *             VAProcFilterParameterBufferDeinterlacing deint;
 *             deint.type                   = VAProcFilterDeinterlacing;
 *             deint.algorithm              = VAProcDeinterlacingMotionAdaptive;
 *             deint.flags                  = 0;
 *             vaCreateBuffer(va_dpy, vpp_ctx,
 *                 VAProcFilterParameterBufferType, sizeof(deint), 1,
 *                 &deint, &deint_filter
 *             );
 *             filter_bufs[num_filter_bufs++] = deint_filter;
 *         }
 *         break;
 *     }
 * }
 * \endcode
 *
 * Once the video processing pipeline is set up, the caller shall check the
 * implied capabilities and requirements with vaQueryVideoProcPipelineCaps().
 * This function can be used to validate the number of reference frames
 * needed by the specified deinterlacing algorithm, the supported color
 * primaries, and so on.
 * \code
 * // Query the pipeline capabilities and requirements
 * VAProcPipelineCaps pipeline_caps;
 * VASurfaceID *forward_references;
 * unsigned int num_forward_references;
 * VASurfaceID *backward_references;
 * unsigned int num_backward_references;
 * VAProcColorStandardType in_color_standards[VAProcColorStandardCount];
 * VAProcColorStandardType out_color_standards[VAProcColorStandardCount];
 *
 * pipeline_caps.input_color_standards      = in_color_standards;
 * pipeline_caps.num_input_color_standards  = ARRAY_ELEMS(in_color_standards);
 * pipeline_caps.output_color_standards     = out_color_standards;
 * pipeline_caps.num_output_color_standards = ARRAY_ELEMS(out_color_standards);
 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
 *     filter_bufs, num_filter_bufs,
 *     &pipeline_caps
 * );
 *
 * num_forward_references  = pipeline_caps.num_forward_references;
 * forward_references      =
 *     malloc(num_forward_references * sizeof(VASurfaceID));
 * num_backward_references = pipeline_caps.num_backward_references;
 * backward_references     =
 *     malloc(num_backward_references * sizeof(VASurfaceID));
 * \endcode
 *
 * \section api_vpp_submit Send video processing parameters through VA buffers
 *
 * Video processing pipeline parameters are submitted for each source
 * surface to process. Video filter parameters can also change on a
 * per-surface basis, e.g. the list of reference frames used for deinterlacing.
 *
 * \code
 * foreach (iteration) {
 *     vaBeginPicture(va_dpy, vpp_ctx, vpp_surface);
 *     foreach (surface) {
 *         VARectangle output_region;
 *         VABufferID pipeline_buf;
 *         VAProcPipelineParameterBuffer *pipeline_param;
 *
 *         vaCreateBuffer(va_dpy, vpp_ctx,
 *             VAProcPipelineParameterBufferType, sizeof(*pipeline_param), 1,
 *             NULL, &pipeline_buf
 *         );
 *
 *         // Setup output region for this surface
 *         // e.g. upper left corner for the first surface
 *         output_region.x     = BORDER;
 *         output_region.y     = BORDER;
 *         output_region.width =
 *             (vpp_surface_width - (Nx_surfaces + 1) * BORDER) / Nx_surfaces;
 *         output_region.height =
 *             (vpp_surface_height - (Ny_surfaces + 1) * BORDER) / Ny_surfaces;
 *
 *         vaMapBuffer(va_dpy, pipeline_buf, (void **)&pipeline_param);
 *         pipeline_param->surface              = surface;
 *         pipeline_param->surface_region       = NULL;
 *         pipeline_param->output_region        = &output_region;
 *         pipeline_param->output_background_color = 0;
 *         if (first surface to render)
 *             pipeline_param->output_background_color = 0xff000000; // black
 *         pipeline_param->filter_flags         = VA_FILTER_SCALING_HQ;
 *         pipeline_param->filters              = filter_bufs;
 *         pipeline_param->num_filters          = num_filter_bufs;
 *
 *         // Update reference frames for deinterlacing, if necessary
 *         pipeline_param->forward_references      = forward_references;
 *         pipeline_param->num_forward_references  = num_forward_references_used;
 *         pipeline_param->backward_references     = backward_references;
 *         pipeline_param->num_backward_references = num_backward_references_used;
 *         vaUnmapBuffer(va_dpy, pipeline_buf);
 *
 *         // Apply filters
 *         vaRenderPicture(va_dpy, vpp_ctx, &pipeline_buf, 1);
 *     }
 *     vaEndPicture(va_dpy, vpp_ctx);
 * }
 * \endcode
 */

/** \brief Video filter types. */
typedef enum _VAProcFilterType {
    VAProcFilterNone = 0,
    /** \brief Noise reduction filter. */
    VAProcFilterNoiseReduction,
    /** \brief Deinterlacing filter. */
    VAProcFilterDeinterlacing,
    /** \brief Sharpening filter. */
    VAProcFilterSharpening,
    /** \brief Color balance parameters. */
    VAProcFilterColorBalance,
    VAProcFilterCount
} VAProcFilterType;

/** \brief Deinterlacing types. */
typedef enum _VAProcDeinterlacingType {
    VAProcDeinterlacingNone = 0,
    /** \brief Bob deinterlacing algorithm. */
    VAProcDeinterlacingBob,
    /** \brief Weave deinterlacing algorithm. */
    VAProcDeinterlacingWeave,
    /** \brief Motion adaptive deinterlacing algorithm. */
    VAProcDeinterlacingMotionAdaptive,
    /** \brief Motion compensated deinterlacing algorithm. */
    VAProcDeinterlacingMotionCompensated,
    /** \brief Number of deinterlacing algorithms. */
    VAProcDeinterlacingCount
} VAProcDeinterlacingType;

/** \brief Color balance types. */
typedef enum _VAProcColorBalanceType {
    VAProcColorBalanceNone = 0,
    /** \brief Hue. */
    VAProcColorBalanceHue,
    /** \brief Saturation. */
    VAProcColorBalanceSaturation,
    /** \brief Brightness. */
    VAProcColorBalanceBrightness,
    /** \brief Contrast. */
    VAProcColorBalanceContrast,
    /** \brief Automatically adjusted saturation. */
    VAProcColorBalanceAutoSaturation,
    /** \brief Automatically adjusted brightness. */
    VAProcColorBalanceAutoBrightness,
    /** \brief Automatically adjusted contrast. */
    VAProcColorBalanceAutoContrast,
    /** \brief Number of color balance attributes. */
    VAProcColorBalanceCount
} VAProcColorBalanceType;

/** \brief Color standard types. */
typedef enum _VAProcColorStandardType {
    VAProcColorStandardNone = 0,
    /** \brief ITU-R BT.601. */
    VAProcColorStandardBT601,
    /** \brief ITU-R BT.709. */
    VAProcColorStandardBT709,
    /** \brief ITU-R BT.470-2 System M. */
    VAProcColorStandardBT470M,
    /** \brief ITU-R BT.470-2 System B, G. */
    VAProcColorStandardBT470BG,
    /** \brief SMPTE-170M. */
    VAProcColorStandardSMPTE170M,
    /** \brief SMPTE-240M. */
    VAProcColorStandardSMPTE240M,
    /** \brief Generic film. */
    VAProcColorStandardGenericFilm,
    /** \brief Number of color standards. */
    VAProcColorStandardCount
} VAProcColorStandardType;

/** @name Video pipeline flags */
/**@{*/
/** \brief Specifies whether to apply subpictures when processing a surface. */
#define VA_PROC_PIPELINE_SUBPICTURES    0x00000001
/**
 * \brief Specifies whether to apply power or performance
 * optimizations to a pipeline.
 *
 * When processing several surfaces, it may be necessary to prioritize
 * certain pipelines over others. This flag is only a hint to the video
 * processor so that it can, for example, omit certain filters to save
 * power. Typically, this flag could be used with video surfaces decoded
 * from a secondary bitstream.
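 *
 * For example, a compositor might request this hint only for secondary
 * streams. This is a minimal sketch; \c pipeline_param is assumed to point
 * to a mapped #VAProcPipelineParameterBuffer and \c is_secondary_stream is
 * an application-defined flag.
 *
 * \code
 * if (is_secondary_stream)
 *     pipeline_param->pipeline_flags |= VA_PROC_PIPELINE_FAST;
 * \endcode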
 */
#define VA_PROC_PIPELINE_FAST           0x00000002
/**@}*/

/** @name Video filter flags */
/**@{*/
/** \brief Specifies whether the filter shall be present in the pipeline. */
#define VA_PROC_FILTER_MANDATORY        0x00000001
/**@}*/

/** @name Pipeline end flags */
/**@{*/
/** \brief Specifies that the pipeline is the last one. */
#define VA_PIPELINE_FLAG_END		0x00000004
/**@}*/

/** \brief Video processing pipeline capabilities. */
typedef struct _VAProcPipelineCaps {
    /** \brief Pipeline flags. See VAProcPipelineParameterBuffer::pipeline_flags. */
    unsigned int        pipeline_flags;
    /** \brief Extra filter flags. See VAProcPipelineParameterBuffer::filter_flags. */
    unsigned int        filter_flags;
    /** \brief Number of forward reference frames that are needed. */
    unsigned int        num_forward_references;
    /** \brief Number of backward reference frames that are needed. */
    unsigned int        num_backward_references;
    /** \brief List of color standards supported on input. */
    VAProcColorStandardType *input_color_standards;
    /** \brief Number of elements in \ref input_color_standards array. */
    unsigned int        num_input_color_standards;
    /** \brief List of color standards supported on output. */
    VAProcColorStandardType *output_color_standards;
    /** \brief Number of elements in \ref output_color_standards array. */
    unsigned int        num_output_color_standards;
} VAProcPipelineCaps;

/** \brief Specification of values supported by the filter. */
typedef struct _VAProcFilterValueRange {
    /** \brief Minimum value supported, inclusive. */
    float               min_value;
    /** \brief Maximum value supported, inclusive. */
    float               max_value;
    /** \brief Default value. */
    float               default_value;
    /** \brief Step value that alters the filter behaviour in a sensible way. */
    float               step;
} VAProcFilterValueRange;

/**
 * \brief Video processing pipeline configuration.
 *
 * This buffer defines a video processing pipeline. As for any buffer
 * passed to \c vaRenderPicture(), this is a one-time usage model.
 * However, the actual filters to be applied are provided in the
 * \c filters field, so they can be re-used in other processing
 * pipelines.
 *
 * The target surface is specified by the \c render_target argument of
 * \c vaBeginPicture(). The general usage model is described as follows:
 * - \c vaBeginPicture(): specify the target surface that receives the
 *   processed output;
 * - \c vaRenderPicture(): specify a surface to be processed and composed
 *   into the \c render_target. Use one \c vaRenderPicture() call for each
 *   surface to compose;
 * - \c vaEndPicture(): tell the driver to start processing the surfaces
 *   with the requested filters.
 *
 * If a filter (e.g. noise reduction) needs to be applied with different
 * values for multiple surfaces, the application needs to create as many
 * filter parameter buffers as necessary, i.e. the filter parameters shall
 * not change between two calls to \c vaRenderPicture().
 *
 * For composition usage models, the first surface to process will generally
 * use an opaque background color, i.e. \c output_background_color set with
 * the most significant byte set to \c 0xff. For instance, \c 0xff000000 for
 * a black background. Then, subsequent surfaces would use a transparent
 * background color.
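 *
 * For instance, applying a different noise reduction strength to two
 * surfaces composed into the same target requires two distinct filter
 * parameter buffers (a sketch only, error checking omitted; \c va_dpy and
 * \c vpp_ctx are assumed to be a valid display and video processing
 * context):
 *
 * \code
 * VAProcFilterParameterBuffer denoise_weak, denoise_strong;
 * VABufferID denoise_weak_buf, denoise_strong_buf;
 *
 * denoise_weak.type    = VAProcFilterNoiseReduction;
 * denoise_weak.value   = 0.2;
 * denoise_strong.type  = VAProcFilterNoiseReduction;
 * denoise_strong.value = 0.8;
 *
 * vaCreateBuffer(va_dpy, vpp_ctx, VAProcFilterParameterBufferType,
 *     sizeof(denoise_weak), 1, &denoise_weak, &denoise_weak_buf);
 * vaCreateBuffer(va_dpy, vpp_ctx, VAProcFilterParameterBufferType,
 *     sizeof(denoise_strong), 1, &denoise_strong, &denoise_strong_buf);
 * // Reference denoise_weak_buf in the pipeline buffer of the first
 * // surface and denoise_strong_buf in the pipeline buffer of the second.
 * \endcode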
 */
typedef struct _VAProcPipelineParameterBuffer {
    /**
     * \brief Source surface ID.
     *
     * ID of the source surface to process. If subpictures are associated
     * with the video surfaces then they shall be rendered to the target
     * surface, if the #VA_PROC_PIPELINE_SUBPICTURES pipeline flag is set.
     */
    VASurfaceID         surface;
    /**
     * \brief Region within the source surface to be processed.
     *
     * Pointer to a #VARectangle defining the region within the source
     * surface to be processed. If NULL, \c surface_region implies the
     * whole surface.
     */
    const VARectangle  *surface_region;
    /**
     * \brief Requested input color primaries.
     *
     * Color primaries are implicitly converted throughout the processing
     * pipeline. The video processor chooses the best moment to apply
     * this conversion. The set of supported color primaries
     * for input shall be queried with vaQueryVideoProcPipelineCaps().
     */
    VAProcColorStandardType surface_color_standard;
    /**
     * \brief Region within the output surface.
     *
     * Pointer to a #VARectangle defining the region within the output
     * surface that receives the processed pixels. If NULL, \c output_region
     * implies the whole surface.
     *
     * Note that any pixels residing outside the specified region will
     * be filled in with the \ref output_background_color.
     */
    const VARectangle  *output_region;
    /**
     * \brief Background color.
     *
     * Background color used to fill in pixels that reside outside of the
     * specified \ref output_region. The color is specified in ARGB format:
     * [31:24] alpha, [23:16] red, [15:8] green, [7:0] blue.
     *
     * Unless the alpha value is zero or the \ref output_region represents
     * the whole target surface size, implementations shall not render the
     * source surface to the target surface directly. Rather, in order to
     * maintain the exact semantics of \ref output_background_color, the
     * driver shall use a temporary surface and fill it in with the
     * appropriate background color. Next, the driver will blend this
     * temporary surface into the target surface.
     */
    unsigned int        output_background_color;
    /**
     * \brief Requested output color primaries.
     */
    VAProcColorStandardType output_color_standard;
    /**
     * \brief Pipeline flags. See "Video pipeline flags".
     *
     * Flags to control the pipeline, e.g. whether to apply subpictures,
     * or to notify the driver that it may opt for power optimizations,
     * should this be needed.
     */
    unsigned int        pipeline_flags;
    /**
     * \brief Extra filter flags. See vaPutSurface() flags.
     *
     * Filter flags are used as a fast path, wherever possible, to use
     * vaPutSurface() flags instead of explicit filter parameter buffers.
     *
     * Allowed filter flags API-wise. Use vaQueryVideoProcPipelineCaps()
     * to check for implementation details:
     * - Bob-deinterlacing: \c VA_FRAME_PICTURE, \c VA_TOP_FIELD,
     *   \c VA_BOTTOM_FIELD. Note that any deinterlacing filter
     *   (#VAProcFilterDeinterlacing) will override those flags.
     * - Color space conversion: \c VA_SRC_BT601, \c VA_SRC_BT709,
     *   \c VA_SRC_SMPTE_240.
     * - Scaling: \c VA_FILTER_SCALING_DEFAULT, \c VA_FILTER_SCALING_FAST,
     *   \c VA_FILTER_SCALING_HQ, \c VA_FILTER_SCALING_NL_ANAMORPHIC.
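     *
     * For example, a caller relying on the fast path only (no explicit
     * filter parameter buffers) might request deinterlacing of the top
     * field, BT.601 input and high-quality scaling as follows (a sketch,
     * assuming \c pipeline_param points to a mapped pipeline buffer):
     *
     * \code
     * pipeline_param->filter_flags =
     *     VA_TOP_FIELD | VA_SRC_BT601 | VA_FILTER_SCALING_HQ;
     * \endcode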
     */
    unsigned int        filter_flags;
    /**
     * \brief Array of filters to apply to the surface.
     *
     * The list of filters shall be ordered in the same way the driver expects
     * them, i.e. in the order returned by vaQueryVideoProcFilters().
     * Otherwise, a #VA_STATUS_ERROR_INVALID_FILTER_CHAIN is returned
     * from vaRenderPicture() with this buffer.
     *
     * #VA_STATUS_ERROR_UNSUPPORTED_FILTER is returned if the list
     * contains an unsupported filter.
     *
     * Note: no filter buffer is destroyed after a call to vaRenderPicture(),
     * only this pipeline buffer will be destroyed as per the core API
     * specification. This allows for flexibility in re-using the filter for
     * other surfaces to be processed.
     */
    VABufferID         *filters;
    /** \brief Actual number of filters. */
    unsigned int        num_filters;
    /** \brief Array of forward reference frames. */
    VASurfaceID        *forward_references;
    /** \brief Number of forward reference frames that were supplied. */
    unsigned int        num_forward_references;
    /** \brief Array of backward reference frames. */
    VASurfaceID        *backward_references;
    /** \brief Number of backward reference frames that were supplied. */
    unsigned int        num_backward_references;
} VAProcPipelineParameterBuffer;

/**
 * \brief Filter parameter buffer base.
 *
 * This is a helper structure used by driver implementations only.
 * Users are not supposed to allocate filter parameter buffers of this
 * type.
 */
typedef struct _VAProcFilterParameterBufferBase {
    /** \brief Filter type. */
    VAProcFilterType    type;
} VAProcFilterParameterBufferBase;

/**
 * \brief Default filter parametrization.
 *
 * Unless there is a filter-specific parameter buffer,
 * #VAProcFilterParameterBuffer is the default type to use.
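 *
 * For example, a sharpening filter could be parameterized with the default
 * value advertised by the driver (a minimal sketch; \c sharpen_caps is
 * assumed to have been filled in by vaQueryVideoProcFilterCaps() for
 * #VAProcFilterSharpening):
 *
 * \code
 * VAProcFilterParameterBuffer sharpen;
 * VABufferID sharpen_buf;
 *
 * sharpen.type  = VAProcFilterSharpening;
 * sharpen.value = sharpen_caps.range.default_value;
 * vaCreateBuffer(va_dpy, vpp_ctx, VAProcFilterParameterBufferType,
 *     sizeof(sharpen), 1, &sharpen, &sharpen_buf);
 * \endcode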
 */
typedef struct _VAProcFilterParameterBuffer {
    /** \brief Filter type. */
    VAProcFilterType    type;
    /** \brief Value. */
    float               value;
} VAProcFilterParameterBuffer;

/** @name De-interlacing flags */
/**@{*/
/**
 * \brief Bottom field first in the input frame.
 * If this flag is not set, top field first is assumed.
 */
#define VA_DEINTERLACING_BOTTOM_FIELD_FIRST	0x0001
/**
 * \brief Bottom field used in deinterlacing.
 * If this flag is not set, the top field is used.
 */
#define VA_DEINTERLACING_BOTTOM_FIELD		0x0002
/**
 * \brief A single field is stored in the input frame.
 * If this flag is not set, the frame is assumed to contain two interleaved fields.
 */
#define VA_DEINTERLACING_ONE_FIELD		0x0004
/**@}*/

/** \brief Deinterlacing filter parametrization. */
typedef struct _VAProcFilterParameterBufferDeinterlacing {
    /** \brief Filter type. Shall be set to #VAProcFilterDeinterlacing. */
    VAProcFilterType            type;
    /** \brief Deinterlacing algorithm. */
    VAProcDeinterlacingType     algorithm;
    /** \brief Deinterlacing flags. */
    unsigned int                flags;
} VAProcFilterParameterBufferDeinterlacing;
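
/*
 * Example: setting up a bob deinterlacing filter for a bottom-field-first
 * stream. This is a minimal sketch only; va_dpy and vpp_ctx are assumed to
 * be a valid display and a video processing context created by the
 * application.
 *
 *     VAProcFilterParameterBufferDeinterlacing deint;
 *     VABufferID deint_buf;
 *
 *     deint.type      = VAProcFilterDeinterlacing;
 *     deint.algorithm = VAProcDeinterlacingBob;
 *     deint.flags     = VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
 *     vaCreateBuffer(va_dpy, vpp_ctx, VAProcFilterParameterBufferType,
 *         sizeof(deint), 1, &deint, &deint_buf);
 */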

/**
 * \brief Color balance filter parametrization.
 *
 * This buffer defines color balance attributes. A VA buffer can hold
 * several color balance attributes by creating a VA buffer with the
 * desired number of elements. This can be achieved by the following
 * pseudo-code:
 *
 * \code
 * enum { kHue, kSaturation, kBrightness, kContrast };
 *
 * // Initial color balance parameters
 * static const VAProcFilterParameterBufferColorBalance colorBalanceParams[4] =
 * {
 *     [kHue] =
 *         { VAProcFilterColorBalance, VAProcColorBalanceHue, 0.5 },
 *     [kSaturation] =
 *         { VAProcFilterColorBalance, VAProcColorBalanceSaturation, 0.5 },
 *     [kBrightness] =
 *         { VAProcFilterColorBalance, VAProcColorBalanceBrightness, 0.5 },
 *     [kContrast] =
 *         { VAProcFilterColorBalance, VAProcColorBalanceContrast, 0.5 }
 * };
 *
 * // Create buffer
 * VABufferID colorBalanceBuffer;
 * vaCreateBuffer(va_dpy, vpp_ctx,
 *     VAProcFilterParameterBufferType, sizeof(colorBalanceParams[0]), 4,
 *     colorBalanceParams,
 *     &colorBalanceBuffer
 * );
 *
 * VAProcFilterParameterBufferColorBalance *pColorBalanceParam;
 * vaMapBuffer(va_dpy, colorBalanceBuffer, (void **)&pColorBalanceParam);
 * {
 *     // Change brightness only
 *     pColorBalanceParam[kBrightness].value = 0.75;
 * }
 * vaUnmapBuffer(va_dpy, colorBalanceBuffer);
 * \endcode
 */
typedef struct _VAProcFilterParameterBufferColorBalance {
    /** \brief Filter type. Shall be set to #VAProcFilterColorBalance. */
    VAProcFilterType            type;
    /** \brief Color balance attribute. */
    VAProcColorBalanceType      attrib;
    /**
     * \brief Color balance value.
     *
     * Special case for automatically adjusted attributes, e.g.
     * #VAProcColorBalanceAutoSaturation,
     * #VAProcColorBalanceAutoBrightness,
     * #VAProcColorBalanceAutoContrast.
     * - If \ref value is \c 1.0 +/- \c FLT_EPSILON, the attribute is
     *   automatically adjusted and overrides any other attribute of
     *   the same type that would have been set explicitly;
     * - If \ref value is \c 0.0 +/- \c FLT_EPSILON, the attribute is
     *   disabled and the other attribute of the same type is used instead.
     */
    float                       value;
} VAProcFilterParameterBufferColorBalance;

/**
 * \brief Default filter cap specification (single range value).
 *
 * Unless there is a filter-specific cap structure, #VAProcFilterCap is the
 * default type to use for output caps from vaQueryVideoProcFilterCaps().
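 *
 * For example, a normalized application setting in the [0..1] range could
 * be mapped onto the range advertised by the driver as follows (a sketch;
 * \c level is an assumed application-provided setting):
 *
 * \code
 * VAProcFilterCap denoise_cap;
 * unsigned int num_caps = 1;
 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx, VAProcFilterNoiseReduction,
 *     &denoise_cap, &num_caps);
 *
 * float value = denoise_cap.range.min_value +
 *     level * (denoise_cap.range.max_value - denoise_cap.range.min_value);
 * \endcode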
 */
typedef struct _VAProcFilterCap {
    /** \brief Range of supported values for the filter. */
    VAProcFilterValueRange      range;
} VAProcFilterCap;

/** \brief Capabilities specification for the deinterlacing filter. */
typedef struct _VAProcFilterCapDeinterlacing {
    /** \brief Deinterlacing algorithm. */
    VAProcDeinterlacingType     type;
} VAProcFilterCapDeinterlacing;

/** \brief Capabilities specification for the color balance filter. */
typedef struct _VAProcFilterCapColorBalance {
    /** \brief Color balance operation. */
    VAProcColorBalanceType      type;
    /** \brief Range of supported values for the specified operation. */
    VAProcFilterValueRange      range;
} VAProcFilterCapColorBalance;

/**
 * \brief Queries video processing filters.
 *
 * This function returns the list of video processing filters supported
 * by the driver. The \c filters array is allocated by the user and
 * \c num_filters shall be initialized to the number of allocated
 * elements in that array. Upon successful return, the actual number
 * of filters will be overwritten into \c num_filters. Otherwise,
 * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and \c num_filters
 * is adjusted to the number of elements that would be returned if enough
 * space was available.
 *
 * The list of video processing filters supported by the driver shall
 * be ordered in the way they can be iteratively applied. This is needed
 * both for correctness, since some filters would not make sense if applied
 * at the wrong point in the pipeline, and for performance, since some
 * filters can be applied in a single pass (e.g. noise reduction +
 * deinterlacing).
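 *
 * If the supplied array turns out to be too small, the query can simply be
 * retried with a larger allocation. This is a sketch only; error handling
 * is reduced to the retry:
 *
 * \code
 * unsigned int num_filters = 4;
 * VAProcFilterType *filters = malloc(num_filters * sizeof(*filters));
 * VAStatus status;
 *
 * status = vaQueryVideoProcFilters(va_dpy, vpp_ctx, filters, &num_filters);
 * if (status == VA_STATUS_ERROR_MAX_NUM_EXCEEDED) {
 *     // num_filters now holds the number of filters actually supported
 *     filters = realloc(filters, num_filters * sizeof(*filters));
 *     status  = vaQueryVideoProcFilters(va_dpy, vpp_ctx, filters, &num_filters);
 * }
 * \endcode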
 *
 * @param[in] dpy               the VA display
 * @param[in] context           the video processing context
 * @param[out] filters          the output array of #VAProcFilterType elements
 * @param[in,out] num_filters   the number of elements allocated on input,
 *      the number of elements actually filled in on output
 */
VAStatus
vaQueryVideoProcFilters(
    VADisplay           dpy,
    VAContextID         context,
    VAProcFilterType   *filters,
    unsigned int       *num_filters
);

/**
 * \brief Queries video filter capabilities.
 *
 * This function returns the list of capabilities supported by the driver
 * for a specific video filter. The \c filter_caps array is allocated by
 * the user and \c num_filter_caps shall be initialized to the number
 * of allocated elements in that array. Upon successful return, the
 * actual number of capabilities will be overwritten into \c num_filter_caps.
 * Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and
 * \c num_filter_caps is adjusted to the number of elements that would be
 * returned if enough space was available.
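 *
 * For instance, the capabilities of the color balance filter are returned
 * as an array of #VAProcFilterCapColorBalance elements (a sketch; \c va_dpy
 * and \c vpp_ctx are assumed to be a valid display and video processing
 * context):
 *
 * \code
 * VAProcFilterCapColorBalance cb_caps[VAProcColorBalanceCount];
 * unsigned int num_cb_caps = VAProcColorBalanceCount;
 *
 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx, VAProcFilterColorBalance,
 *     cb_caps, &num_cb_caps);
 * \endcode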
 *
 * @param[in] dpy               the VA display
 * @param[in] context           the video processing context
 * @param[in] type              the video filter type
 * @param[out] filter_caps      the output array of #VAProcFilterCap elements
 * @param[in,out] num_filter_caps the number of elements allocated on input,
 *      the number of elements actually filled in on output
 */
VAStatus
vaQueryVideoProcFilterCaps(
    VADisplay           dpy,
    VAContextID         context,
    VAProcFilterType    type,
    void               *filter_caps,
    unsigned int       *num_filter_caps
);

/**
 * \brief Queries video processing pipeline capabilities.
 *
 * This function returns the video processing pipeline capabilities. The
 * \c filters array defines the video processing pipeline and is an array
 * of buffers holding filter parameters.
 *
 * Note: the #VAProcPipelineCaps structure contains user-provided arrays.
 * If non-NULL, the corresponding \c num_* fields shall be filled in on
 * input with the number of elements allocated. Upon successful return,
 * the actual number of elements will be overwritten into the \c num_*
 * fields. Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned
 * and \c num_* fields are adjusted to the number of elements that would
 * be returned if enough space was available.
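 *
 * For instance, the supported input color standards can be retrieved by
 * pointing the corresponding array at user-allocated storage (a sketch;
 * error checking omitted, and \c filter_bufs / \c num_filter_bufs are
 * assumed to have been assembled as in the pipeline setup example above):
 *
 * \code
 * VAProcColorStandardType in_std[VAProcColorStandardCount];
 * VAProcPipelineCaps caps;
 *
 * caps.input_color_standards      = in_std;
 * caps.num_input_color_standards  = VAProcColorStandardCount;
 * caps.output_color_standards     = NULL;
 * caps.num_output_color_standards = 0;
 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx, filter_bufs, num_filter_bufs,
 *     &caps);
 * \endcode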
 *
 * @param[in] dpy               the VA display
 * @param[in] context           the video processing context
 * @param[in] filters           the array of VA buffers defining the video
 *      processing pipeline
 * @param[in] num_filters       the number of elements in filters
 * @param[in,out] pipeline_caps the video processing pipeline capabilities
 */
VAStatus
vaQueryVideoProcPipelineCaps(
    VADisplay           dpy,
    VAContextID         context,
    VABufferID         *filters,
    unsigned int        num_filters,
    VAProcPipelineCaps *pipeline_caps
);

/**@}*/

#ifdef __cplusplus
}
#endif

#endif /* VA_VPP_H */