2 * Copyright (c) 2007-2011 Intel Corporation. All Rights Reserved.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 * \brief The video processing API
29 * This file contains the \ref api_vpp "Video processing API".
40 * \defgroup api_vpp Video processing API
44 * The video processing API uses the same paradigm as for decoding:
45 * - Query for supported filters;
46 * - Set up a video processing pipeline;
47 * - Send video processing parameters through VA buffers.
49 * \section api_vpp_caps Query for supported filters
51 * Checking whether video processing is supported can be performed
52 * with vaQueryConfigEntrypoints() and the profile argument set to
53 * #VAProfileNone. If video processing is supported, then the list of
54 * returned entry-points will include #VAEntrypointVideoProc.
57 * VAEntrypoint *entrypoints;
58 * int i, num_entrypoints, supportsVideoProcessing = 0;
60 * num_entrypoints = vaMaxNumEntrypoints();
 * entrypoints = malloc(num_entrypoints * sizeof(entrypoints[0]));
62 * vaQueryConfigEntrypoints(va_dpy, VAProfileNone,
63 * entrypoints, &num_entrypoints);
65 * for (i = 0; !supportsVideoProcessing && i < num_entrypoints; i++) {
66 * if (entrypoints[i] == VAEntrypointVideoProc)
67 * supportsVideoProcessing = 1;
71 * Then, the vaQueryVideoProcFilters() function is used to query the
72 * list of video processing filters.
75 * VAProcFilterType filters[VAProcFilterCount];
76 * unsigned int num_filters = VAProcFilterCount;
78 * // num_filters shall be initialized to the length of the array
79 * vaQueryVideoProcFilters(va_dpy, vpp_ctx, &filters, &num_filters);
82 * Finally, individual filter capabilities can be checked with
83 * vaQueryVideoProcFilterCaps().
86 * VAProcFilterCap denoise_caps;
87 * unsigned int num_denoise_caps = 1;
88 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
89 * VAProcFilterNoiseReduction,
90 * &denoise_caps, &num_denoise_caps
93 * VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
94 * unsigned int num_deinterlacing_caps = VAProcDeinterlacingCount;
95 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
96 * VAProcFilterDeinterlacing,
97 * &deinterlacing_caps, &num_deinterlacing_caps
101 * \section api_vpp_setup Set up a video processing pipeline
103 * A video processing pipeline buffer is created for each source
104 * surface we want to process. However, buffers holding filter
105 * parameters can be created once and for all. Rationale is to avoid
106 * multiple creation/destruction chains of filter buffers and also
107 * because filter parameters generally won't change frame after
108 * frame. e.g. this makes it possible to implement a checkerboard of
109 * videos where the same filters are applied to each video source.
111 * The general control flow is demonstrated by the following pseudo-code:
114 * VABufferID denoise_filter, deint_filter;
115 * VABufferID filter_bufs[VAProcFilterCount];
116 * unsigned int num_filter_bufs;
118 * for (i = 0; i < num_filters; i++) {
119 * switch (filters[i]) {
120 * case VAProcFilterNoiseReduction: { // Noise reduction filter
121 * VAProcFilterParameterBuffer denoise;
122 * denoise.type = VAProcFilterNoiseReduction;
123 * denoise.value = 0.5;
124 * vaCreateBuffer(va_dpy, vpp_ctx,
125 * VAProcFilterParameterBufferType, sizeof(denoise), 1,
126 * &denoise, &denoise_filter
128 * filter_bufs[num_filter_bufs++] = denoise_filter;
132 * case VAProcFilterDeinterlacing: // Motion-adaptive deinterlacing
133 * for (j = 0; j < num_deinterlacing_caps; j++) {
134 * VAProcFilterCapDeinterlacing * const cap = &deinterlacing_caps[j];
135 * if (cap->type != VAProcDeinterlacingMotionAdaptive)
138 * VAProcFilterParameterBufferDeinterlacing deint;
139 * deint.type = VAProcFilterDeinterlacing;
140 * deint.algorithm = VAProcDeinterlacingMotionAdaptive;
141 * vaCreateBuffer(va_dpy, vpp_ctx,
142 * VAProcFilterParameterBufferType, sizeof(deint), 1,
143 * &deint, &deint_filter
145 * filter_bufs[num_filter_bufs++] = deint_filter;
151 * Once the video processing pipeline is set up, the caller shall check the
152 * implied capabilities and requirements with vaQueryVideoProcPipelineCaps().
153 * This function can be used to validate the number of reference frames are
154 * needed by the specified deinterlacing algorithm, the supported color
158 * VAProcPipelineCaps pipeline_caps;
159 * VASurfaceID *forward_references;
160 * unsigned int num_forward_references;
161 * VASurfaceID *backward_references;
162 * unsigned int num_backward_references;
163 * VAProcColorStandardType in_color_standards[VAProcColorStandardCount];
164 * VAProcColorStandardType out_color_standards[VAProcColorStandardCount];
166 * pipeline_caps.input_color_standards = NULL;
167 * pipeline_caps.num_input_color_standards = ARRAY_ELEMS(in_color_standards);
168 * pipeline_caps.output_color_standards = NULL;
169 * pipeline_caps.num_output_color_standards = ARRAY_ELEMS(out_color_standards);
170 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
171 * filter_bufs, num_filter_bufs,
175 * num_forward_references = pipeline_caps.num_forward_references;
176 * forward_references =
 * malloc(num_forward_references * sizeof(VASurfaceID));
178 * num_backward_references = pipeline_caps.num_backward_references;
179 * backward_references =
180 * malloc(num_backward_references * sizeof(VASurfaceID));
183 * \section api_vpp_submit Send video processing parameters through VA buffers
185 * Video processing pipeline parameters are submitted for each source
186 * surface to process. Video filter parameters can also change, per-surface.
187 * e.g. the list of reference frames used for deinterlacing.
190 * foreach (iteration) {
191 * vaBeginPicture(va_dpy, vpp_ctx, vpp_surface);
192 * foreach (surface) {
193 * VARectangle output_region;
194 * VABufferID pipeline_buf;
195 * VAProcPipelineParameterBuffer *pipeline_param;
197 * vaCreateBuffer(va_dpy, vpp_ctx,
 * VAProcPipelineParameterBufferType, sizeof(*pipeline_param), 1,
199 * NULL, &pipeline_buf
202 * // Setup output region for this surface
203 * // e.g. upper left corner for the first surface
204 * output_region.x = BORDER;
205 * output_region.y = BORDER;
206 * output_region.width =
207 * (vpp_surface_width - (Nx_surfaces + 1) * BORDER) / Nx_surfaces;
208 * output_region.height =
209 * (vpp_surface_height - (Ny_surfaces + 1) * BORDER) / Ny_surfaces;
211 * vaMapBuffer(va_dpy, pipeline_buf, &pipeline_param);
212 * pipeline_param->surface = surface;
213 * pipeline_param->surface_region = NULL;
214 * pipeline_param->output_region = &output_region;
215 * pipeline_param->output_background_color = 0;
216 * if (first surface to render)
217 * pipeline_param->output_background_color = 0xff000000; // black
218 * pipeline_param->filter_flags = VA_FILTER_SCALING_HQ;
219 * pipeline_param->filters = filter_bufs;
220 * pipeline_param->num_filters = num_filter_bufs;
221 * vaUnmapBuffer(va_dpy, pipeline_buf);
223 * // Update reference frames for deinterlacing, if necessary
224 * pipeline_param->forward_references = forward_references;
225 * pipeline_param->num_forward_references = num_forward_references_used;
226 * pipeline_param->backward_references = backward_references;
 * pipeline_param->num_backward_references = num_backward_references_used;
230 * vaRenderPicture(va_dpy, vpp_ctx, &pipeline_buf, 1);
232 * vaEndPicture(va_dpy, vpp_ctx);
/** \brief Video filter types. */
typedef enum _VAProcFilterType {
    VAProcFilterNone = 0,
    /** \brief Noise reduction filter. */
    VAProcFilterNoiseReduction,
    /** \brief Deinterlacing filter. */
    VAProcFilterDeinterlacing,
    /** \brief Sharpening filter. */
    VAProcFilterSharpening,
    /** \brief Color balance parameters. */
    VAProcFilterColorBalance,
    /** \brief Skin Tone Enhancement. */
    VAProcFilterSkinToneEnhancement,
    /** \brief Total Color Correction. */
    VAProcFilterTotalColorCorrection,
    /** \brief Number of video filters. */
    VAProcFilterCount
} VAProcFilterType;
/** \brief Deinterlacing types. */
typedef enum _VAProcDeinterlacingType {
    /** \brief No deinterlacing. */
    VAProcDeinterlacingNone = 0,
    /** \brief Bob deinterlacing algorithm. */
    VAProcDeinterlacingBob,
    /** \brief Weave deinterlacing algorithm. */
    VAProcDeinterlacingWeave,
    /** \brief Motion adaptive deinterlacing algorithm. */
    VAProcDeinterlacingMotionAdaptive,
    /** \brief Motion compensated deinterlacing algorithm. */
    VAProcDeinterlacingMotionCompensated,
    /** \brief Number of deinterlacing algorithms. */
    VAProcDeinterlacingCount
} VAProcDeinterlacingType;
/** \brief Color balance types. */
typedef enum _VAProcColorBalanceType {
    VAProcColorBalanceNone = 0,
    /** \brief Hue. */
    VAProcColorBalanceHue,
    /** \brief Saturation. */
    VAProcColorBalanceSaturation,
    /** \brief Brightness. */
    VAProcColorBalanceBrightness,
    /** \brief Contrast. */
    VAProcColorBalanceContrast,
    /** \brief Automatically adjusted saturation. */
    VAProcColorBalanceAutoSaturation,
    /** \brief Automatically adjusted brightness. */
    VAProcColorBalanceAutoBrightness,
    /** \brief Automatically adjusted contrast. */
    VAProcColorBalanceAutoContrast,
    /** \brief Number of color balance attributes. */
    VAProcColorBalanceCount
} VAProcColorBalanceType;
/** \brief Color standard types. */
typedef enum _VAProcColorStandardType {
    VAProcColorStandardNone = 0,
    /** \brief ITU-R BT.601. */
    VAProcColorStandardBT601,
    /** \brief ITU-R BT.709. */
    VAProcColorStandardBT709,
    /** \brief ITU-R BT.470-2 System M. */
    VAProcColorStandardBT470M,
    /** \brief ITU-R BT.470-2 System B, G. */
    VAProcColorStandardBT470BG,
    /** \brief SMPTE-170M. */
    VAProcColorStandardSMPTE170M,
    /** \brief SMPTE-240M. */
    VAProcColorStandardSMPTE240M,
    /** \brief Generic film. */
    VAProcColorStandardGenericFilm,
    /** \brief sRGB. */
    VAProcColorStandardSRGB,
    /** \brief stRGB. */
    VAProcColorStandardSTRGB,
    /** \brief xvYCC601. */
    VAProcColorStandardXVYCC601,
    /** \brief xvYCC709. */
    VAProcColorStandardXVYCC709,
    /** \brief ITU-R BT.2020. */
    VAProcColorStandardBT2020,
    /** \brief Number of color standards. */
    VAProcColorStandardCount
} VAProcColorStandardType;
/** \brief Total color correction types. */
typedef enum _VAProcTotalColorCorrectionType {
    VAProcTotalColorCorrectionNone = 0,
    /** \brief Red Saturation. */
    VAProcTotalColorCorrectionRed,
    /** \brief Green Saturation. */
    VAProcTotalColorCorrectionGreen,
    /** \brief Blue Saturation. */
    VAProcTotalColorCorrectionBlue,
    /** \brief Cyan Saturation. */
    VAProcTotalColorCorrectionCyan,
    /** \brief Magenta Saturation. */
    VAProcTotalColorCorrectionMagenta,
    /** \brief Yellow Saturation. */
    VAProcTotalColorCorrectionYellow,
    /** \brief Number of color correction attributes. */
    VAProcTotalColorCorrectionCount
} VAProcTotalColorCorrectionType;
/** @name Video blending flags */
/**@{*/
/** \brief Global alpha blending. */
#define VA_BLEND_GLOBAL_ALPHA           0x0001
/** \brief Premultiplied alpha blending (RGBA surfaces only). */
#define VA_BLEND_PREMULTIPLIED_ALPHA    0x0002
/** \brief Luma color key (YUV surfaces only). */
#define VA_BLEND_LUMA_KEY               0x0010
/**@}*/
/** \brief Video blending state definition. */
typedef struct _VABlendState {
    /** \brief Video blending flags. */
    unsigned int        flags;
    /**
     * \brief Global alpha value.
     *
     * Valid if \ref flags has VA_BLEND_GLOBAL_ALPHA.
     * Valid range is 0.0 to 1.0 inclusive.
     */
    float               global_alpha;
    /**
     * \brief Minimum luma value.
     *
     * Valid if \ref flags has VA_BLEND_LUMA_KEY.
     * Valid range is 0.0 to 1.0 inclusive.
     * \ref min_luma shall be set to a sensible value lower than \ref max_luma.
     */
    float               min_luma;
    /**
     * \brief Maximum luma value.
     *
     * Valid if \ref flags has VA_BLEND_LUMA_KEY.
     * Valid range is 0.0 to 1.0 inclusive.
     * \ref max_luma shall be set to a sensible value larger than \ref min_luma.
     */
    float               max_luma;
} VABlendState;
/** @name Video pipeline flags */
/**@{*/
/** \brief Specifies whether to apply subpictures when processing a surface. */
#define VA_PROC_PIPELINE_SUBPICTURES    0x00000001
/**
 * \brief Specifies whether to apply power or performance
 * optimizations to a pipeline.
 *
 * When processing several surfaces, it may be necessary to prioritize
 * more certain pipelines than others. This flag is only a hint to the
 * video processor so that it can omit certain filters to save power
 * for example. Typically, this flag could be used with video surfaces
 * decoded from a secondary bitstream.
 */
#define VA_PROC_PIPELINE_FAST           0x00000002
/**@}*/

/** @name Video filter flags */
/**@{*/
/** \brief Specifies whether the filter shall be present in the pipeline. */
#define VA_PROC_FILTER_MANDATORY        0x00000001
/**@}*/

/** @name Pipeline end flags */
/**@{*/
/** \brief Specifies the pipeline is the last. */
#define VA_PIPELINE_FLAG_END            0x00000004
/**@}*/
/** @name Chroma Siting flag */
/**@{*/
/** Vertical chroma siting takes bits 0-1, horizontal chroma siting takes bits 2-3;
 * the overall chroma siting is (vertical chroma siting | horizontal chroma siting). */
#define VA_CHROMA_SITING_UNKNOWN              0x00
/** \brief Chroma samples are co-sited vertically on the top with the luma samples. */
#define VA_CHROMA_SITING_VERTICAL_TOP         0x01
/** \brief Chroma samples are not co-sited vertically with the luma samples. */
#define VA_CHROMA_SITING_VERTICAL_CENTER      0x02
/** \brief Chroma samples are co-sited vertically on the bottom with the luma samples. */
#define VA_CHROMA_SITING_VERTICAL_BOTTOM      0x03
/** \brief Chroma samples are co-sited horizontally on the left with the luma samples. */
#define VA_CHROMA_SITING_HORIZONTAL_LEFT      0x04
/** \brief Chroma samples are not co-sited horizontally with the luma samples. */
#define VA_CHROMA_SITING_HORIZONTAL_CENTER    0x08
/**@}*/
/**
 * This is to indicate that the color-space conversion uses full range or reduced range.
 * VA_SOURCE_RANGE_FULL(Full range): Y/Cb/Cr is in [0, 255]. It is mainly used
 * for JPEG/JFIF formats. The combination with the BT601 flag means that
 * JPEG/JFIF color-space conversion matrix is used.
 * VA_SOURCE_RANGE_REDUCED(Reduced range): Y is in [16, 235] and Cb/Cr is in [16, 240].
 * It is mainly used for the YUV->RGB color-space conversion in SDTV/HDTV/UHDTV.
 */
#define VA_SOURCE_RANGE_UNKNOWN         0
#define VA_SOURCE_RANGE_REDUCED         1
#define VA_SOURCE_RANGE_FULL            2
438 /** \brief Video processing pipeline capabilities. */
439 typedef struct _VAProcPipelineCaps {
440 /** \brief Pipeline flags. See VAProcPipelineParameterBuffer::pipeline_flags. */
441 uint32_t pipeline_flags;
442 /** \brief Extra filter flags. See VAProcPipelineParameterBuffer::filter_flags. */
443 uint32_t filter_flags;
444 /** \brief Number of forward reference frames that are needed. */
445 uint32_t num_forward_references;
446 /** \brief Number of backward reference frames that are needed. */
447 uint32_t num_backward_references;
448 /** \brief List of color standards supported on input. */
449 VAProcColorStandardType *input_color_standards;
450 /** \brief Number of elements in \ref input_color_standards array. */
451 uint32_t num_input_color_standards;
452 /** \brief List of color standards supported on output. */
453 VAProcColorStandardType *output_color_standards;
454 /** \brief Number of elements in \ref output_color_standards array. */
455 uint32_t num_output_color_standards;
458 * \brief Rotation flags.
460 * For each rotation angle supported by the underlying hardware,
461 * the corresponding bit is set in \ref rotation_flags. See
462 * "Rotation angles" for a description of rotation angles.
464 * A value of 0 means the underlying hardware does not support any
465 * rotation. Otherwise, a check for a specific rotation angle can be
466 * performed as follows:
469 * VAProcPipelineCaps pipeline_caps;
471 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
472 * filter_bufs, num_filter_bufs,
476 * if (pipeline_caps.rotation_flags & (1 << VA_ROTATION_xxx)) {
477 * // Clockwise rotation by xxx degrees is supported
482 uint32_t rotation_flags;
483 /** \brief Blend flags. See "Video blending flags". */
484 uint32_t blend_flags;
486 * \brief Mirroring flags.
488 * For each mirroring direction supported by the underlying hardware,
489 * the corresponding bit is set in \ref mirror_flags. See
490 * "Mirroring directions" for a description of mirroring directions.
493 uint32_t mirror_flags;
494 /** \brief Number of additional output surfaces supported by the pipeline */
495 uint32_t num_additional_outputs;
497 /** \brief Number of elements in \ref input_pixel_format array. */
498 uint32_t num_input_pixel_formats;
499 /** \brief List of input pixel formats in fourcc. */
500 uint32_t *input_pixel_format;
501 /** \brief Number of elements in \ref output_pixel_format array. */
502 uint32_t num_output_pixel_formats;
503 /** \brief List of output pixel formats in fourcc. */
504 uint32_t *output_pixel_format;
506 /** \brief Max supported input width in pixels. */
507 uint32_t max_input_width;
508 /** \brief Max supported input height in pixels. */
509 uint32_t max_input_height;
510 /** \brief Min supported input width in pixels. */
511 uint32_t min_input_width;
512 /** \brief Min supported input height in pixels. */
513 uint32_t min_input_height;
515 /** \brief Max supported output width in pixels. */
516 uint32_t max_output_width;
517 /** \brief Max supported output height in pixels. */
518 uint32_t max_output_height;
519 /** \brief Min supported output width in pixels. */
520 uint32_t min_output_width;
521 /** \brief Min supported output height in pixels. */
522 uint32_t min_output_height;
523 /** \brief Reserved bytes for future use, must be zero */
524 #if defined(__AMD64__) || defined(__x86_64__) || defined(__amd64__) || defined(__LP64__)
525 uint32_t va_reserved[VA_PADDING_HIGH - 2];
527 uint32_t va_reserved[VA_PADDING_HIGH];
529 } VAProcPipelineCaps;
531 /** \brief Specification of values supported by the filter. */
532 typedef struct _VAProcFilterValueRange {
533 /** \brief Minimum value supported, inclusive. */
535 /** \brief Maximum value supported, inclusive. */
537 /** \brief Default value. */
539 /** \brief Step value that alters the filter behaviour in a sensible way. */
542 /** \brief Reserved bytes for future use, must be zero */
543 uint32_t va_reserved[VA_PADDING_LOW];
544 } VAProcFilterValueRange;
/** \brief Color properties of a surface. */
typedef struct _VAProcColorProperties {
    /** Chroma sample location. \c VA_CHROMA_SITING_VERTICAL_XXX | VA_CHROMA_SITING_HORIZONTAL_XXX */
    uint8_t chroma_sample_location;
    /** Color range. \c VA_SOURCE_RANGE_XXX */
    uint8_t color_range;
    /** Reserved bytes for future use, must be zero. */
    uint8_t reserved[6];
} VAProcColorProperties;
555 * \brief Video processing pipeline configuration.
557 * This buffer defines a video processing pipeline. The actual filters to
558 * be applied are provided in the \c filters field, they can be re-used
559 * in other processing pipelines.
561 * The target surface is specified by the \c render_target argument of
562 * \c vaBeginPicture(). The general usage model is described as follows:
563 * - \c vaBeginPicture(): specify the target surface that receives the
565 * - \c vaRenderPicture(): specify a surface to be processed and composed
566 * into the \c render_target. Use as many \c vaRenderPicture() calls as
567 * necessary surfaces to compose ;
568 * - \c vaEndPicture(): tell the driver to start processing the surfaces
569 * with the requested filters.
571 * If a filter (e.g. noise reduction) needs to be applied with different
572 * values for multiple surfaces, the application needs to create as many
573 * filter parameter buffers as necessary. i.e. the filter parameters shall
574 * not change between two calls to \c vaRenderPicture().
576 * For composition usage models, the first surface to process will generally
577 * use an opaque background color, i.e. \c output_background_color set with
578 * the most significant byte set to \c 0xff. For instance, \c 0xff000000 for
579 * a black background. Then, subsequent surfaces would use a transparent
582 typedef struct _VAProcPipelineParameterBuffer {
584 * \brief Source surface ID.
586 * ID of the source surface to process. If subpictures are associated
587 * with the video surfaces then they shall be rendered to the target
588 * surface, if the #VA_PROC_PIPELINE_SUBPICTURES pipeline flag is set.
592 * \brief Region within the source surface to be processed.
594 * Pointer to a #VARectangle defining the region within the source
595 * surface to be processed. If NULL, \c surface_region implies the
598 const VARectangle *surface_region;
600 * \brief Requested input color primaries.
602 * Color primaries are implicitly converted throughout the processing
603 * pipeline. The video processor chooses the best moment to apply
604 * this conversion. The set of supported color primaries primaries
605 * for input shall be queried with vaQueryVideoProcPipelineCaps().
607 VAProcColorStandardType surface_color_standard;
609 * \brief Region within the output surface.
611 * Pointer to a #VARectangle defining the region within the output
612 * surface that receives the processed pixels. If NULL, \c output_region
613 * implies the whole surface.
615 * Note that any pixels residing outside the specified region will
616 * be filled in with the \ref output_background_color.
618 const VARectangle *output_region;
620 * \brief Background color.
622 * Background color used to fill in pixels that reside outside of the
623 * specified \ref output_region. The color is specified in ARGB format:
624 * [31:24] alpha, [23:16] red, [15:8] green, [7:0] blue.
626 * Unless the alpha value is zero or the \ref output_region represents
627 * the whole target surface size, implementations shall not render the
628 * source surface to the target surface directly. Rather, in order to
629 * maintain the exact semantics of \ref output_background_color, the
630 * driver shall use a temporary surface and fill it in with the
631 * appropriate background color. Next, the driver will blend this
632 * temporary surface into the target surface.
634 uint32_t output_background_color;
636 * \brief Requested output color primaries.
638 VAProcColorStandardType output_color_standard;
640 * \brief Pipeline filters. See video pipeline flags.
642 * Flags to control the pipeline, like whether to apply subpictures
643 * or not, notify the driver that it can opt for power optimizations,
644 * should this be needed.
646 uint32_t pipeline_flags;
648 * \brief Extra filter flags. See vaPutSurface() flags.
650 * Filter flags are used as a fast path, wherever possible, to use
651 * vaPutSurface() flags instead of explicit filter parameter buffers.
653 * Allowed filter flags API-wise. Use vaQueryVideoProcPipelineCaps()
654 * to check for implementation details:
655 * - Bob-deinterlacing: \c VA_FRAME_PICTURE, \c VA_TOP_FIELD,
656 * \c VA_BOTTOM_FIELD. Note that any deinterlacing filter
657 * (#VAProcFilterDeinterlacing) will override those flags.
658 * - Color space conversion: \c VA_SRC_BT601, \c VA_SRC_BT709,
659 * \c VA_SRC_SMPTE_240.
660 * - Scaling: \c VA_FILTER_SCALING_DEFAULT, \c VA_FILTER_SCALING_FAST,
661 * \c VA_FILTER_SCALING_HQ, \c VA_FILTER_SCALING_NL_ANAMORPHIC.
663 uint32_t filter_flags;
665 * \brief Array of filters to apply to the surface.
667 * The list of filters shall be ordered in the same way the driver expects
668 * them. i.e. as was returned from vaQueryVideoProcFilters().
669 * Otherwise, a #VA_STATUS_ERROR_INVALID_FILTER_CHAIN is returned
670 * from vaRenderPicture() with this buffer.
672 * #VA_STATUS_ERROR_UNSUPPORTED_FILTER is returned if the list
673 * contains an unsupported filter.
677 /** \brief Actual number of filters. */
678 uint32_t num_filters;
679 /** \brief Array of forward reference frames. */
680 VASurfaceID *forward_references;
681 /** \brief Number of forward reference frames that were supplied. */
682 uint32_t num_forward_references;
683 /** \brief Array of backward reference frames. */
684 VASurfaceID *backward_references;
685 /** \brief Number of backward reference frames that were supplied. */
686 uint32_t num_backward_references;
688 * \brief Rotation state. See rotation angles.
690 * The rotation angle is clockwise. There is no specific rotation
691 * center for this operation. Rather, The source \ref surface is
692 * first rotated by the specified angle and then scaled to fit the
693 * \ref output_region.
695 * This means that the top-left hand corner (0,0) of the output
696 * (rotated) surface is expressed as follows:
697 * - \ref VA_ROTATION_NONE: (0,0) is the top left corner of the
698 * source surface -- no rotation is performed ;
699 * - \ref VA_ROTATION_90: (0,0) is the bottom-left corner of the
701 * - \ref VA_ROTATION_180: (0,0) is the bottom-right corner of the
702 * source surface -- the surface is flipped around the X axis ;
703 * - \ref VA_ROTATION_270: (0,0) is the top-right corner of the
706 * Check VAProcPipelineCaps::rotation_flags first prior to
707 * defining a specific rotation angle. Otherwise, the hardware can
708 * perfectly ignore this variable if it does not support any
711 uint32_t rotation_state;
713 * \brief blending state. See "Video blending state definition".
715 * If \ref blend_state is NULL, then default operation mode depends
716 * on the source \ref surface format:
717 * - RGB: per-pixel alpha blending ;
718 * - YUV: no blending, i.e override the underlying pixels.
720 * Otherwise, \ref blend_state is a pointer to a #VABlendState
721 * structure that shall be live until vaEndPicture().
723 * Implementation note: the driver is responsible for checking the
724 * blend state flags against the actual source \ref surface format.
725 * e.g. premultiplied alpha blending is only applicable to RGB
726 * surfaces, and luma keying is only applicable to YUV surfaces.
727 * If a mismatch occurs, then #VA_STATUS_ERROR_INVALID_BLEND_STATE
730 const VABlendState *blend_state;
732 * \bried mirroring state. See "Mirroring directions".
734 * Mirroring of an image can be performed either along the
735 * horizontal or vertical axis. It is assumed that the rotation
736 * operation is always performed before the mirroring operation.
738 uint32_t mirror_state;
739 /** \brief Array of additional output surfaces. */
740 VASurfaceID *additional_outputs;
741 /** \brief Number of additional output surfaces. */
742 uint32_t num_additional_outputs;
744 * \brief Flag to indicate the input surface flag
746 * bit0: 0 non-protected 1: protected
747 * bit 1~31 for future
749 uint32_t input_surface_flag;
751 * \brief Flag to indicate the output surface flag
753 * bit0: 0 non-protected 1: protected
754 * bit 1~31 for future
756 uint32_t output_surface_flag;
758 VAProcColorProperties input_color_properties;
760 VAProcColorProperties output_color_properties;
762 /** \brief Reserved bytes for future use, must be zero */
763 #if defined(__AMD64__) || defined(__x86_64__) || defined(__amd64__)|| defined(__LP64__)
764 uint32_t va_reserved[VA_PADDING_LARGE - 13];
766 uint32_t va_reserved[VA_PADDING_LARGE - 11];
768 } VAProcPipelineParameterBuffer;
771 * \brief Filter parameter buffer base.
773 * This is a helper structure used by driver implementations only.
774 * Users are not supposed to allocate filter parameter buffers of this
777 typedef struct _VAProcFilterParameterBufferBase {
778 /** \brief Filter type. */
779 VAProcFilterType type;
780 } VAProcFilterParameterBufferBase;
783 * \brief Default filter parametrization.
785 * Unless there is a filter-specific parameter buffer,
786 * #VAProcFilterParameterBuffer is the default type to use.
788 typedef struct _VAProcFilterParameterBuffer {
789 /** \brief Filter type. */
790 VAProcFilterType type;
794 /** \brief Reserved bytes for future use, must be zero */
795 uint32_t va_reserved[VA_PADDING_LOW];
796 } VAProcFilterParameterBuffer;
/** @name De-interlacing flags */
/**@{*/
/**
 * \brief Bottom field first in the input frame.
 * if this is not set then assumes top field first.
 */
#define VA_DEINTERLACING_BOTTOM_FIELD_FIRST     0x0001
/**
 * \brief Bottom field used in deinterlacing.
 * if this is not set then assumes top field is used.
 */
#define VA_DEINTERLACING_BOTTOM_FIELD           0x0002
/**
 * \brief A single field is stored in the input frame.
 * if this is not set then assumes the frame contains two interleaved fields.
 */
#define VA_DEINTERLACING_ONE_FIELD              0x0004
/**
 * \brief Film Mode Detection is enabled. If enabled, driver performs inverse
 * of various pulldowns, such as 3:2 pulldown.
 * if this is not set then assumes FMD is disabled.
 */
#define VA_DEINTERLACING_FMD_ENABLE             0x0008

//Scene change parameter for ADI on Linux, if enabled, driver use spatial DI(Bob), instead of ADI. if not, use old behavior for ADI
//Input stream is TFF(set flags = 0), SRC0,1,2,3 are interlaced frame (top +bottom fields), DSTs are progressive frames
//
//SRC0 -> BOBDI, no reference, set flag = 0, output DST0
//SRC1 -> ADI, reference frame=SRC0, set flags = 0, call VP, output DST1
//SRC2 -> ADI, reference frame=SRC1, set flags = 0x0010(decimal 16), call VP, output DST2(T4)
//SRC3 -> ADI, reference frame=SRC2, set flags = 0, call VP, output DST3
//
//SRC0 -> BOBDI, no reference, set flag = 0, output DST0
//SRC0 -> BOBDI, no reference, set flag =0x0002, output DST1
//
//SRC1 -> ADI, reference frame =SRC0, set flags = 0, call VP, output DST2
//SRC1 -> ADI, reference frame =SRC0, set flags = 0x0012(decimal18), call VP, output DST3(B3)
//
//SRC2 -> ADI, reference frame =SRC1, set flags = 0x0010(decimal 16), call VP, output DST4(T4)
//SRC2 -> ADI, reference frame =SRC1, set flags = 0x0002, call VP, output DST5
//
//SRC3 -> ADI, reference frame =SRC2, set flags = 0, call VP, output DST6
//SRC3 -> ADI, reference frame =SRC1, set flags = 0x0002, call VP, output DST7
//
#define VA_DEINTERLACING_SCD_ENABLE             0x0010
/**@}*/
846 /** \brief Deinterlacing filter parametrization. */
847 typedef struct _VAProcFilterParameterBufferDeinterlacing {
848 /** \brief Filter type. Shall be set to #VAProcFilterDeinterlacing. */
849 VAProcFilterType type;
850 /** \brief Deinterlacing algorithm. */
851 VAProcDeinterlacingType algorithm;
852 /** \brief Deinterlacing flags. */
855 /** \brief Reserved bytes for future use, must be zero */
856 uint32_t va_reserved[VA_PADDING_LOW];
857 } VAProcFilterParameterBufferDeinterlacing;
/**
 * \brief Color balance filter parametrization.
 *
 * This buffer defines color balance attributes. A VA buffer can hold
 * several color balance attributes by creating a VA buffer of desired
 * number of elements. This can be achieved by the following pseudo-code:
 *
 * \code
 * enum { kHue, kSaturation, kBrightness, kContrast };
 *
 * // Initial color balance parameters
 * static const VAProcFilterParameterBufferColorBalance colorBalanceParams[4] =
 * {
 *     { VAProcFilterColorBalance, VAProcColorBalanceHue, 0.5 },
 *     { VAProcFilterColorBalance, VAProcColorBalanceSaturation, 0.5 },
 *     { VAProcFilterColorBalance, VAProcColorBalanceBrightness, 0.5 },
 *     { VAProcFilterColorBalance, VAProcColorBalanceContrast, 0.5 }
 * };
 *
 * // Create the color balance parameter buffer
 * VABufferID colorBalanceBuffer;
 * vaCreateBuffer(va_dpy, vpp_ctx,
 *     VAProcFilterParameterBufferType, sizeof(colorBalanceParams[0]), 4,
 *     colorBalanceParams,
 *     &colorBalanceBuffer
 * );
 *
 * VAProcFilterParameterBufferColorBalance *pColorBalanceParam;
 * vaMapBuffer(va_dpy, colorBalanceBuffer, &pColorBalanceParam);
 * {
 *     // Change brightness only
 *     pColorBalanceParam[kBrightness].value = 0.75;
 * }
 * vaUnmapBuffer(va_dpy, colorBalanceBuffer);
 * \endcode
 */
899 typedef struct _VAProcFilterParameterBufferColorBalance {
900 /** \brief Filter type. Shall be set to #VAProcFilterColorBalance. */
901 VAProcFilterType type;
902 /** \brief Color balance attribute. */
903 VAProcColorBalanceType attrib;
905 * \brief Color balance value.
907 * Special case for automatically adjusted attributes. e.g.
908 * #VAProcColorBalanceAutoSaturation,
909 * #VAProcColorBalanceAutoBrightness,
910 * #VAProcColorBalanceAutoContrast.
911 * - If \ref value is \c 1.0 +/- \c FLT_EPSILON, the attribute is
912 * automatically adjusted and overrides any other attribute of
913 * the same type that would have been set explicitly;
914 * - If \ref value is \c 0.0 +/- \c FLT_EPSILON, the attribute is
915 * disabled and other attribute of the same type is used instead.
919 /** \brief Reserved bytes for future use, must be zero */
920 uint32_t va_reserved[VA_PADDING_LOW];
921 } VAProcFilterParameterBufferColorBalance;
923 /** \brief Total color correction filter parametrization. */
924 typedef struct _VAProcFilterParameterBufferTotalColorCorrection {
925 /** \brief Filter type. Shall be set to #VAProcFilterTotalColorCorrection. */
926 VAProcFilterType type;
927 /** \brief Color to correct. */
928 VAProcTotalColorCorrectionType attrib;
929 /** \brief Color correction value. */
931 } VAProcFilterParameterBufferTotalColorCorrection;
933 * \brief Default filter cap specification (single range value).
935 * Unless there is a filter-specific cap structure, #VAProcFilterCap is the
936 * default type to use for output caps from vaQueryVideoProcFilterCaps().
938 typedef struct _VAProcFilterCap {
939 /** \brief Range of supported values for the filter. */
940 VAProcFilterValueRange range;
942 /** \brief Reserved bytes for future use, must be zero */
943 uint32_t va_reserved[VA_PADDING_LOW];
946 /** \brief Capabilities specification for the deinterlacing filter. */
947 typedef struct _VAProcFilterCapDeinterlacing {
948 /** \brief Deinterlacing algorithm. */
949 VAProcDeinterlacingType type;
951 /** \brief Reserved bytes for future use, must be zero */
952 uint32_t va_reserved[VA_PADDING_LOW];
953 } VAProcFilterCapDeinterlacing;
955 /** \brief Capabilities specification for the color balance filter. */
956 typedef struct _VAProcFilterCapColorBalance {
957 /** \brief Color balance operation. */
958 VAProcColorBalanceType type;
959 /** \brief Range of supported values for the specified operation. */
960 VAProcFilterValueRange range;
962 /** \brief Reserved bytes for future use, must be zero */
963 uint32_t va_reserved[VA_PADDING_LOW];
964 } VAProcFilterCapColorBalance;
966 /** \brief Capabilities specification for the Total Color Correction filter. */
967 typedef struct _VAProcFilterCapTotalColorCorrection {
968 /** \brief Color to correct. */
969 VAProcTotalColorCorrectionType type;
970 /** \brief Range of supported values for the specified color. */
971 VAProcFilterValueRange range;
972 } VAProcFilterCapTotalColorCorrection;
975 * \brief Queries video processing filters.
977 * This function returns the list of video processing filters supported
978 * by the driver. The \c filters array is allocated by the user and
979 * \c num_filters shall be initialized to the number of allocated
980 * elements in that array. Upon successful return, the actual number
981 * of filters will be overwritten into \c num_filters. Otherwise,
982 * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and \c num_filters
983 * is adjusted to the number of elements that would be returned if enough
984 * space was available.
986 * The list of video processing filters supported by the driver shall
987 * be ordered in the way they can be iteratively applied. This is needed
988 * for both correctness, i.e. some filters would not mean anything if
989 * applied at the beginning of the pipeline; but also for performance
990 * since some filters can be applied in a single pass (e.g. noise
991 * reduction + deinterlacing).
993 * @param[in] dpy the VA display
994 * @param[in] context the video processing context
995 * @param[out] filters the output array of #VAProcFilterType elements
996 * @param[in,out] num_filters the number of elements allocated on input,
997 * the number of elements actually filled in on output
1000 vaQueryVideoProcFilters(
1002 VAContextID context,
1003 VAProcFilterType *filters,
1004 unsigned int *num_filters
1008 * \brief Queries video filter capabilities.
1010 * This function returns the list of capabilities supported by the driver
1011 * for a specific video filter. The \c filter_caps array is allocated by
1012 * the user and \c num_filter_caps shall be initialized to the number
1013 * of allocated elements in that array. Upon successful return, the
1014 * actual number of filters will be overwritten into \c num_filter_caps.
1015 * Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and
1016 * \c num_filter_caps is adjusted to the number of elements that would be
1017 * returned if enough space was available.
1019 * @param[in] dpy the VA display
1020 * @param[in] context the video processing context
1021 * @param[in] type the video filter type
1022 * @param[out] filter_caps the output array of #VAProcFilterCap elements
1023 * @param[in,out] num_filter_caps the number of elements allocated on input,
1024 * the number of elements actually filled in output
1027 vaQueryVideoProcFilterCaps(
1029 VAContextID context,
1030 VAProcFilterType type,
1032 unsigned int *num_filter_caps
1036 * \brief Queries video processing pipeline capabilities.
1038 * This function returns the video processing pipeline capabilities. The
1039 * \c filters array defines the video processing pipeline and is an array
1040 * of buffers holding filter parameters.
1042 * Note: the #VAProcPipelineCaps structure contains user-provided arrays.
1043 * If non-NULL, the corresponding \c num_* fields shall be filled in on
1044 * input with the number of elements allocated. Upon successful return,
1045 * the actual number of elements will be overwritten into the \c num_*
1046 * fields. Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned
1047 * and \c num_* fields are adjusted to the number of elements that would
1048 * be returned if enough space was available.
1050 * @param[in] dpy the VA display
1051 * @param[in] context the video processing context
1052 * @param[in] filters the array of VA buffers defining the video
1053 * processing pipeline
1054 * @param[in] num_filters the number of elements in filters
1055 * @param[in,out] pipeline_caps the video processing pipeline capabilities
1058 vaQueryVideoProcPipelineCaps(
1060 VAContextID context,
1061 VABufferID *filters,
1062 unsigned int num_filters,
1063 VAProcPipelineCaps *pipeline_caps
1072 #endif /* VA_VPP_H */