2 * Copyright (c) 2007-2011 Intel Corporation. All Rights Reserved.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 * \brief The video processing API
29 * This file contains the \ref api_vpp "Video processing API".
40 * \defgroup api_vpp Video processing API
44 * The video processing API uses the same paradigm as for decoding:
45 * - Query for supported capabilities;
46 * - Set up a video processing pipeline;
47 * - Send video processing parameters through VA buffers.
49 * \section api_vpp_caps Query for supported capabilities
51 * Checking whether video processing is supported can be performed
52 * with vaQueryConfigEntrypoints() and the profile argument set to
56 * VAEntrypoint *entrypoints;
57 * int i, num_entrypoints, supportsVideoProcessing = 0;
59 * num_entrypoints = vaMaxNumEntrypoints();
 * entrypoints = malloc(num_entrypoints * sizeof(entrypoints[0]));
61 * vaQueryConfigEntrypoints(va_dpy, VAProfileNone,
62 * entrypoints, &num_entrypoints);
64 * for (i = 0; !supportsVideoProcessing && i < num_entrypoints; i++) {
65 * if (entrypoints[i] == VAEntrypointVideoProc)
66 * supportsVideoProcessing = 1;
70 * Then, video processing pipeline capabilities, i.e. which video
71 * filters does the driver support, can be checked with the
72 * vaQueryVideoProcPipelineCaps() function.
75 * VAProcPipelineCap pipeline_caps[VAProcFilterCount];
76 * unsigned int num_pipeline_caps = VAProcFilterCount;
78 * // num_pipeline_caps shall be initialized to the length of the array
 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx, pipeline_caps, &num_pipeline_caps);
82 * Finally, individual filter capabilities can be checked with
83 * vaQueryVideoProcFilterCaps().
86 * VAProcFilterCap denoise_caps;
87 * unsigned int num_denoise_caps = 1;
88 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
89 * VAProcFilterNoiseReduction,
90 * &denoise_caps, &num_denoise_caps
93 * VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
94 * unsigned int num_deinterlacing_caps = VAProcDeinterlacingCount;
95 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
96 * VAProcFilterDeinterlacing,
 * deinterlacing_caps, &num_deinterlacing_caps
101 * \section api_vpp_setup Set up a video processing pipeline
103 * A video processing pipeline buffer is created for each source
104 * surface we want to process. However, buffers holding filter
105 * parameters can be created once and for all. Rationale is to avoid
106 * multiple creation/destruction chains of filter buffers and also
107 * because filter parameters generally won't change frame after
108 * frame. e.g. this makes it possible to implement a checkerboard of
109 * videos where the same filters are applied to each video source.
111 * The general control flow is demonstrated by the following pseudo-code:
114 * VABufferID denoise_filter, deint_filter;
115 * VABufferID filter_bufs[VAProcFilterCount];
116 * unsigned int num_filter_bufs;
118 * for (i = 0; i < num_pipeline_caps; i++) {
119 * VAProcPipelineCap * const pipeline_cap = &pipeline_caps[i];
120 * switch (pipeline_cap->type) {
121 * case VAProcFilterNoiseReduction: { // Noise reduction filter
122 * VAProcFilterParameterBuffer denoise;
123 * denoise.type = VAProcFilterNoiseReduction;
124 * denoise.value = 0.5;
125 * vaCreateBuffer(va_dpy, vpp_ctx,
126 * VAProcFilterParameterBufferType, sizeof(denoise), 1,
127 * &denoise, &denoise_filter
129 * filter_bufs[num_filter_bufs++] = denoise_filter;
133 * case VAProcFilterDeinterlacing: // Motion-adaptive deinterlacing
134 * for (j = 0; j < num_deinterlacing_caps; j++) {
135 * VAProcFilterCapDeinterlacing * const cap = &deinterlacing_caps[j];
136 * if (cap->type != VAProcDeinterlacingMotionAdaptive)
139 * VAProcFilterParameterBufferDeinterlacing deint;
140 * deint.type = VAProcFilterDeinterlacing;
141 * deint.algorithm = VAProcDeinterlacingMotionAdaptive;
142 * deint.forward_references =
143 * malloc(cap->num_forward_references * sizeof(VASurfaceID));
144 * deint.num_forward_references = 0; // none for now
145 * deint.backward_references =
146 * malloc(cap->num_backward_references * sizeof(VASurfaceID));
147 * deint.num_backward_references = 0; // none for now
148 * vaCreateBuffer(va_dpy, vpp_ctx,
149 * VAProcFilterParameterBufferType, sizeof(deint), 1,
150 * &deint, &deint_filter
152 * filter_bufs[num_filter_bufs++] = deint_filter;
158 * \section api_vpp_submit Send video processing parameters through VA buffers
160 * Video processing pipeline parameters are submitted for each source
161 * surface to process. Video filter parameters can also change, per-surface.
162 * e.g. the list of reference frames used for deinterlacing.
165 * foreach (iteration) {
166 * vaBeginPicture(va_dpy, vpp_ctx, vpp_surface);
167 * foreach (surface) {
168 * VARectangle output_region;
169 * VABufferID pipeline_buf;
170 * VAProcPipelineParameterBuffer *pipeline_param;
172 * vaCreateBuffer(va_dpy, vpp_ctx,
 * VAProcPipelineParameterBufferType, sizeof(*pipeline_param), 1,
174 * NULL, &pipeline_param
177 * // Setup output region for this surface
178 * // e.g. upper left corner for the first surface
179 * output_region.x = BORDER;
180 * output_region.y = BORDER;
181 * output_region.width =
182 * (vpp_surface_width - (Nx_surfaces + 1) * BORDER) / Nx_surfaces;
183 * output_region.height =
184 * (vpp_surface_height - (Ny_surfaces + 1) * BORDER) / Ny_surfaces;
186 * vaMapBuffer(va_dpy, pipeline_buf, &pipeline_param);
187 * pipeline_param->surface = surface;
188 * pipeline_param->surface_region = NULL;
189 * pipeline_param->output_region = &output_region;
190 * pipeline_param->output_background_color = 0;
191 * if (first surface to render)
192 * pipeline_param->output_background_color = 0xff000000; // black
193 * pipeline_param->flags = VA_FILTER_SCALING_HQ;
194 * pipeline_param->filters = filter_bufs;
195 * pipeline_param->num_filters = num_filter_bufs;
196 * vaUnmapBuffer(va_dpy, pipeline_buf);
198 * VAProcFilterParameterBufferDeinterlacing *deint_param;
199 * vaMapBuffer(va_dpy, deint_filter, &deint_param);
200 * // Update deinterlacing parameters, if necessary
202 * vaUnmapBuffer(va_dpy, deint_filter);
205 * vaRenderPicture(va_dpy, vpp_ctx, &pipeline_buf, 1);
207 * vaEndPicture(va_dpy, vpp_ctx);
212 /** \brief Video filter types. */
213 typedef enum _VAProcFilterType {
214 VAProcFilterNone = 0,
215 /** \brief Noise reduction filter. */
216 VAProcFilterNoiseReduction,
217 /** \brief Deinterlacing filter. */
218 VAProcFilterDeinterlacing,
219 /** \brief Sharpening filter. */
220 VAProcFilterSharpening,
221 /** \brief Color balance parameters. */
222 VAProcFilterColorBalance,
223 /** \brief Color standard conversion. */
224 VAProcFilterColorStandard,
225 /** \brief Max number of video filters. */
/** \brief Deinterlacing types. */
typedef enum _VAProcDeinterlacingType {
    VAProcDeinterlacingNone = 0,
    /** \brief Bob deinterlacing algorithm. */
    VAProcDeinterlacingBob,
    /** \brief Weave deinterlacing algorithm. */
    VAProcDeinterlacingWeave,
    /** \brief Motion adaptive deinterlacing algorithm. */
    VAProcDeinterlacingMotionAdaptive,
    /** \brief Motion compensated deinterlacing algorithm. */
    VAProcDeinterlacingMotionCompensated,
    /** \brief Max number of deinterlacing algorithms. Not a valid algorithm value. */
    VAProcDeinterlacingCount
} VAProcDeinterlacingType;
/** \brief Color balance types. */
typedef enum _VAProcColorBalanceType {
    VAProcColorBalanceNone = 0,
    /** \brief Hue. */
    VAProcColorBalanceHue,
    /** \brief Saturation. */
    VAProcColorBalanceSaturation,
    /** \brief Brightness. */
    VAProcColorBalanceBrightness,
    /** \brief Contrast. */
    VAProcColorBalanceContrast,
    /** \brief Max number of color balance operations. Not a valid operation value. */
    VAProcColorBalanceCount
} VAProcColorBalanceType;
/** \brief Color standard types. */
typedef enum _VAProcColorStandardType {
    VAProcColorStandardNone = 0,
    /** \brief ITU-R BT.601. */
    VAProcColorStandardBT601,
    /** \brief ITU-R BT.709. */
    VAProcColorStandardBT709,
    /** \brief ITU-R BT.470-2 System M. */
    VAProcColorStandardBT470M,
    /** \brief ITU-R BT.470-2 System B, G. */
    VAProcColorStandardBT470BG,
    /** \brief SMPTE-170M. */
    VAProcColorStandardSMPTE170M,
    /** \brief SMPTE-240M. */
    VAProcColorStandardSMPTE240M,
    /** \brief Generic film. */
    VAProcColorStandardGenericFilm,
} VAProcColorStandardType;
278 /** @name Video filter flags */
280 /** \brief Specifies whether the filter shall be present in the pipeline. */
281 #define VA_PROC_FILTER_MANDATORY 0x00000001
284 /** \brief Video processing pipeline capabilities. */
285 typedef struct _VAProcPipelineCap {
286 /** \brief Video filter type. */
287 VAProcFilterType type;
288 /** \brief Video filter flags. See video filter flags. */
292 /** \brief Specification of values supported by the filter. */
293 typedef struct _VAProcFilterValueRange {
294 /** \brief Minimum value supported, inclusive. */
296 /** \brief Maximum value supported, inclusive. */
298 /** \brief Default value. */
300 /** \brief Step value that alters the filter behaviour in a sensible way. */
302 } VAProcFilterValueRange;
305 * \brief Video processing pipeline configuration.
 * This buffer defines a video processing pipeline. As for any buffer
 * passed to \c vaRenderPicture(), this is a one-time usage model. However,
309 * the actual filters to be applied are provided in the \c filters field,
310 * so they can be re-used in other processing pipelines.
312 * The target surface is specified by the \c render_target argument of
313 * \c vaBeginPicture(). The general usage model is described as follows:
314 * - \c vaBeginPicture(): specify the target surface that receives the
316 * - \c vaRenderPicture(): specify a surface to be processed and composed
317 * into the \c render_target. Use as many \c vaRenderPicture() calls as
 * necessary surfaces to compose;
319 * - \c vaEndPicture(): tell the driver to start processing the surfaces
320 * with the requested filters.
322 * If a filter (e.g. noise reduction) needs to be applied with different
323 * values for multiple surfaces, the application needs to create as many
324 * filter parameter buffers as necessary. i.e. the filter parameters shall
325 * not change between two calls to \c vaRenderPicture().
327 * For composition usage models, the first surface to process will generally
328 * use an opaque background color, i.e. \c output_background_color set with
329 * the most significant byte set to \c 0xff. For instance, \c 0xff000000 for
330 * a black background. Then, subsequent surfaces would use a transparent
333 typedef struct _VAProcPipelineParameterBuffer {
335 * \brief Source surface ID.
337 * ID of the source surface to process. If subpictures are associated with
338 * the video surfaces then they shall be rendered to the target surface.
342 * \brief Region within the source surface to be processed.
344 * Pointer to a #VARectangle defining the region within the source
345 * surface to be processed. If NULL, \c surface_region implies the
348 const VARectangle *surface_region;
350 * \brief Region within the output surface.
352 * Pointer to a #VARectangle defining the region within the output
353 * surface that receives the processed pixels. If NULL, \c output_region
354 * implies the whole surface.
356 * Note that any pixels residing outside the specified region will
357 * be filled in with the \ref output_background_color.
359 const VARectangle *output_region;
361 * \brief Background color.
363 * Background color used to fill in pixels that reside outside of the
364 * specified \ref output_region. The color is specified in ARGB format:
365 * [31:24] alpha, [23:16] red, [15:8] green, [7:0] blue.
367 unsigned int output_background_color;
369 * \brief Pipeline flags. See vaPutSurface() flags.
372 * - Bob-deinterlacing: \c VA_FRAME_PICTURE, \c VA_TOP_FIELD,
373 * \c VA_BOTTOM_FIELD. Note that any deinterlacing filter
374 * (#VAProcFilterDeinterlacing) will override those flags.
375 * - Color space conversion: \c VA_SRC_BT601, \c VA_SRC_BT709,
376 * \c VA_SRC_SMPTE_240. Note that any color standard filter
377 * (#VAProcFilterColorStandard) will override those flags.
378 * - Scaling: \c VA_FILTER_SCALING_DEFAULT, \c VA_FILTER_SCALING_FAST,
379 * \c VA_FILTER_SCALING_HQ, \c VA_FILTER_SCALING_NL_ANAMORPHIC.
383 * \brief Array of filters to apply to the surface.
385 * The list of filters shall be ordered in the same way the driver expects
386 * them. i.e. as was returned from vaQueryVideoProcPipelineCaps().
387 * Otherwise, a #VA_STATUS_ERROR_INVALID_FILTER_CHAIN is returned
388 * from vaRenderPicture() with this buffer.
390 * #VA_STATUS_ERROR_UNSUPPORTED_FILTER is returned if the list
391 * contains an unsupported filter.
393 * Note: no filter buffer is destroyed after a call to vaRenderPicture(),
394 * only this pipeline buffer will be destroyed as per the core API
395 * specification. This allows for flexibility in re-using the filter for
396 * other surfaces to be processed.
399 /** \brief Actual number of filters. */
400 unsigned int num_filters;
401 } VAProcPipelineParameterBuffer;
/**
 * \brief Filter parameter buffer base.
 *
 * This is a helper structure used by driver implementations only.
 * Users are not supposed to allocate filter parameter buffers of this
 * type.
 */
typedef struct _VAProcFilterParameterBufferBase {
    /** \brief Filter type. */
    VAProcFilterType type;
} VAProcFilterParameterBufferBase;
416 * \brief Default filter parametrization.
418 * Unless there is a filter-specific parameter buffer,
419 * #VAProcFilterParameterBuffer is the default type to use.
421 typedef struct _VAProcFilterParameterBuffer {
422 /** \brief Filter type. */
423 VAProcFilterType type;
425 /* XXX: use VAGenericValue? */
427 } VAProcFilterParameterBuffer;
/** \brief Deinterlacing filter parametrization. */
typedef struct _VAProcFilterParameterBufferDeinterlacing {
    /** \brief Filter type. Shall be set to #VAProcFilterDeinterlacing. */
    VAProcFilterType type;
    /** \brief Deinterlacing algorithm. */
    VAProcDeinterlacingType algorithm;
    /** \brief Array of forward reference frames. */
    VASurfaceID *forward_references;
    /** \brief Number of forward reference frames that were supplied. */
    unsigned int num_forward_references;
    /** \brief Array of backward reference frames. */
    VASurfaceID *backward_references;
    /** \brief Number of backward reference frames that were supplied. */
    unsigned int num_backward_references;
} VAProcFilterParameterBufferDeinterlacing;
446 * \brief Color balance filter parametrization.
448 * This buffer defines color balance attributes. A VA buffer can hold
449 * several color balance attributes by creating a VA buffer of desired
450 * number of elements. This can be achieved by the following pseudo-code:
453 * enum { kHue, kSaturation, kBrightness, kContrast };
455 * // Initial color balance parameters
456 * static const VAProcFilterParameterBufferColorBalance colorBalanceParams[4] =
459 * { VAProcFilterColorBalance, VAProcColorBalanceHue, 0.5 },
461 * { VAProcFilterColorBalance, VAProcColorBalanceSaturation, 0.5 },
463 * { VAProcFilterColorBalance, VAProcColorBalanceBrightness, 0.5 },
 * { VAProcFilterColorBalance, VAProcColorBalanceContrast, 0.5 }
469 * VABufferID colorBalanceBuffer;
470 * vaCreateBuffer(va_dpy, vpp_ctx,
471 * VAProcFilterParameterBufferType, sizeof(*pColorBalanceParam), 4,
472 * colorBalanceParams,
473 * &colorBalanceBuffer
476 * VAProcFilterParameterBufferColorBalance *pColorBalanceParam;
477 * vaMapBuffer(va_dpy, colorBalanceBuffer, &pColorBalanceParam);
479 * // Change brightness only
 * pColorBalanceParam[kBrightness].value = 0.75;
482 * vaUnmapBuffer(va_dpy, colorBalanceBuffer);
485 typedef struct _VAProcFilterParameterBufferColorBalance {
486 /** \brief Filter type. Shall be set to #VAProcFilterColorBalance. */
487 VAProcFilterType type;
488 /** \brief Color balance attribute. */
489 VAProcColorBalanceType attrib;
490 /** \brief Color balance value. */
492 } VAProcFilterParameterBufferColorBalance;
/** \brief Color standard filter parametrization. */
typedef struct _VAProcFilterParameterBufferColorStandard {
    /** \brief Filter type. Shall be set to #VAProcFilterColorStandard. */
    VAProcFilterType type;
    /** \brief Color standard to use. */
    VAProcColorStandardType standard;
} VAProcFilterParameterBufferColorStandard;
503 * \brief Default filter cap specification (single range value).
505 * Unless there is a filter-specific cap structure, #VAProcFilterCap is the
506 * default type to use for output caps from vaQueryVideoProcFilterCaps().
508 typedef struct _VAProcFilterCap {
509 /** \brief Range of supported values for the filter. */
510 VAProcFilterValueRange range;
/** \brief Capabilities specification for the deinterlacing filter. */
typedef struct _VAProcFilterCapDeinterlacing {
    /** \brief Deinterlacing algorithm. */
    VAProcDeinterlacingType type;
    /** \brief Number of forward references needed for deinterlacing. */
    unsigned int num_forward_references;
    /** \brief Number of backward references needed for deinterlacing. */
    unsigned int num_backward_references;
} VAProcFilterCapDeinterlacing;
/** \brief Capabilities specification for the color balance filter. */
typedef struct _VAProcFilterCapColorBalance {
    /** \brief Color balance operation. */
    VAProcColorBalanceType type;
    /** \brief Range of supported values for the specified operation. */
    VAProcFilterValueRange range;
} VAProcFilterCapColorBalance;
/** \brief Capabilities specification for the color standard filter. */
typedef struct _VAProcFilterCapColorStandard {
    /** \brief Color standard type. */
    VAProcColorStandardType type;
} VAProcFilterCapColorStandard;
538 * \brief Queries video processing pipeline capabilities.
540 * This function returns the list of video processing filters supported
541 * by the driver. The \c pipeline_caps array is allocated by the user and
542 * \c num_pipeline_caps shall be initialized to the number of allocated
543 * elements in that array. Upon successful return, the actual number
544 * of filters will be overwritten into \c num_pipeline_caps. Otherwise,
545 * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and \c num_pipeline_caps
546 * is adjusted to the number of elements that would be returned if enough
547 * space was available.
549 * The list of video processing filters supported by the driver shall
550 * be ordered in the way they can be iteratively applied. This is needed
551 * for both correctness, i.e. some filters would not mean anything if
552 * applied at the beginning of the pipeline; but also for performance
553 * since some filters can be applied in a single pass (e.g. noise
554 * reduction + deinterlacing).
556 * @param[in] dpy the VA display
557 * @param[in] context the video processing context
558 * @param[out] pipeline_caps the output array of #VAProcPipelineCap elements
559 * @param[in,out] num_pipeline_caps the number of elements allocated on input,
560 * the number of elements actually filled in on output
563 vaQueryVideoProcPipelineCaps(
566 VAProcPipelineCap *pipeline_caps,
567 unsigned int *num_pipeline_caps
571 * \brief Queries video filter capabilities.
573 * This function returns the list of capabilities supported by the driver
574 * for a specific video filter. The \c filter_caps array is allocated by
575 * the user and \c num_filter_caps shall be initialized to the number
576 * of allocated elements in that array. Upon successful return, the
577 * actual number of filters will be overwritten into \c num_filter_caps.
578 * Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and
579 * \c num_filter_caps is adjusted to the number of elements that would be
580 * returned if enough space was available.
582 * @param[in] dpy the VA display
583 * @param[in] context the video processing context
584 * @param[in] type the video filter type
585 * @param[out] filter_caps the output array of #VAProcFilterCap elements
586 * @param[in,out] num_filter_caps the number of elements allocated on input,
 * the number of elements actually filled in on output
590 vaQueryVideoProcFilterCaps(
593 VAProcFilterType type,
595 unsigned int *num_filter_caps
604 #endif /* VA_VPP_H */