2 * Copyright © 2011 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Li Xiaowei <xiaowei.a.li@intel.com>
33 #include "intel_batchbuffer.h"
34 #include "intel_driver.h"
35 #include "i965_defines.h"
36 #include "i965_structs.h"
38 #include "i965_drv_video.h"
39 #include "i965_post_processing.h"
40 #include "gen75_picture_process.h"
41 #include "gen8_post_processing.h"
42 #include "intel_gen_vppapi.h"
44 extern struct hw_context *
45 i965_proc_context_init(VADriverContextP ctx,
46 struct object_config *obj_config);
49 gen75_vpp_fmt_cvt(VADriverContextP ctx,
51 union codec_state *codec_state,
52 struct hw_context *hw_context)
54 VAStatus va_status = VA_STATUS_SUCCESS;
55 struct intel_video_process_context *proc_ctx =
56 (struct intel_video_process_context *)hw_context;
58 va_status = i965_proc_picture(ctx, profile, codec_state,
59 proc_ctx->vpp_fmt_cvt_ctx);
65 gen75_vpp_vebox(VADriverContextP ctx,
66 struct intel_video_process_context* proc_ctx)
68 VAStatus va_status = VA_STATUS_SUCCESS;
69 VAProcPipelineParameterBuffer* pipeline_param = proc_ctx->pipeline_param;
70 struct i965_driver_data *i965 = i965_driver_data(ctx);
72 /* vpp features based on VEBox fixed function */
73 if (proc_ctx->vpp_vebox_ctx == NULL) {
74 proc_ctx->vpp_vebox_ctx = gen75_vebox_context_init(ctx);
77 proc_ctx->vpp_vebox_ctx->pipeline_param = pipeline_param;
78 proc_ctx->vpp_vebox_ctx->surface_input_object = proc_ctx->surface_pipeline_input_object;
79 proc_ctx->vpp_vebox_ctx->surface_output_object = proc_ctx->surface_render_output_object;
81 if (IS_HASWELL(i965->intel.device_info))
82 va_status = gen75_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
83 else if (IS_GEN8(i965->intel.device_info))
84 va_status = gen8_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
85 else if (IS_GEN9(i965->intel.device_info))
86 va_status = gen9_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
/*
 * Convert a packed ARGB color (0xAARRGGBB) to BT.601 limited-range YUV.
 *
 * Uses the standard integer approximation of the BT.601 RGB->YCbCr
 * transform with coefficients scaled by 1000:
 *   Y =  0.257 R + 0.504 G + 0.098 B +  16   (range [16, 235])
 *   V =  0.439 R - 0.368 G - 0.071 B + 128   (Cr, range [16, 239])
 *   U = -0.148 R - 0.291 G + 0.439 B + 128   (Cb, range [16, 239])
 * The alpha byte is passed through unchanged.  All results fit in an
 * unsigned char without clamping, so no saturation logic is needed.
 *
 * NOTE(review): the output-parameter declarations were missing from the
 * sampled listing; they are restored here to match the call
 * rgb_to_yuv(color, &y, &u, &v, &a) in gen8plus_vpp_clear_surface().
 */
static void
rgb_to_yuv(unsigned int argb,
           unsigned char *y,
           unsigned char *u,
           unsigned char *v,
           unsigned char *a)
{
    int r = ((argb >> 16) & 0xff);
    int g = ((argb >> 8) & 0xff);
    int b = ((argb >> 0) & 0xff);

    *y = (257 * r + 504 * g + 98 * b) / 1000 + 16;
    *v = (439 * r - 368 * g - 71 * b) / 1000 + 128;
    *u = (-148 * r - 291 * g + 439 * b) / 1000 + 128;
    *a = ((argb >> 24) & 0xff);
}
/*
 * gen8plus_vpp_clear_surface() -- fill an NV12 surface with a solid
 * background color using the BLT engine: two XY_COLOR_BLT operations,
 * one for the full-resolution Y plane and one for the half-resolution
 * interleaved U/V plane.
 *
 * NOTE(review): this listing is incomplete.  The trailing parameter
 * (the ARGB clear color referenced below as 'color'), the 'pitch'
 * declaration, the br13 setup and several words of each BLT command sit
 * on lines missing from the visible text -- confirm against the full
 * source before relying on the details here.
 */
109 gen8plus_vpp_clear_surface(VADriverContextP ctx,
110                            struct i965_post_processing_context *pp_context,
111                            struct object_surface *obj_surface,
114     struct intel_batchbuffer *batch = pp_context->batch;
115     unsigned int blt_cmd, br13;
116     unsigned int tiling = 0, swizzle = 0;
118     unsigned char y, u, v, a = 0;
119     int region_width, region_height;
121     /* Currently only support NV12 surface */
122     if (!obj_surface || obj_surface->fourcc != VA_FOURCC_NV12)
/* Convert the ARGB background color to BT.601 YUV (see rgb_to_yuv()). */
125     rgb_to_yuv(color, &y, &u, &v, &a);
130     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
131     blt_cmd = GEN8_XY_COLOR_BLT_CMD;
132     pitch = obj_surface->width;
/* Only Y-tiling is expected for this surface; X-tiling would trip the
 * assert below. */
134     if (tiling != I915_TILING_NONE) {
135         assert(tiling == I915_TILING_Y);
136         // blt_cmd |= XY_COLOR_BLT_DST_TILED;
/* First blit: clear the full-size Y plane with the luma value. */
144     intel_batchbuffer_start_atomic_blt(batch, 56);
145     BEGIN_BLT_BATCH(batch, 14);
147     region_width = obj_surface->width;
148     region_height = obj_surface->height;
150     OUT_BATCH(batch, blt_cmd);
151     OUT_BATCH(batch, br13);
156               region_height << 16 |
158     OUT_RELOC64(batch, obj_surface->bo,
159                 I915_GEM_DOMAIN_RENDER, I915_GEM_DOMAIN_RENDER,
/* Second blit: clear the half-resolution interleaved U/V plane. */
167     region_width = obj_surface->width / 2;
168     region_height = obj_surface->height / 2;
/* Y-tiled chroma height is padded to a 32-row boundary. */
170     if (tiling == I915_TILING_Y) {
171         region_height = ALIGN(obj_surface->height / 2, 32);
174     OUT_BATCH(batch, blt_cmd);
175     OUT_BATCH(batch, br13);
180               region_height << 16 |
/* The UV plane starts y_cb_offset rows into the buffer. */
182     OUT_RELOC64(batch, obj_surface->bo,
183                 I915_GEM_DOMAIN_RENDER, I915_GEM_DOMAIN_RENDER,
184                 obj_surface->width * obj_surface->y_cb_offset);
/* Fill pattern packs the two chroma bytes of the NV12 UV pair. */
185     OUT_BATCH(batch, v << 8 | u);
187     ADVANCE_BATCH(batch);
188     intel_batchbuffer_end_atomic(batch);
/*
 * gen75_proc_picture() -- top-level 'run' hook for VA video processing
 * on Gen7.5+ hardware.
 *
 * Flow as shown by the visible lines:
 *   1. validate the render target, input surface and filter list,
 *   2. resolve source and destination rectangles (defaulting to the
 *      full surface when no region is given),
 *   3. fast path: no filters -> GPE scaling kernel (with an optional
 *      background-color clear) and early return,
 *   4. otherwise run up to three stages around the VEBOX:
 *      stage1 = pre-conversion of a P010 input,
 *      stage2 = the filter pass itself,
 *      stage3 = final conversion/scaling into the real render target,
 *   5. destroy any temporary surfaces on both success and error paths.
 *
 * NOTE(review): this listing is incomplete -- the 'profile' parameter,
 * the declarations of 'status' and 'i', several closing braces, the
 * stage-flag assignments and the error-path labels sit on lines missing
 * from the visible text.  The comments below describe only what the
 * visible lines demonstrate.
 */
192 gen75_proc_picture(VADriverContextP ctx,
194 union codec_state *codec_state,
195 struct hw_context *hw_context)
197 struct i965_driver_data *i965 = i965_driver_data(ctx);
198 struct proc_state* proc_st = &(codec_state->proc);
199 struct intel_video_process_context *proc_ctx =
200 (struct intel_video_process_context *)hw_context;
201 VAProcPipelineParameterBuffer *pipeline_param =
202 (VAProcPipelineParameterBuffer *)proc_st->pipeline_param->buffer;
203 struct object_surface *obj_dst_surf = NULL;
204 struct object_surface *obj_src_surf = NULL;
/* Scratch state for the optional multi-stage path. */
206 VAProcPipelineParameterBuffer pipeline_param2;
207 struct object_surface *stage1_dst_surf = NULL;
208 struct object_surface *stage2_dst_surf = NULL;
209 VARectangle src_rect, dst_rect;
210 VASurfaceID tmp_surfaces[2];
211 VASurfaceID out_surface_id1 = VA_INVALID_ID, out_surface_id2 = VA_INVALID_ID;
212 int num_tmp_surfaces = 0;
216 proc_ctx->pipeline_param = pipeline_param;
/* --- Parameter validation ---------------------------------------- */
218 if (proc_st->current_render_target == VA_INVALID_SURFACE ||
219 pipeline_param->surface == VA_INVALID_SURFACE) {
220 status = VA_STATUS_ERROR_INVALID_SURFACE;
/* Destination = current render target, source = pipeline input. */
224 obj_dst_surf = SURFACE(proc_st->current_render_target);
227 status = VA_STATUS_ERROR_INVALID_SURFACE;
231 obj_src_surf = SURFACE(proc_ctx->pipeline_param->surface);
234 status = VA_STATUS_ERROR_INVALID_SURFACE;
238 if (!obj_src_surf->bo) {
239 status = VA_STATUS_ERROR_INVALID_VALUE; /* The input surface is created without valid content */
/* A non-zero filter count with a NULL filter array is malformed. */
243 if (pipeline_param->num_filters && !pipeline_param->filters) {
244 status = VA_STATUS_ERROR_INVALID_PARAMETER;
/* No filters: make sure the render-ring conversion context exists. */
248 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
249 /* explicitly initialize the VPP based on Render ring */
250 if (proc_ctx->vpp_fmt_cvt_ctx == NULL)
251 proc_ctx->vpp_fmt_cvt_ctx = i965_proc_context_init(ctx, NULL);
/* Lazily back the destination with a BO; 10-bit targets get P010. */
254 if (!obj_dst_surf->bo) {
255 unsigned int is_tiled = 1;
256 unsigned int fourcc = VA_FOURCC_NV12;
257 int sampling = SUBSAMPLE_YUV420;
259 if (obj_dst_surf->expected_format == VA_RT_FORMAT_YUV420_10BPP)
260 fourcc = VA_FOURCC_P010;
262 i965_check_alloc_surface_bo(ctx, obj_dst_surf, is_tiled, fourcc, sampling);
/* --- Source/destination rectangles (default: full surface) ------- */
265 if (pipeline_param->surface_region) {
266 src_rect.x = pipeline_param->surface_region->x;
267 src_rect.y = pipeline_param->surface_region->y;
268 src_rect.width = pipeline_param->surface_region->width;
269 src_rect.height = pipeline_param->surface_region->height;
273 src_rect.width = obj_src_surf->orig_width;
274 src_rect.height = obj_src_surf->orig_height;
277 if (pipeline_param->output_region) {
278 dst_rect.x = pipeline_param->output_region->x;
279 dst_rect.y = pipeline_param->output_region->y;
280 dst_rect.width = pipeline_param->output_region->width;
281 dst_rect.height = pipeline_param->output_region->height;
285 dst_rect.width = obj_dst_surf->orig_width;
286 dst_rect.height = obj_dst_surf->orig_height;
/* --- Fast path: no filters -> GPE scaling kernel ----------------- */
289 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
290 VAStatus status = VA_STATUS_ERROR_UNIMPLEMENTED;
291 struct i965_proc_context *gpe_proc_ctx;
292 struct i965_surface src_surface, dst_surface;
294 gpe_proc_ctx = (struct i965_proc_context *)proc_ctx->vpp_fmt_cvt_ctx;
295 assert(gpe_proc_ctx != NULL); // gpe_proc_ctx must be a non-NULL pointer
/* Pre-fill the NV12 target with the requested background color. */
297 if ((gpe_proc_ctx->pp_context.scaling_gpe_context_initialized & VPPGPE_8BIT_8BIT) &&
298 (obj_dst_surf->fourcc == VA_FOURCC_NV12) &&
299 pipeline_param->output_background_color)
300 gen8plus_vpp_clear_surface(ctx,
301 &gpe_proc_ctx->pp_context,
303 pipeline_param->output_background_color);
305 src_surface.base = (struct object_base *)obj_src_surf;
306 src_surface.type = I965_SURFACE_TYPE_SURFACE;
307 dst_surface.base = (struct object_base *)obj_dst_surf;
308 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
310 status = intel_common_scaling_post_processing(ctx,
311 &gpe_proc_ctx->pp_context,
312 &src_surface, &src_rect,
313 &dst_surface, &dst_rect);
/* UNIMPLEMENTED means "fall through to the legacy path below". */
315 if (status != VA_STATUS_ERROR_UNIMPLEMENTED)
/* --- Multi-stage pipeline setup ---------------------------------- */
319 proc_ctx->surface_render_output_object = obj_dst_surf;
320 proc_ctx->surface_pipeline_input_object = obj_src_surf;
321 assert(pipeline_param->num_filters <= 4);
/* stage1: input pre-conversion; stage2: filter pass; stage3: output
 * conversion/scaling.  Default: only stage2 enabled.  (The assignments
 * that flip these flags are on lines missing from this listing.) */
323 int vpp_stage1 = 0, vpp_stage2 = 1, vpp_stage3 = 0;
/* 10-bit (P010) input may need conversion before filtering. */
326 if (obj_src_surf->fourcc == VA_FOURCC_P010) {
330 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
331 if (src_rect.x != dst_rect.x ||
332 src_rect.y != dst_rect.y ||
333 src_rect.width != dst_rect.width ||
334 src_rect.height != dst_rect.height)
337 if (obj_dst_surf->fourcc != VA_FOURCC_NV12 &&
338 obj_dst_surf->fourcc != VA_FOURCC_P010)
343 if (vpp_stage2 == 1) {
344 if (obj_dst_surf->fourcc == VA_FOURCC_P010)
/* Destination is P010 while the source is not. */
347 } else if (obj_dst_surf->fourcc == VA_FOURCC_P010) {
351 if ((obj_src_surf->fourcc == VA_FOURCC_NV12) &&
352 (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL)) {
353 if ((src_rect.x == dst_rect.x) &&
354 (src_rect.y == dst_rect.y) &&
355 (src_rect.width == dst_rect.width) &&
356 (src_rect.height == dst_rect.height))
/* --- Stage 1: convert the input (source rectangle unchanged) ----- */
361 if (vpp_stage1 == 1) {
362 memset((void *)&pipeline_param2, 0, sizeof(pipeline_param2));
363 pipeline_param2.surface = pipeline_param->surface;
364 pipeline_param2.surface_region = &src_rect;
365 pipeline_param2.output_region = &src_rect;
366 pipeline_param2.filter_flags = 0;
367 pipeline_param2.num_filters = 0;
369 proc_ctx->pipeline_param = &pipeline_param2;
/* Stage-1 output -> temporary NV12 surface fed into stage 2. */
371 if (vpp_stage2 == 1) {
372 status = i965_CreateSurfaces(ctx,
373 obj_src_surf->orig_width,
374 obj_src_surf->orig_height,
378 assert(status == VA_STATUS_SUCCESS);
379 tmp_surfaces[num_tmp_surfaces++] = out_surface_id1;
380 stage1_dst_surf = SURFACE(out_surface_id1);
381 assert(stage1_dst_surf);
382 i965_check_alloc_surface_bo(ctx, stage1_dst_surf, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
384 proc_ctx->surface_render_output_object = stage1_dst_surf;
387 gen75_vpp_vebox(ctx, proc_ctx);
/* Stage 3 needs its own temporary output when stage 2 also runs. */
390 if ((vpp_stage3 == 1) && (vpp_stage2 == 1)) {
391 status = i965_CreateSurfaces(ctx,
392 obj_dst_surf->orig_width,
393 obj_dst_surf->orig_height,
397 assert(status == VA_STATUS_SUCCESS);
398 tmp_surfaces[num_tmp_surfaces++] = out_surface_id2;
399 stage2_dst_surf = SURFACE(out_surface_id2);
400 assert(stage2_dst_surf);
401 i965_check_alloc_surface_bo(ctx, stage2_dst_surf, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
/* --- Stage 2: the actual filter pass ----------------------------- */
404 VABufferID *filter_id = (VABufferID*) pipeline_param->filters;
406 if (vpp_stage2 == 1) {
407 if (stage1_dst_surf != NULL) {
408 proc_ctx->surface_pipeline_input_object = stage1_dst_surf;
409 proc_ctx->surface_render_output_object = obj_dst_surf;
411 pipeline_param->surface = out_surface_id1;
414 if (stage2_dst_surf != NULL) {
415 proc_ctx->surface_render_output_object = stage2_dst_surf;
/* Temporarily retarget rendering at the stage-2 scratch surface. */
417 proc_st->current_render_target = out_surface_id2;
420 proc_ctx->pipeline_param = pipeline_param;
422 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
423 /* implicity surface format coversion and scaling */
425 status = gen75_vpp_fmt_cvt(ctx, profile, codec_state, hw_context);
426 if (status != VA_STATUS_SUCCESS)
/* Single filter: VEBOX handles NR / deinterlace / STE / sharpen /
 * color-balance filter types. */
428 } else if (pipeline_param->num_filters == 1) {
429 struct object_buffer * obj_buf = BUFFER((*filter_id) + 0);
431 assert(obj_buf && obj_buf->buffer_store && obj_buf->buffer_store->buffer);
434 !obj_buf->buffer_store ||
435 !obj_buf->buffer_store->buffer) {
436 status = VA_STATUS_ERROR_INVALID_FILTER_CHAIN;
440 VAProcFilterParameterBuffer* filter =
441 (VAProcFilterParameterBuffer*)obj_buf-> buffer_store->buffer;
443 if (filter->type == VAProcFilterNoiseReduction ||
444 filter->type == VAProcFilterDeinterlacing ||
445 filter->type == VAProcFilterSkinToneEnhancement ||
446 filter->type == VAProcFilterSharpening ||
447 filter->type == VAProcFilterColorBalance) {
448 gen75_vpp_vebox(ctx, proc_ctx);
/* Multiple filters: every one must be VEBOX-capable (note that
 * Sharpening is NOT in this list, unlike the single-filter case). */
450 } else if (pipeline_param->num_filters >= 2) {
452 for (i = 0; i < pipeline_param->num_filters; i++) {
453 struct object_buffer * obj_buf = BUFFER(pipeline_param->filters[i]);
456 !obj_buf->buffer_store ||
457 !obj_buf->buffer_store->buffer) {
458 status = VA_STATUS_ERROR_INVALID_FILTER_CHAIN;
462 VAProcFilterParameterBuffer* filter =
463 (VAProcFilterParameterBuffer*)obj_buf-> buffer_store->buffer;
465 if (filter->type != VAProcFilterNoiseReduction &&
466 filter->type != VAProcFilterDeinterlacing &&
467 filter->type != VAProcFilterSkinToneEnhancement &&
468 filter->type != VAProcFilterColorBalance) {
469 fprintf(stderr, "Do not support multiply filters outside vebox pipeline \n");
473 gen75_vpp_vebox(ctx, proc_ctx);
/* --- Stage 3: convert/scale the filtered result into the real
 *     render target (destination rectangle applies here) ---------- */
477 if (vpp_stage3 == 1) {
478 if (vpp_stage2 == 1) {
479 memset(&pipeline_param2, 0, sizeof(pipeline_param2));
480 pipeline_param2.surface = out_surface_id2;
481 pipeline_param2.surface_region = &dst_rect;
482 pipeline_param2.output_region = &dst_rect;
483 pipeline_param2.filter_flags = 0;
484 pipeline_param2.num_filters = 0;
486 proc_ctx->pipeline_param = &pipeline_param2;
/* Previous stage's output becomes this stage's input. */
487 proc_ctx->surface_pipeline_input_object = proc_ctx->surface_render_output_object;
488 proc_ctx->surface_render_output_object = obj_dst_surf;
491 gen75_vpp_vebox(ctx, proc_ctx);
/* --- Cleanup: success and error paths both free the temporaries -- */
494 if (num_tmp_surfaces)
495 i965_DestroySurfaces(ctx,
499 return VA_STATUS_SUCCESS;
502 if (num_tmp_surfaces)
503 i965_DestroySurfaces(ctx,
511 gen75_proc_context_destroy(void *hw_context)
513 struct intel_video_process_context *proc_ctx =
514 (struct intel_video_process_context *)hw_context;
515 VADriverContextP ctx = (VADriverContextP)(proc_ctx->driver_context);
517 if (proc_ctx->vpp_fmt_cvt_ctx) {
518 proc_ctx->vpp_fmt_cvt_ctx->destroy(proc_ctx->vpp_fmt_cvt_ctx);
519 proc_ctx->vpp_fmt_cvt_ctx = NULL;
522 if (proc_ctx->vpp_vebox_ctx) {
523 gen75_vebox_context_destroy(ctx, proc_ctx->vpp_vebox_ctx);
524 proc_ctx->vpp_vebox_ctx = NULL;
531 gen75_proc_context_init(VADriverContextP ctx,
532 struct object_config *obj_config)
534 struct intel_video_process_context *proc_context
535 = calloc(1, sizeof(struct intel_video_process_context));
537 assert(proc_context);
538 proc_context->base.destroy = gen75_proc_context_destroy;
539 proc_context->base.run = gen75_proc_picture;
541 proc_context->vpp_vebox_ctx = NULL;
542 proc_context->vpp_fmt_cvt_ctx = NULL;
544 proc_context->driver_context = ctx;
546 return (struct hw_context *)proc_context;