2 * Copyright © 2011 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Li Xiaowei <xiaowei.a.li@intel.com>
33 #include "intel_batchbuffer.h"
34 #include "intel_driver.h"
35 #include "i965_defines.h"
36 #include "i965_structs.h"
38 #include "i965_drv_video.h"
39 #include "i965_post_processing.h"
40 #include "gen75_picture_process.h"
41 #include "gen8_post_processing.h"
42 #include "intel_gen_vppapi.h"
44 extern struct hw_context *
45 i965_proc_context_init(VADriverContextP ctx,
46 struct object_config *obj_config);
49 gen75_vpp_fmt_cvt(VADriverContextP ctx,
51 union codec_state *codec_state,
52 struct hw_context *hw_context)
54 VAStatus va_status = VA_STATUS_SUCCESS;
55 struct intel_video_process_context *proc_ctx =
56 (struct intel_video_process_context *)hw_context;
58 va_status = i965_proc_picture(ctx, profile, codec_state,
59 proc_ctx->vpp_fmt_cvt_ctx);
65 gen75_vpp_vebox(VADriverContextP ctx,
66 struct intel_video_process_context* proc_ctx)
68 VAStatus va_status = VA_STATUS_ERROR_UNIMPLEMENTED;
69 VAProcPipelineParameterBuffer* pipeline_param = proc_ctx->pipeline_param;
70 struct i965_driver_data *i965 = i965_driver_data(ctx);
72 /* vpp features based on VEBox fixed function */
73 if (proc_ctx->vpp_vebox_ctx == NULL) {
74 proc_ctx->vpp_vebox_ctx = gen75_vebox_context_init(ctx);
77 proc_ctx->vpp_vebox_ctx->pipeline_param = pipeline_param;
78 proc_ctx->vpp_vebox_ctx->surface_input_object = proc_ctx->surface_pipeline_input_object;
79 proc_ctx->vpp_vebox_ctx->surface_output_object = proc_ctx->surface_render_output_object;
81 if (IS_HASWELL(i965->intel.device_info))
82 va_status = gen75_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
83 else if (IS_GEN8(i965->intel.device_info))
84 va_status = gen8_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
85 else if (IS_GEN9(i965->intel.device_info))
86 va_status = gen9_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
87 else if (IS_GEN10(i965->intel.device_info))
88 va_status = gen10_vebox_process_picture(ctx, proc_ctx->vpp_vebox_ctx);
/*
 * Convert a packed 32-bit ARGB pixel to 8-bit Y/U/V plus alpha using
 * BT.601 limited-range integer arithmetic (coefficients scaled by 1000).
 *
 * argb: 0xAARRGGBB packed pixel.
 * y/u/v: luma and chroma outputs (Y in [16,235], U/V biased around 128).
 * a: straight copy of the alpha byte.
 *
 * Note: integer division truncates toward zero, so results can differ by
 * one LSB from a rounded conversion — acceptable for clear-color use.
 */
static void
rgb_to_yuv(unsigned int argb,
           unsigned char *y,
           unsigned char *u,
           unsigned char *v,
           unsigned char *a)
{
    int r = ((argb >> 16) & 0xff);
    int g = ((argb >> 8) & 0xff);
    int b = ((argb >> 0) & 0xff);

    *y = (257 * r + 504 * g + 98 * b) / 1000 + 16;
    *v = (439 * r - 368 * g - 71 * b) / 1000 + 128;
    *u = (-148 * r - 291 * g + 439 * b) / 1000 + 128;
    *a = ((argb >> 24) & 0xff);
}
111 gen8plus_vpp_clear_surface(VADriverContextP ctx,
112 struct i965_post_processing_context *pp_context,
113 struct object_surface *obj_surface,
116 struct intel_batchbuffer *batch = pp_context->batch;
117 unsigned int blt_cmd, br13;
118 unsigned int tiling = 0, swizzle = 0;
120 unsigned char y, u, v, a = 0;
121 int region_width, region_height;
123 /* Currently only support NV12 surface */
124 if (!obj_surface || obj_surface->fourcc != VA_FOURCC_NV12)
127 rgb_to_yuv(color, &y, &u, &v, &a);
132 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
133 blt_cmd = GEN8_XY_COLOR_BLT_CMD;
134 pitch = obj_surface->width;
136 if (tiling != I915_TILING_NONE) {
137 assert(tiling == I915_TILING_Y);
138 // blt_cmd |= XY_COLOR_BLT_DST_TILED;
146 intel_batchbuffer_start_atomic_blt(batch, 56);
147 BEGIN_BLT_BATCH(batch, 14);
149 region_width = obj_surface->width;
150 region_height = obj_surface->height;
152 OUT_BATCH(batch, blt_cmd);
153 OUT_BATCH(batch, br13);
158 region_height << 16 |
160 OUT_RELOC64(batch, obj_surface->bo,
161 I915_GEM_DOMAIN_RENDER, I915_GEM_DOMAIN_RENDER,
169 region_width = obj_surface->width / 2;
170 region_height = obj_surface->height / 2;
172 if (tiling == I915_TILING_Y) {
173 region_height = ALIGN(obj_surface->height / 2, 32);
176 OUT_BATCH(batch, blt_cmd);
177 OUT_BATCH(batch, br13);
182 region_height << 16 |
184 OUT_RELOC64(batch, obj_surface->bo,
185 I915_GEM_DOMAIN_RENDER, I915_GEM_DOMAIN_RENDER,
186 obj_surface->width * obj_surface->y_cb_offset);
187 OUT_BATCH(batch, v << 8 | u);
189 ADVANCE_BATCH(batch);
190 intel_batchbuffer_end_atomic(batch);
194 gen75_proc_picture(VADriverContextP ctx,
196 union codec_state *codec_state,
197 struct hw_context *hw_context)
199 struct i965_driver_data *i965 = i965_driver_data(ctx);
200 struct proc_state* proc_st = &(codec_state->proc);
201 struct intel_video_process_context *proc_ctx =
202 (struct intel_video_process_context *)hw_context;
203 VAProcPipelineParameterBuffer *pipeline_param =
204 (VAProcPipelineParameterBuffer *)proc_st->pipeline_param->buffer;
205 struct object_surface *obj_dst_surf = NULL;
206 struct object_surface *obj_src_surf = NULL;
208 VAProcPipelineParameterBuffer pipeline_param2;
209 struct object_surface *stage1_dst_surf = NULL;
210 struct object_surface *stage2_dst_surf = NULL;
211 VARectangle src_rect, dst_rect;
212 VASurfaceID tmp_surfaces[2];
213 VASurfaceID out_surface_id1 = VA_INVALID_ID, out_surface_id2 = VA_INVALID_ID;
214 int num_tmp_surfaces = 0;
218 proc_ctx->pipeline_param = pipeline_param;
220 if (proc_st->current_render_target == VA_INVALID_SURFACE ||
221 pipeline_param->surface == VA_INVALID_SURFACE) {
222 status = VA_STATUS_ERROR_INVALID_SURFACE;
226 obj_dst_surf = SURFACE(proc_st->current_render_target);
229 status = VA_STATUS_ERROR_INVALID_SURFACE;
233 obj_src_surf = SURFACE(proc_ctx->pipeline_param->surface);
236 status = VA_STATUS_ERROR_INVALID_SURFACE;
240 if (!obj_src_surf->bo) {
241 status = VA_STATUS_ERROR_INVALID_VALUE; /* The input surface is created without valid content */
245 if (pipeline_param->num_filters && !pipeline_param->filters) {
246 status = VA_STATUS_ERROR_INVALID_PARAMETER;
250 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
251 /* explicitly initialize the VPP based on Render ring */
252 if (proc_ctx->vpp_fmt_cvt_ctx == NULL)
253 proc_ctx->vpp_fmt_cvt_ctx = i965_proc_context_init(ctx, NULL);
256 if (!obj_dst_surf->bo) {
257 unsigned int is_tiled = 1;
258 unsigned int fourcc = VA_FOURCC_NV12;
259 int sampling = SUBSAMPLE_YUV420;
261 if (obj_dst_surf->expected_format == VA_RT_FORMAT_YUV420_10BPP)
262 fourcc = VA_FOURCC_P010;
264 i965_check_alloc_surface_bo(ctx, obj_dst_surf, is_tiled, fourcc, sampling);
267 if (pipeline_param->surface_region) {
268 src_rect.x = pipeline_param->surface_region->x;
269 src_rect.y = pipeline_param->surface_region->y;
270 src_rect.width = pipeline_param->surface_region->width;
271 src_rect.height = pipeline_param->surface_region->height;
275 src_rect.width = obj_src_surf->orig_width;
276 src_rect.height = obj_src_surf->orig_height;
279 if (pipeline_param->output_region) {
280 dst_rect.x = pipeline_param->output_region->x;
281 dst_rect.y = pipeline_param->output_region->y;
282 dst_rect.width = pipeline_param->output_region->width;
283 dst_rect.height = pipeline_param->output_region->height;
287 dst_rect.width = obj_dst_surf->orig_width;
288 dst_rect.height = obj_dst_surf->orig_height;
291 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
292 VAStatus status = VA_STATUS_ERROR_UNIMPLEMENTED;
293 struct i965_proc_context *gpe_proc_ctx;
294 struct i965_surface src_surface, dst_surface;
296 gpe_proc_ctx = (struct i965_proc_context *)proc_ctx->vpp_fmt_cvt_ctx;
297 assert(gpe_proc_ctx != NULL); // gpe_proc_ctx must be a non-NULL pointer
299 if ((gpe_proc_ctx->pp_context.scaling_gpe_context_initialized & VPPGPE_8BIT_8BIT) &&
300 (obj_dst_surf->fourcc == VA_FOURCC_NV12) &&
301 pipeline_param->output_background_color)
302 gen8plus_vpp_clear_surface(ctx,
303 &gpe_proc_ctx->pp_context,
305 pipeline_param->output_background_color);
307 src_surface.base = (struct object_base *)obj_src_surf;
308 src_surface.type = I965_SURFACE_TYPE_SURFACE;
309 dst_surface.base = (struct object_base *)obj_dst_surf;
310 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
312 status = intel_common_scaling_post_processing(ctx,
313 &gpe_proc_ctx->pp_context,
314 &src_surface, &src_rect,
315 &dst_surface, &dst_rect);
317 if (status != VA_STATUS_ERROR_UNIMPLEMENTED)
321 proc_ctx->surface_render_output_object = obj_dst_surf;
322 proc_ctx->surface_pipeline_input_object = obj_src_surf;
323 assert(pipeline_param->num_filters <= 4);
325 int vpp_stage1 = 0, vpp_stage2 = 1, vpp_stage3 = 0;
328 if (obj_src_surf->fourcc == VA_FOURCC_P010) {
332 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
333 if (src_rect.x != dst_rect.x ||
334 src_rect.y != dst_rect.y ||
335 src_rect.width != dst_rect.width ||
336 src_rect.height != dst_rect.height)
339 if (obj_dst_surf->fourcc != VA_FOURCC_NV12 &&
340 obj_dst_surf->fourcc != VA_FOURCC_P010)
345 if (vpp_stage2 == 1) {
346 if (obj_dst_surf->fourcc == VA_FOURCC_P010)
349 } else if (obj_dst_surf->fourcc == VA_FOURCC_P010) {
353 if ((obj_src_surf->fourcc == VA_FOURCC_NV12) &&
354 (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL)) {
355 if ((src_rect.x == dst_rect.x) &&
356 (src_rect.y == dst_rect.y) &&
357 (src_rect.width == dst_rect.width) &&
358 (src_rect.height == dst_rect.height))
363 if (vpp_stage1 == 1) {
364 memset((void *)&pipeline_param2, 0, sizeof(pipeline_param2));
365 pipeline_param2.surface = pipeline_param->surface;
366 pipeline_param2.surface_region = &src_rect;
367 pipeline_param2.output_region = &src_rect;
368 pipeline_param2.filter_flags = 0;
369 pipeline_param2.num_filters = 0;
371 proc_ctx->pipeline_param = &pipeline_param2;
373 if (vpp_stage2 == 1) {
374 status = i965_CreateSurfaces(ctx,
375 obj_src_surf->orig_width,
376 obj_src_surf->orig_height,
380 assert(status == VA_STATUS_SUCCESS);
381 tmp_surfaces[num_tmp_surfaces++] = out_surface_id1;
382 stage1_dst_surf = SURFACE(out_surface_id1);
383 assert(stage1_dst_surf);
384 i965_check_alloc_surface_bo(ctx, stage1_dst_surf, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
386 proc_ctx->surface_render_output_object = stage1_dst_surf;
389 gen75_vpp_vebox(ctx, proc_ctx);
392 if ((vpp_stage3 == 1) && (vpp_stage2 == 1)) {
393 status = i965_CreateSurfaces(ctx,
394 obj_dst_surf->orig_width,
395 obj_dst_surf->orig_height,
399 assert(status == VA_STATUS_SUCCESS);
400 tmp_surfaces[num_tmp_surfaces++] = out_surface_id2;
401 stage2_dst_surf = SURFACE(out_surface_id2);
402 assert(stage2_dst_surf);
403 i965_check_alloc_surface_bo(ctx, stage2_dst_surf, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
406 VABufferID *filter_id = (VABufferID*) pipeline_param->filters;
408 if (vpp_stage2 == 1) {
409 if (stage1_dst_surf != NULL) {
410 proc_ctx->surface_pipeline_input_object = stage1_dst_surf;
411 proc_ctx->surface_render_output_object = obj_dst_surf;
413 pipeline_param->surface = out_surface_id1;
416 if (stage2_dst_surf != NULL) {
417 proc_ctx->surface_render_output_object = stage2_dst_surf;
419 proc_st->current_render_target = out_surface_id2;
422 proc_ctx->pipeline_param = pipeline_param;
424 if (pipeline_param->num_filters == 0 || pipeline_param->filters == NULL) {
425 /* implicity surface format coversion and scaling */
427 status = gen75_vpp_fmt_cvt(ctx, profile, codec_state, hw_context);
428 if (status != VA_STATUS_SUCCESS)
430 } else if (pipeline_param->num_filters == 1) {
431 struct object_buffer * obj_buf = BUFFER((*filter_id) + 0);
433 assert(obj_buf && obj_buf->buffer_store && obj_buf->buffer_store->buffer);
436 !obj_buf->buffer_store ||
437 !obj_buf->buffer_store->buffer) {
438 status = VA_STATUS_ERROR_INVALID_FILTER_CHAIN;
442 VAProcFilterParameterBuffer* filter =
443 (VAProcFilterParameterBuffer*)obj_buf-> buffer_store->buffer;
445 if (filter->type == VAProcFilterNoiseReduction ||
446 filter->type == VAProcFilterDeinterlacing ||
447 filter->type == VAProcFilterSkinToneEnhancement ||
448 filter->type == VAProcFilterSharpening ||
449 filter->type == VAProcFilterColorBalance) {
450 gen75_vpp_vebox(ctx, proc_ctx);
452 } else if (pipeline_param->num_filters >= 2) {
454 for (i = 0; i < pipeline_param->num_filters; i++) {
455 struct object_buffer * obj_buf = BUFFER(pipeline_param->filters[i]);
458 !obj_buf->buffer_store ||
459 !obj_buf->buffer_store->buffer) {
460 status = VA_STATUS_ERROR_INVALID_FILTER_CHAIN;
464 VAProcFilterParameterBuffer* filter =
465 (VAProcFilterParameterBuffer*)obj_buf-> buffer_store->buffer;
467 if (filter->type != VAProcFilterNoiseReduction &&
468 filter->type != VAProcFilterDeinterlacing &&
469 filter->type != VAProcFilterSkinToneEnhancement &&
470 filter->type != VAProcFilterColorBalance) {
471 fprintf(stderr, "Do not support multiply filters outside vebox pipeline \n");
475 gen75_vpp_vebox(ctx, proc_ctx);
479 if (vpp_stage3 == 1) {
480 if (vpp_stage2 == 1) {
481 memset(&pipeline_param2, 0, sizeof(pipeline_param2));
482 pipeline_param2.surface = out_surface_id2;
483 pipeline_param2.surface_region = &dst_rect;
484 pipeline_param2.output_region = &dst_rect;
485 pipeline_param2.filter_flags = 0;
486 pipeline_param2.num_filters = 0;
488 proc_ctx->pipeline_param = &pipeline_param2;
489 proc_ctx->surface_pipeline_input_object = proc_ctx->surface_render_output_object;
490 proc_ctx->surface_render_output_object = obj_dst_surf;
493 gen75_vpp_vebox(ctx, proc_ctx);
496 if (num_tmp_surfaces)
497 i965_DestroySurfaces(ctx,
501 return VA_STATUS_SUCCESS;
504 if (num_tmp_surfaces)
505 i965_DestroySurfaces(ctx,
513 gen75_proc_context_destroy(void *hw_context)
515 struct intel_video_process_context *proc_ctx =
516 (struct intel_video_process_context *)hw_context;
517 VADriverContextP ctx = (VADriverContextP)(proc_ctx->driver_context);
519 if (proc_ctx->vpp_fmt_cvt_ctx) {
520 proc_ctx->vpp_fmt_cvt_ctx->destroy(proc_ctx->vpp_fmt_cvt_ctx);
521 proc_ctx->vpp_fmt_cvt_ctx = NULL;
524 if (proc_ctx->vpp_vebox_ctx) {
525 gen75_vebox_context_destroy(ctx, proc_ctx->vpp_vebox_ctx);
526 proc_ctx->vpp_vebox_ctx = NULL;
533 gen75_proc_context_init(VADriverContextP ctx,
534 struct object_config *obj_config)
536 struct intel_video_process_context *proc_context
537 = calloc(1, sizeof(struct intel_video_process_context));
539 assert(proc_context);
540 proc_context->base.destroy = gen75_proc_context_destroy;
541 proc_context->base.run = gen75_proc_picture;
543 proc_context->vpp_vebox_ctx = NULL;
544 proc_context->vpp_fmt_cvt_ctx = NULL;
546 proc_context->driver_context = ctx;
548 return (struct hw_context *)proc_context;