/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Xiang Haihao <haihao.xiang@intel.com>
 */
27 #ifndef _I965_GPE_UTILS_H_
28 #define _I965_GPE_UTILS_H_
#include <intel_bufmgr.h>
#include "i965_defines.h"
#include "i965_structs.h"

/* Upper bound on kernels per context; sizes the fixed kernels[] array in
 * struct i965_gpe_context. */
#define MAX_GPE_KERNELS 32
/*
 * A linear buffer carved into fixed-size blocks, used as a kernel surface.
 * NOTE(review): the struct's braces and remaining members (e.g. the backing
 * bo) are not visible in this chunk — source appears truncated here.
 */
struct i965_buffer_surface
    unsigned int num_blocks;    /* number of blocks in the buffer */
    unsigned int size_block;    /* size of one block — presumably bytes; confirm */

/* Resource-type enumerator fragment (the enum's declaration line is not
 * visible in this chunk). */
    I965_GPE_RESOURCE_BUFFER = 0,

/*
 * Generic wrapper around a GPU buffer object, manipulated by the
 * i965_*_gpe_resource() helpers declared below.
 * NOTE(review): the member list is not visible in this chunk.
 */
struct i965_gpe_resource

/*
 * Byte offsets of the pieces of dynamic state inside a single dynamic-state
 * buffer (units assumed from naming — confirm against users).
 * NOTE(review): braces and additional members are not visible in this chunk.
 */
struct gpe_dynamic_state_parameter
    unsigned int curbe_offset;    /* offset of the CURBE (constant) data */
    unsigned int idrt_offset;     /* offset of the interface descriptor remap table */
    unsigned int sampler_offset;  /* offset of the sampler states */
/* flush_mode values for gpe_pipe_control_parameter. */
#define PIPE_CONTROL_FLUSH_NONE        0    /* no cache flush requested */
#define PIPE_CONTROL_FLUSH_WRITE_CACHE 1    /* flush the write cache */
#define PIPE_CONTROL_FLUSH_READ_CACHE  2    /* invalidate the read cache */

/* Parameters for emitting a PIPE_CONTROL command (see gen8_gpe_pipe_control).
 * NOTE(review): braces and additional members (e.g. a target bo) are not
 * visible in this chunk. */
struct gpe_pipe_control_parameter
    unsigned int flush_mode;        /* one of the PIPE_CONTROL_FLUSH_* values above */
    unsigned int disable_cs_stall;  /* nonzero: presumably suppresses the CS stall bit — confirm */
/*
 * Aggregate per-context state for running media/GPGPU kernels: the combined
 * surface-state + binding-table buffer, sized state areas, the loaded
 * kernels, and VFE configuration bitfields.
 * NOTE(review): large parts of this struct (nested-struct headers, buffer
 * members, closing brace) are not visible in this chunk; only the visible
 * lines are annotated.
 */
struct i965_gpe_context
        unsigned int length; /* in bytes */
        unsigned int max_entries;
        unsigned int binding_table_offset;  /* start of the binding table inside the buffer */
        unsigned int surface_state_offset;  /* start of the surface states inside the buffer */
    } surface_state_binding_table;

        unsigned int max_entries;
        unsigned int entry_size; /* in bytes */

        unsigned int length; /* in bytes */

        unsigned int max_entries;
        unsigned int entry_size; /* in bytes */

    /* The bitfields below look like a VFE (video front end) state payload —
     * confirm against the gen-specific pipeline_setup implementations. */
        unsigned int gpgpu_mode : 1;
        unsigned int pad0 : 7;
        unsigned int max_num_threads : 16;
        unsigned int num_urb_entries : 8;
        unsigned int urb_entry_size : 16;
        unsigned int curbe_allocation_size : 16;

    /* vfe_desc5/6/7 is used to determine whether the HW scoreboard is used.
     * If scoreboard is not used, don't touch them */
        unsigned int enable:1;

    unsigned int num_kernels;                     /* number of valid entries in kernels[] */
    struct i965_kernel kernels[MAX_GPE_KERNELS];  /* filled by the *_load_kernels() helpers */

        unsigned int end_offset;

        unsigned int end_offset;
/* ---- Parameter blocks for the MI_* command emitters declared below. ----
 * NOTE(review): each struct's braces and several members (e.g. bo/offset
 * pairs) are not visible in this chunk; only visible fields are annotated,
 * with meanings inferred from the MI command names — confirm against the
 * emitters. */

/* MI_FLUSH_DW */
struct gpe_mi_flush_dw_parameter
    unsigned int video_pipeline_cache_invalidate;  /* nonzero: invalidate the video pipeline cache */

/* MI_STORE_DATA_IMM */
struct gpe_mi_store_data_imm_parameter
    unsigned int is_qword;    /* nonzero: store 64 bits instead of 32 */

/* MI_STORE_REGISTER_MEM */
struct gpe_mi_store_register_mem_parameter
    unsigned int mmio_offset;    /* source MMIO register */

/* MI_LOAD_REGISTER_MEM */
struct gpe_mi_load_register_mem_parameter
    unsigned int mmio_offset;    /* destination MMIO register */

/* MI_LOAD_REGISTER_IMM */
struct gpe_mi_load_register_imm_parameter
    unsigned int mmio_offset;    /* destination MMIO register */

/* MI_LOAD_REGISTER_REG */
struct gpe_mi_load_register_reg_parameter
    unsigned int src_mmio_offset;    /* register copied from */
    unsigned int dst_mmio_offset;    /* register copied to */

/* MI_MATH: small ALU program executed by the command streamer */
struct gpe_mi_math_parameter
    unsigned int num_instructions;    /* entries in instruction_list */
    unsigned int *instruction_list;   /* caller-owned array of ALU instruction dwords */

/* MI_CONDITIONAL_BATCH_BUFFER_END */
struct gpe_mi_conditional_batch_buffer_end_parameter
    unsigned int compare_mask_mode_disabled;    /* nonzero: disable compare-mask mode */
    unsigned int compare_data;                  /* value compared against memory */

/* MI_BATCH_BUFFER_START */
struct gpe_mi_batch_buffer_start_parameter
    unsigned int is_second_level;    /* nonzero: second-level batch */
    unsigned int use_global_gtt;     /* nonzero: address is in the global GTT */
/* Tear down a GPE context (Gen6/7 common variant). */
void i965_gpe_context_destroy(struct i965_gpe_context *gpe_context);

/* Initialize the per-context state buffers. */
void i965_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);

/* Load num_kernels descriptors from kernel_list into gpe_context->kernels
 * (presumably bounded by MAX_GPE_KERNELS — confirm in the .c). */
void i965_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);

/* Emit the media pipeline state-setup commands into batch (Gen6). */
void gen6_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);

/* Bind obj_surface using the "surface2" state flavor — presumably the
 * advanced/media SURFACE_STATE; confirm in the implementation (Gen6). */
void i965_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);

/* Bind obj_surface for media read/write (Gen6).
 * NOTE(review): trailing parameter(s) are missing from view here. */
void i965_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,

/* Bind a linear buffer surface (Gen6).  Note: "suface" is a long-standing
 * typo preserved because callers use this exact name. */
void i965_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);
/* Gen7 variants of the Gen6 surface-binding helpers above. */
void gen7_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);

/* NOTE(review): trailing parameter(s) missing from view. */
void gen7_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,

/* "suface" typo preserved for source compatibility with callers. */
void gen7_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);

/* Bind the chroma plane of obj_surface (Gen7.5).
 * NOTE(review): trailing parameter(s) missing from view. */
void gen75_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                          struct i965_gpe_context *gpe_context,
                                          struct object_surface *obj_surface,
                                          unsigned long binding_table_offset,
                                          unsigned long surface_state_offset,
/* Gen8 variants of the surface-binding helpers. */
extern void gen8_gpe_surface2_setup(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct object_surface *obj_surface,
                                    unsigned long binding_table_offset,
                                    unsigned long surface_state_offset);

/* NOTE(review): trailing parameter(s) missing from view. */
extern void gen8_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                            struct i965_gpe_context *gpe_context,
                                            struct object_surface *obj_surface,
                                            unsigned long binding_table_offset,
                                            unsigned long surface_state_offset,

/* "suface" typo preserved for source compatibility with callers. */
extern void gen8_gpe_buffer_suface_setup(VADriverContextP ctx,
                                         struct i965_gpe_context *gpe_context,
                                         struct i965_buffer_surface *buffer_surface,
                                         unsigned long binding_table_offset,
                                         unsigned long surface_state_offset);

/* Bind the chroma plane of obj_surface (Gen8).
 * NOTE(review): trailing parameter(s) missing from view. */
extern void gen8_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                                struct i965_gpe_context *gpe_context,
                                                struct object_surface *obj_surface,
                                                unsigned long binding_table_offset,
                                                unsigned long surface_state_offset,

/* Emit the media pipeline state-setup commands into batch (Gen8). */
void gen8_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);
/* Record the dynamic-state buffer offsets described by ds into gpe_context.
 * NOTE(review): the return-type line is missing from view (likely void). */
gen8_gpe_context_set_dynamic_buffer(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct gpe_dynamic_state_parameter *ds);

/* Gen8 context lifecycle; mirrors the i965_gpe_context_* trio above. */
void gen8_gpe_context_destroy(struct i965_gpe_context *gpe_context);

void gen8_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);

void gen8_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);

/* Emit the media pipeline state-setup commands into batch (Gen9). */
void gen9_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);

/* Emit the commands that close out the media pipeline (Gen9 only; see the
 * pipeline_end slot in struct i965_gpe_table). */
void gen9_gpe_pipeline_end(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);
/* Allocate the backing bo for res.
 * NOTE(review): trailing parameters are missing from view. */
Bool i965_allocate_gpe_resource(dri_bufmgr *bufmgr,
                                struct i965_gpe_resource *res,

/* Point res at the bo backing an existing VA surface. */
void i965_object_surface_to_2d_gpe_resource(struct i965_gpe_resource *res,
                                            struct object_surface *obj_surface);

/* Same, but with an explicit alignment requirement. */
void i965_object_surface_to_2d_gpe_resource_with_align(struct i965_gpe_resource *res,
                                                       struct object_surface *obj_surface,
                                                       unsigned int alignment);

/* Wrap dri buffer objects as GPE resources.
 * NOTE(review): trailing parameters of both declarations missing from view. */
void i965_dri_object_to_buffer_gpe_resource(struct i965_gpe_resource *res,

void i965_dri_object_to_2d_gpe_resource(struct i965_gpe_resource *res,

/* Fill the resource's contents with zeros. */
void i965_zero_gpe_resource(struct i965_gpe_resource *res);

/* Release the resource's backing storage/references. */
void i965_free_gpe_resource(struct i965_gpe_resource *res);

/* Map the resource for CPU access; pair with i965_unmap_gpe_resource(). */
void *i965_map_gpe_resource(struct i965_gpe_resource *res);

void i965_unmap_gpe_resource(struct i965_gpe_resource *res);
/* ---- Emitters for individual MI_* commands into a batch buffer.  gen9_*
 * variants exist where the command encoding changed on Gen9. ---- */

void gen8_gpe_mi_flush_dw(VADriverContextP ctx,
                          struct intel_batchbuffer *batch,
                          struct gpe_mi_flush_dw_parameter *params);

void gen8_gpe_mi_store_data_imm(VADriverContextP ctx,
                                struct intel_batchbuffer *batch,
                                struct gpe_mi_store_data_imm_parameter *params);

void gen8_gpe_mi_store_register_mem(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_store_register_mem_parameter *params);

void gen8_gpe_mi_load_register_mem(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_mem_parameter *params);

void gen8_gpe_mi_load_register_imm(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_imm_parameter *params);

void gen8_gpe_mi_load_register_reg(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_reg_parameter *params);

void gen9_gpe_mi_math(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_mi_math_parameter *params);

void gen9_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                              struct intel_batchbuffer *batch,
                                              struct gpe_mi_conditional_batch_buffer_end_parameter *params);

void gen8_gpe_mi_batch_buffer_start(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_batch_buffer_start_parameter *params);
/*
 * Inline (per-MEDIA_OBJECT) payload.
 * NOTE(review): only the reserved bitfield is visible in this chunk.
 */
struct gpe_media_object_inline_data
    unsigned int reserved:16;

/*
 * Parameters for a single MEDIA_OBJECT dispatch.
 * NOTE(review): braces and further members (e.g. an inline-data pointer)
 * are not visible in this chunk.
 */
struct gpe_media_object_parameter
    unsigned int use_scoreboard;    /* nonzero: enable the HW scoreboard */
    unsigned int scoreboard_x;
    unsigned int scoreboard_y;
    unsigned int scoreboard_mask;
    unsigned int interface_offset;  /* interface descriptor (kernel) index to run */
    unsigned int inline_size;       /* inline data size — presumably bytes; confirm */

/*
 * Describes one surface to be added to a context's binding table by the
 * *_gpe_context_add_surface() helpers; the is_* flags select which
 * SURFACE_STATE flavor gets emitted.
 * NOTE(review): braces and some members are not visible in this chunk.
 */
struct i965_gpe_surface
    unsigned int is_buffer:1;           /* linear buffer surface */
    unsigned int is_2d_surface:1;       /* plain 2D surface */
    unsigned int is_adv_surface:1;      /* advanced (media) surface */
    unsigned int is_uv_surface:1;       /* bind the UV (chroma) plane */
    unsigned int is_media_block_rw:1;   /* media block read/write access */
    unsigned int is_raw_buffer:1;       /* untyped (RAW) buffer */
    unsigned int is_16bpp :1;
    /* use the override_offset for 2d_surface */
    unsigned int is_override_offset : 1;

    unsigned int vert_line_stride_offset;
    unsigned int vert_line_stride;
    unsigned int cacheability_control;
    unsigned int format; // 2d surface only
    unsigned int v_direction; // adv surface only
    unsigned int size; // buffer only

    struct i965_gpe_resource *gpe_resource;  /* backing resource; ownership presumably stays with the caller */
/* Reset the context's binding table (Gen9).
 * NOTE(review): the return-type line is missing from view (likely void). */
gen9_gpe_reset_binding_table(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context);

/* Map the CURBE buffer for CPU writes; pair with
 * i965_gpe_context_unmap_curbe(). */
void *i965_gpe_context_map_curbe(struct i965_gpe_context *gpe_context);

void i965_gpe_context_unmap_curbe(struct i965_gpe_context *gpe_context);

/* Write the interface descriptor data for the loaded kernels (Gen8+). */
void gen8_gpe_setup_interface_data(VADriverContextP ctx,
                                   struct i965_gpe_context *gpe_context);

/* Add gpe_surface to the context's binding table (Gen9).
 * NOTE(review): return type and trailing parameter(s) missing from view. */
gen9_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,

/* Allocate a 2D GPE resource.
 * NOTE(review): return type and trailing parameters missing from view. */
i965_gpe_allocate_2d_resource(dri_bufmgr *bufmgr,
                              struct i965_gpe_resource *res,
/*
 * Full MEDIA_OBJECT_WALKER configuration: scoreboard setup plus the nested
 * local/global loop geometry of the HW media walker.
 * NOTE(review): the opening brace, a leading member or two, and the closing
 * brace are not visible in this chunk.
 */
struct gpe_media_object_walker_parameter
    unsigned int inline_size;        /* inline data size — presumably bytes; confirm */
    unsigned int interface_offset;   /* interface descriptor (kernel) index */
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int group_id_loop_select;
    unsigned int color_count_minus1;
    unsigned int mid_loop_unit_x;
    unsigned int mid_loop_unit_y;
    unsigned int middle_loop_extra_steps;
    unsigned int local_loop_exec_count;
    unsigned int global_loop_exec_count;
    /* Walker iteration geometry as x/y pairs. */
    struct gpe_walker_xy block_resolution;
    struct gpe_walker_xy local_start;
    struct gpe_walker_xy local_end;
    struct gpe_walker_xy local_outer_loop_stride;
    struct gpe_walker_xy local_inner_loop_unit;
    struct gpe_walker_xy global_resolution;
    struct gpe_walker_xy global_start;
    struct gpe_walker_xy global_outer_loop_stride;
    struct gpe_walker_xy global_inner_loop_unit;
/* Walker dependency-degree enumerator fragment (the enum's declaration line
 * is not visible in this chunk). */
    WALKER_NO_DEGREE = 0,

/*
 * Simplified walker description used by encoder kernels; expanded into a
 * full gpe_media_object_walker_parameter by
 * i965_init_media_object_walker_parameter().
 * NOTE(review): the struct's braces are not visible in this chunk.
 */
struct gpe_encoder_kernel_walker_parameter
    unsigned int walker_degree;     /* one of the WALKER_* dependency degrees */
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int no_dependency;     /* nonzero: threads have no inter-dependency */
    unsigned int resolution_x;      /* walker resolution (x) */
    unsigned int resolution_y;      /* walker resolution (y) */
    unsigned int use_vertical_raster_scan;
/* Emit a single MEDIA_OBJECT command.
 * NOTE(review): return-type lines for several declarations below are
 * missing from view (likely void). */
gen8_gpe_media_object(VADriverContextP ctx,
                      struct i965_gpe_context *gpe_context,
                      struct intel_batchbuffer *batch,
                      struct gpe_media_object_parameter *param);

/* Emit a media-state flush after kernel dispatch. */
gen8_gpe_media_state_flush(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

/* Emit a MEDIA_OBJECT_WALKER command. */
gen8_gpe_media_object_walker(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch,
                             struct gpe_media_object_walker_parameter *param);

/* Simplified walker description used by VPP (post-processing) kernels.
 * NOTE(review): the struct's braces are not visible in this chunk. */
struct intel_vpp_kernel_walker_parameter
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int no_dependency;
    unsigned int resolution_x;
    unsigned int resolution_y;

/* Expand a VPP walker description into a full walker parameter block. */
intel_vpp_init_media_object_walker_parameter(struct intel_vpp_kernel_walker_parameter *kernel_walker_param,
                                             struct gpe_media_object_walker_parameter *walker_param);

gen8_gpe_reset_binding_table(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

/* NOTE(review): return type and trailing parameter(s) missing from view. */
gen8_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,

gen8_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                         struct intel_batchbuffer *batch,
                                         struct gpe_mi_conditional_batch_buffer_end_parameter *param);

gen8_gpe_pipe_control(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_pipe_control_parameter *param);

/* Expand an encoder walker description into a full walker parameter block. */
i965_init_media_object_walker_parameter(struct gpe_encoder_kernel_walker_parameter *kernel_walker_param,
                                        struct gpe_media_object_walker_parameter *walker_param);
/*
 * Convenience wrappers that fill an i965_gpe_surface and add it to the
 * context's binding table (Gen9).
 * NOTE(review): every declaration below is missing its return-type line
 * and/or trailing parameters in this chunk.
 */
gen9_add_2d_gpe_surface(VADriverContextP ctx,
                        struct i965_gpe_context *gpe_context,
                        struct object_surface *obj_surface,
                        int is_media_block_rw,

gen9_add_adv_gpe_surface(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct object_surface *obj_surface,

gen9_add_buffer_gpe_surface(VADriverContextP ctx,
                            struct i965_gpe_context *gpe_context,
                            struct i965_gpe_resource *gpe_buffer,

gen9_add_buffer_2d_gpe_surface(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct i965_gpe_resource *gpe_buffer,
                               int is_media_block_rw,

gen9_add_dri_buffer_gpe_surface(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
/*
 * Per-generation dispatch table for the GPE helpers.  The slots are filled
 * with the matching genX_* implementations declared above so common
 * encode/VPP code can stay generation-agnostic — presumably populated by
 * i965_gpe_table_init(); confirm in the .c file.
 * NOTE(review): the struct's braces and a few member lines are not visible
 * in this chunk.
 */
struct i965_gpe_table
    void (*context_init)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context);

    void (*context_destroy)(struct i965_gpe_context *gpe_context);

    /* NOTE(review): trailing parameter(s) of this slot missing from view. */
    void (*context_add_surface)(struct i965_gpe_context *gpe_context,
                                struct i965_gpe_surface *gpe_surface,

    void (*reset_binding_table)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*load_kernels)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct i965_kernel *kernel_list,
                         unsigned int num_kernels);

    void (*setup_interface_data)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*set_dynamic_buffer)(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct gpe_dynamic_state_parameter *ds);

    void (*media_object)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch,
                         struct gpe_media_object_parameter *param);

    void (*media_object_walker)(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
                                struct intel_batchbuffer *batch,
                                struct gpe_media_object_walker_parameter *param);

    void (*media_state_flush)(VADriverContextP ctx,
                              struct i965_gpe_context *gpe_context,
                              struct intel_batchbuffer *batch);

    void (*pipe_control)(VADriverContextP ctx,
                         struct intel_batchbuffer *batch,
                         struct gpe_pipe_control_parameter *param);

    void (*pipeline_end)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch); // only available on gen9+

    void (*pipeline_setup)(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

    void (*mi_conditional_batch_buffer_end)(VADriverContextP ctx,
                                            struct intel_batchbuffer *batch,
                                            struct gpe_mi_conditional_batch_buffer_end_parameter *param);

    void (*mi_batch_buffer_start)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_batch_buffer_start_parameter *params);

    void (*mi_load_register_reg)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_reg_parameter *params);

    void (*mi_load_register_imm)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_imm_parameter *params);

    void (*mi_load_register_mem)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_mem_parameter *params);

    void (*mi_store_register_mem)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_store_register_mem_parameter *params);

    void (*mi_store_data_imm)(VADriverContextP ctx,
                              struct intel_batchbuffer *batch,
                              struct gpe_mi_store_data_imm_parameter *params);

    void (*mi_flush_dw)(VADriverContextP ctx,
                        struct intel_batchbuffer *batch,
                        struct gpe_mi_flush_dw_parameter *params);
/* Install / tear down the per-generation GPE function table for the driver.
 * NOTE(review): the return-type lines are missing from view. */
i965_gpe_table_init(VADriverContextP ctx);

i965_gpe_table_terminate(VADriverContextP ctx);
725 #endif /* _I965_GPE_UTILS_H_ */