2 * Copyright © 2012 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 * Xiang Haihao <haihao.xiang@intel.com>
27 #ifndef _I965_GPE_UTILS_H_
28 #define _I965_GPE_UTILS_H_
31 #include <intel_bufmgr.h>
33 #include "i965_defines.h"
34 #include "i965_drv_video.h"
35 #include "i965_structs.h"
/* Upper bound on the number of kernels one i965_gpe_context can hold
 * (sizes the kernels[] array in struct i965_gpe_context). */
37 #define MAX_GPE_KERNELS 32
/*
 * Buffer-type media surface: a linear buffer viewed as num_blocks blocks of
 * size_block each.
 * NOTE(review): this declaration is truncated in this listing -- the opening
 * brace and remaining members (e.g. the backing bo) are missing; compare
 * against the upstream i965_gpe_utils.h before relying on the layout.
 */
39 struct i965_buffer_surface
42 unsigned int num_blocks;
43 unsigned int size_block; /* block size, presumably in bytes -- TODO confirm */
/* First enumerator of the GPE resource-type enum; the enum's opening line is
 * missing from this listing. */
48 I965_GPE_RESOURCE_BUFFER = 0,
/* Generic GPE resource wrapper (backing bo plus layout metadata); its members
 * are not visible in this listing -- see the upstream header. */
52 struct i965_gpe_resource
/*
 * Byte offsets of the regions packed into one dynamic-state buffer:
 * CURBE data, interface descriptor remap table (IDRT) and sampler states.
 * Consumed by gen8_gpe_context_set_dynamic_buffer().
 * NOTE(review): declaration truncated in this listing (brace/bo lines missing).
 */
67 struct gpe_dynamic_state_parameter
71 unsigned int curbe_offset;
72 unsigned int idrt_offset;
73 unsigned int sampler_offset;
/*
 * Per-pipeline GPE execution context: surface-state/binding-table layout,
 * interface descriptors, CURBE, VFE state and the loaded kernels.
 * NOTE(review): heavily truncated in this listing -- the nested struct
 * openers, bo members and several fields are missing; consult the upstream
 * header for the full layout.
 */
76 struct i965_gpe_context
/* Combined surface-state + binding-table allocation layout. */
80 unsigned int length; /* in bytes */
81 unsigned int max_entries;
82 unsigned int binding_table_offset;
83 unsigned int surface_state_offset;
84 } surface_state_binding_table;
/* Interface descriptor table sizing. */
88 unsigned int max_entries;
89 unsigned int entry_size; /* in bytes */
94 unsigned int length; /* in bytes */
/* MEDIA_VFE_STATE fields, packed as hardware bitfields; widths must match
 * the command layout exactly. */
98 unsigned int gpgpu_mode : 1;
99 unsigned int pad0 : 7;
100 unsigned int max_num_threads : 16;
101 unsigned int num_urb_entries : 8;
102 unsigned int urb_entry_size : 16;
103 unsigned int curbe_allocation_size : 16;
106 /* vfe_desc5/6/7 is used to determine whether the HW scoreboard is used.
107 * If scoreboard is not used, don't touch them
115 unsigned int enable:1;
/* Kernels loaded via i965_gpe_load_kernels()/gen8_gpe_load_kernels();
 * num_kernels counts the valid entries in kernels[]. */
147 unsigned int num_kernels;
148 struct i965_kernel kernels[MAX_GPE_KERNELS];
153 unsigned int end_offset;
163 unsigned int end_offset;
/* Offsets into the dynamic-state buffer -- presumably mirror
 * gpe_dynamic_state_parameter; confirm against upstream. */
166 unsigned int sampler_offset;
169 unsigned int idrt_offset;
171 unsigned int curbe_offset;
/* The structs below carry the operands of individual MI_* commands emitted by
 * the gen9_gpe_mi_* helpers declared further down.  All of these declarations
 * are truncated in this listing (braces and bo/offset members missing). */
175 struct gpe_mi_flush_dw_parameter
179 unsigned int video_pipeline_cache_invalidate;
/* MI_STORE_DATA_IMM operands. */
184 struct gpe_mi_store_data_imm_parameter
187 unsigned int is_qword; /* nonzero: 64-bit store -- presumed from the name */
/* MI_STORE_REGISTER_MEM operands: copy an MMIO register to memory. */
193 struct gpe_mi_store_register_mem_parameter
197 unsigned int mmio_offset;
/* MI_LOAD_REGISTER_MEM operands: load an MMIO register from memory. */
200 struct gpe_mi_load_register_mem_parameter
204 unsigned int mmio_offset;
/* MI_LOAD_REGISTER_IMM operands. */
207 struct gpe_mi_load_register_imm_parameter
210 unsigned int mmio_offset;
/* MI_LOAD_REGISTER_REG operands: copy one MMIO register to another. */
213 struct gpe_mi_load_register_reg_parameter
215 unsigned int src_mmio_offset;
216 unsigned int dst_mmio_offset;
/* MI_MATH operands: a caller-owned list of ALU instruction dwords. */
219 struct gpe_mi_math_parameter
221 unsigned int num_instructions;
222 unsigned int *instruction_list;
/* MI_CONDITIONAL_BATCH_BUFFER_END operands. */
225 struct gpe_mi_conditional_batch_buffer_end_parameter
229 unsigned int compare_mask_mode_disabled;
230 unsigned int compare_data;
/* MI_BATCH_BUFFER_START operands. */
233 struct gpe_mi_batch_buffer_start_parameter
237 unsigned int is_second_level;
238 unsigned int use_global_gtt;
/* GPE context lifecycle and kernel loading (pre-gen8 path). */
241 void i965_gpe_context_destroy(struct i965_gpe_context *gpe_context);
242 void i965_gpe_context_init(VADriverContextP ctx,
243 struct i965_gpe_context *gpe_context);
/* Registers kernel descriptors with the context; presumably bounded by
 * MAX_GPE_KERNELS -- confirm in the implementation. */
244 void i965_gpe_load_kernels(VADriverContextP ctx,
245 struct i965_gpe_context *gpe_context,
246 struct i965_kernel *kernel_list,
247 unsigned int num_kernels);
/* Emits the gen6 media pipeline setup commands into the batch. */
248 void gen6_gpe_pipeline_setup(VADriverContextP ctx,
249 struct i965_gpe_context *gpe_context,
250 struct intel_batchbuffer *batch);
/*
 * Pre-gen8 surface-state setup helpers: bind a VA surface or buffer into the
 * context's surface state / binding table at the given offsets.
 * NOTE(review): several prototypes here are truncated in this listing
 * (trailing parameters and closing parens are missing).  "suface" in the
 * buffer helpers is a long-standing upstream typo; it is the exported name,
 * so it must not be "fixed" without breaking callers.
 */
251 void i965_gpe_surface2_setup(VADriverContextP ctx,
252 struct i965_gpe_context *gpe_context,
253 struct object_surface *obj_surface,
254 unsigned long binding_table_offset,
255 unsigned long surface_state_offset);
256 void i965_gpe_media_rw_surface_setup(VADriverContextP ctx,
257 struct i965_gpe_context *gpe_context,
258 struct object_surface *obj_surface,
259 unsigned long binding_table_offset,
260 unsigned long surface_state_offset,
262 void i965_gpe_buffer_suface_setup(VADriverContextP ctx,
263 struct i965_gpe_context *gpe_context,
264 struct i965_buffer_surface *buffer_surface,
265 unsigned long binding_table_offset,
266 unsigned long surface_state_offset);
/* gen7 variants of the same helpers. */
267 void gen7_gpe_surface2_setup(VADriverContextP ctx,
268 struct i965_gpe_context *gpe_context,
269 struct object_surface *obj_surface,
270 unsigned long binding_table_offset,
271 unsigned long surface_state_offset);
272 void gen7_gpe_media_rw_surface_setup(VADriverContextP ctx,
273 struct i965_gpe_context *gpe_context,
274 struct object_surface *obj_surface,
275 unsigned long binding_table_offset,
276 unsigned long surface_state_offset,
278 void gen7_gpe_buffer_suface_setup(VADriverContextP ctx,
279 struct i965_gpe_context *gpe_context,
280 struct i965_buffer_surface *buffer_surface,
281 unsigned long binding_table_offset,
282 unsigned long surface_state_offset);
/* Haswell (gen7.5): binds the chroma plane of a surface separately. */
283 void gen75_gpe_media_chroma_surface_setup(VADriverContextP ctx,
284 struct i965_gpe_context *gpe_context,
285 struct object_surface *obj_surface,
286 unsigned long binding_table_offset,
287 unsigned long surface_state_offset,
/*
 * gen8 (Broadwell) variants of the surface-state setup helpers.
 * NOTE(review): the media_rw/chroma prototypes are truncated in this listing
 * (final parameter and closing paren missing).
 */
290 extern void gen8_gpe_surface2_setup(VADriverContextP ctx,
291 struct i965_gpe_context *gpe_context,
292 struct object_surface *obj_surface,
293 unsigned long binding_table_offset,
294 unsigned long surface_state_offset);
295 extern void gen8_gpe_media_rw_surface_setup(VADriverContextP ctx,
296 struct i965_gpe_context *gpe_context,
297 struct object_surface *obj_surface,
298 unsigned long binding_table_offset,
299 unsigned long surface_state_offset,
301 extern void gen8_gpe_buffer_suface_setup(VADriverContextP ctx,
302 struct i965_gpe_context *gpe_context,
303 struct i965_buffer_surface *buffer_surface,
304 unsigned long binding_table_offset,
305 unsigned long surface_state_offset);
306 extern void gen8_gpe_media_chroma_surface_setup(VADriverContextP ctx,
307 struct i965_gpe_context *gpe_context,
308 struct object_surface *obj_surface,
309 unsigned long binding_table_offset,
310 unsigned long surface_state_offset,
/* Emits the gen8 media pipeline setup commands into the batch. */
313 void gen8_gpe_pipeline_setup(VADriverContextP ctx,
314 struct i965_gpe_context *gpe_context,
315 struct intel_batchbuffer *batch);
/* Points the context at externally managed dynamic state (CURBE/IDRT/sampler
 * offsets from ds).  NOTE(review): the return-type line of this prototype is
 * missing from this listing. */
317 gen8_gpe_context_set_dynamic_buffer(VADriverContextP ctx,
318 struct i965_gpe_context *gpe_context,
319 struct gpe_dynamic_state_parameter *ds);
/* gen8 context lifecycle and kernel loading. */
322 void gen8_gpe_context_destroy(struct i965_gpe_context *gpe_context);
323 void gen8_gpe_context_init(VADriverContextP ctx,
324 struct i965_gpe_context *gpe_context);
326 void gen8_gpe_load_kernels(VADriverContextP ctx,
327 struct i965_gpe_context *gpe_context,
328 struct i965_kernel *kernel_list,
329 unsigned int num_kernels);
/* gen9 (Skylake+) media pipeline bring-up and teardown; pipeline_end emits
 * whatever the generation needs to close out the media pipeline. */
331 void gen9_gpe_pipeline_setup(VADriverContextP ctx,
332 struct i965_gpe_context *gpe_context,
333 struct intel_batchbuffer *batch);
335 void gen9_gpe_pipeline_end(VADriverContextP ctx,
336 struct i965_gpe_context *gpe_context,
337 struct intel_batchbuffer *batch);
/*
 * i965_gpe_resource management.  Allocation returns Bool (X-style success
 * flag); the *_to_*_gpe_resource helpers wrap existing objects without
 * allocating.  NOTE(review): the allocate/dri-object prototypes are truncated
 * in this listing (trailing parameters missing).
 */
339 Bool i965_allocate_gpe_resource(dri_bufmgr *bufmgr,
340 struct i965_gpe_resource *res,
344 void i965_object_surface_to_2d_gpe_resource(struct i965_gpe_resource *res,
345 struct object_surface *obj_surface);
347 void i965_dri_object_to_buffer_gpe_resource(struct i965_gpe_resource *res,
350 void i965_gpe_dri_object_to_2d_gpe_resource(struct i965_gpe_resource *res,
/* Zero-fills the resource's backing storage. */
356 void i965_zero_gpe_resource(struct i965_gpe_resource *res);
358 void i965_free_gpe_resource(struct i965_gpe_resource *res);
/* Map/unmap must be balanced; map returns a CPU pointer to the bo contents. */
360 void *i965_map_gpe_resource(struct i965_gpe_resource *res);
362 void i965_unmap_gpe_resource(struct i965_gpe_resource *res);
/* gen9 MI_* command emitters: each appends one MI command, described by the
 * matching gpe_mi_*_parameter struct above, to the batch buffer. */
364 void gen9_gpe_mi_flush_dw(VADriverContextP ctx,
365 struct intel_batchbuffer *batch,
366 struct gpe_mi_flush_dw_parameter *params);
368 void gen9_gpe_mi_store_data_imm(VADriverContextP ctx,
369 struct intel_batchbuffer *batch,
370 struct gpe_mi_store_data_imm_parameter *params);
372 void gen9_gpe_mi_store_register_mem(VADriverContextP ctx,
373 struct intel_batchbuffer *batch,
374 struct gpe_mi_store_register_mem_parameter *params);
376 void gen9_gpe_mi_load_register_mem(VADriverContextP ctx,
377 struct intel_batchbuffer *batch,
378 struct gpe_mi_load_register_mem_parameter *params);
380 void gen9_gpe_mi_load_register_imm(VADriverContextP ctx,
381 struct intel_batchbuffer *batch,
382 struct gpe_mi_load_register_imm_parameter *params);
384 void gen9_gpe_mi_load_register_reg(VADriverContextP ctx,
385 struct intel_batchbuffer *batch,
386 struct gpe_mi_load_register_reg_parameter *params);
388 void gen9_gpe_mi_math(VADriverContextP ctx,
389 struct intel_batchbuffer *batch,
390 struct gpe_mi_math_parameter *params);
392 void gen9_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
393 struct intel_batchbuffer *batch,
394 struct gpe_mi_conditional_batch_buffer_end_parameter *params);
396 void gen9_gpe_mi_batch_buffer_start(VADriverContextP ctx,
397 struct intel_batchbuffer *batch,
398 struct gpe_mi_batch_buffer_start_parameter *params);
/*
 * MEDIA_OBJECT command parameters: single media thread dispatch with optional
 * HW scoreboard coordinates and caller-supplied inline data.
 * NOTE(review): declaration truncated in this listing (brace and the inline
 * data pointer are missing).
 */
401 struct gpe_media_object_parameter
403 unsigned int use_scoreboard;
404 unsigned int scoreboard_x;
405 unsigned int scoreboard_y;
406 unsigned int scoreboard_mask;
407 unsigned int interface_offset;
409 unsigned int inline_size; /* size of inline data, presumably bytes -- confirm */
/*
 * Describes how a gpe_resource should be exposed to a kernel as a surface
 * state entry; the is_* flag bits select which surface-state flavor is built
 * by gen9_gpe_context_add_surface().
 * NOTE(review): declaration truncated in this listing.
 */
412 struct i965_gpe_surface
414 unsigned int is_buffer:1;
415 unsigned int is_2d_surface:1;
416 unsigned int is_adv_surface:1;
417 unsigned int is_uv_surface:1;
418 unsigned int is_media_block_rw:1;
419 unsigned int is_raw_buffer:1;
421 unsigned int vert_line_stride_offset;
422 unsigned int vert_line_stride;
423 unsigned int cacheability_control;
424 unsigned int format; // 2d surface only
425 unsigned int v_direction; // adv surface only
426 unsigned int size; // buffer only
427 unsigned int offset; // buffer only
/* Backing resource; ownership stays with the caller -- TODO confirm. */
429 struct i965_gpe_resource *gpe_resource;
/* Clears the context's binding table entries.  NOTE(review): the return-type
 * line of this prototype is missing from this listing. */
433 gen9_gpe_reset_binding_table(VADriverContextP ctx,
434 struct i965_gpe_context *gpe_context);
/* Map/unmap the CURBE region for CPU writes; calls must be balanced. */
436 void *gen8p_gpe_context_map_curbe(struct i965_gpe_context *gpe_context);
439 void gen8p_gpe_context_unmap_curbe(struct i965_gpe_context *gpe_context);
/* Writes the interface descriptor data for the loaded kernels. */
442 void gen8_gpe_setup_interface_data(VADriverContextP ctx,
443 struct i965_gpe_context *gpe_context);
/* Adds one i965_gpe_surface to the context's binding table.  NOTE(review):
 * return-type line and trailing parameter are missing in this listing. */
445 gen9_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
446 struct i965_gpe_surface *gpe_surface,
/* Allocates a 2D gpe resource.  NOTE(review): return-type line and trailing
 * parameters are missing in this listing. */
450 i965_gpe_allocate_2d_resource(dri_bufmgr *bufmgr,
451 struct i965_gpe_resource *res,
/*
 * MEDIA_OBJECT_WALKER command parameters: hardware thread-space walker
 * configuration (loop select, color count, and the nested local/global loop
 * geometry expressed as gpe_walker_xy pairs).
 * NOTE(review): declaration truncated in this listing (brace, pointer to
 * inline data, and closing lines are missing); struct gpe_walker_xy is
 * declared outside this view.
 */
468 struct gpe_media_object_walker_parameter
471 unsigned int inline_size;
472 unsigned int interface_offset;
473 unsigned int use_scoreboard;
474 unsigned int scoreboard_mask;
475 unsigned int group_id_loop_select;
476 unsigned int color_count_minus1;
477 unsigned int mid_loop_unit_x;
478 unsigned int mid_loop_unit_y;
479 unsigned int middle_loop_extra_steps;
480 unsigned int local_loop_exec_count;
481 unsigned int global_loop_exec_count;
482 struct gpe_walker_xy block_resolution;
483 struct gpe_walker_xy local_start;
484 struct gpe_walker_xy local_end;
485 struct gpe_walker_xy local_outer_loop_stride;
486 struct gpe_walker_xy local_inner_loop_unit;
487 struct gpe_walker_xy global_resolution;
488 struct gpe_walker_xy global_start;
489 struct gpe_walker_xy global_outer_loop_stride;
490 struct gpe_walker_xy global_inner_loop_unit;
/* Emits a MEDIA_OBJECT command.  NOTE(review): the return-type lines of the
 * three prototypes below are missing from this listing. */
494 gen8_gpe_media_object(VADriverContextP ctx,
495 struct i965_gpe_context *gpe_context,
496 struct intel_batchbuffer *batch,
497 struct gpe_media_object_parameter *param);
/* Emits a MEDIA_STATE_FLUSH command. */
500 gen8_gpe_media_state_flush(VADriverContextP ctx,
501 struct i965_gpe_context *gpe_context,
502 struct intel_batchbuffer *batch);
/* Emits a MEDIA_OBJECT_WALKER command configured from param. */
505 gen9_gpe_media_object_walker(VADriverContextP ctx,
506 struct i965_gpe_context *gpe_context,
507 struct intel_batchbuffer *batch,
508 struct gpe_media_object_walker_parameter *param);
510 #endif /* _I965_GPE_UTILS_H_ */