/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Xiang Haihao <haihao.xiang@intel.com>
 */

#ifndef _I965_GPE_UTILS_H_
#define _I965_GPE_UTILS_H_

#include <i915_drm.h>
#include <intel_bufmgr.h>

#include "i965_defines.h"
#include "i965_structs.h"

#define MAX_GPE_KERNELS    32

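/*
 * A linear buffer surface backed by a GEM buffer object: num_blocks
 * blocks of size_block bytes each, bound with the given pitch.
 */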
struct i965_buffer_surface {
    dri_bo *bo;
    unsigned int num_blocks;
    unsigned int size_block;
    unsigned int pitch;
};

enum {
    I965_GPE_RESOURCE_BUFFER = 0,
    I965_GPE_RESOURCE_2D
};

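/*
 * A GPE resource wraps a GEM buffer object together with the layout
 * information (type, width/height/pitch, size, tiling, chroma offsets)
 * needed to describe it to the GPU as either a raw buffer or a 2D surface.
 */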
struct i965_gpe_resource {
    dri_bo *bo;
    char *map;
    uint32_t type;
    uint32_t width;
    uint32_t height;
    uint32_t pitch;
    uint32_t size;
    uint32_t tiling;
    uint32_t cb_cr_pitch;
    uint32_t x_cb_offset;
    uint32_t y_cb_offset;
};

struct gpe_dynamic_state_parameter {
    dri_bo *bo;
    int bo_size;
    unsigned int curbe_offset;
    unsigned int idrt_offset;
    unsigned int sampler_offset;
};

#define PIPE_CONTROL_FLUSH_NONE         0
#define PIPE_CONTROL_FLUSH_WRITE_CACHE  1
#define PIPE_CONTROL_FLUSH_READ_CACHE   2

struct gpe_pipe_control_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int flush_mode;
    unsigned int disable_cs_stall;
    unsigned int dw0;
    unsigned int dw1;
};

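/*
 * Per-kernel-launch GPE context: the buffer objects and offsets for the
 * surface state/binding table, interface descriptor table (IDRT), CURBE
 * constants and samplers, plus the VFE state, the optional HW scoreboard
 * settings (vfe_desc5/6/7) and the kernel list to load.
 */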
struct i965_gpe_context {
    struct {
        dri_bo *bo;
        unsigned int length;            /* in bytes */
        unsigned int max_entries;
        unsigned int binding_table_offset;
        unsigned int surface_state_offset;
    } surface_state_binding_table;

    struct {
        dri_bo *bo;
        unsigned int max_entries;
        unsigned int entry_size;        /* in bytes */
        unsigned int offset;
    } idrt;

    struct {
        dri_bo *bo;
        unsigned int length;            /* in bytes */
        unsigned int offset;
    } curbe;

    struct {
        dri_bo *bo;
        unsigned int max_entries;
        unsigned int entry_size;        /* in bytes */
        unsigned int offset;
    } sampler;

    struct {
        unsigned int gpgpu_mode : 1;
        unsigned int pad0 : 7;
        unsigned int max_num_threads : 16;
        unsigned int num_urb_entries : 8;
        unsigned int urb_entry_size : 16;
        unsigned int curbe_allocation_size : 16;
    } vfe_state;

    /* vfe_desc5/6/7 are used to determine whether the HW scoreboard is used.
     * If the scoreboard is not used, don't touch them.
     */
    union {
        unsigned int dword;
        struct {
            unsigned int mask: 8;
            unsigned int pad: 22;
            unsigned int type: 1;
            unsigned int enable: 1;
        } scoreboard0;
    } vfe_desc5;

    union {
        unsigned int dword;
        struct {
            int delta_x0: 4;
            int delta_y0: 4;
            int delta_x1: 4;
            int delta_y1: 4;
            int delta_x2: 4;
            int delta_y2: 4;
            int delta_x3: 4;
            int delta_y3: 4;
        } scoreboard1;
    } vfe_desc6;

    union {
        unsigned int dword;
        struct {
            int delta_x4: 4;
            int delta_y4: 4;
            int delta_x5: 4;
            int delta_y5: 4;
            int delta_x6: 4;
            int delta_y6: 4;
            int delta_x7: 4;
            int delta_y7: 4;
        } scoreboard2;
    } vfe_desc7;

    unsigned int num_kernels;
    struct i965_kernel kernels[MAX_GPE_KERNELS];

    struct {
        dri_bo *bo;
        int bo_size;
        unsigned int end_offset;
    } instruction_state;

    struct {
        dri_bo *bo;
    } indirect_state;

    struct {
        dri_bo *bo;
        int bo_size;
        unsigned int end_offset;
    } dynamic_state;
};

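/*
 * Parameter blocks for the MI_* command helpers declared below; each one
 * carries the operands for the corresponding command-streamer command
 * (flush, store/load register, conditional batch buffer end, etc.).
 */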
struct gpe_mi_flush_dw_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int video_pipeline_cache_invalidate;
    unsigned int dw0;
    unsigned int dw1;
};

struct gpe_mi_store_data_imm_parameter {
    dri_bo *bo;
    unsigned int is_qword;
    unsigned int offset;
    unsigned int dw0;
    unsigned int dw1;
};

struct gpe_mi_store_register_mem_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_mem_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_imm_parameter {
    unsigned int data;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_reg_parameter {
    unsigned int src_mmio_offset;
    unsigned int dst_mmio_offset;
};

struct gpe_mi_math_parameter {
    unsigned int num_instructions;
    unsigned int *instruction_list;
};

struct gpe_mi_conditional_batch_buffer_end_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int compare_mask_mode_disabled;
    unsigned int compare_data;
};

struct gpe_mi_batch_buffer_start_parameter {
    dri_bo *bo;
    unsigned int offset;
    unsigned int is_second_level;
    unsigned int use_global_gtt;
};

struct gpe_mi_copy_mem_parameter {
    dri_bo *src_bo;
    unsigned int src_offset;
    dri_bo *dst_bo;
    unsigned int dst_offset;
};
void i965_gpe_context_destroy(struct i965_gpe_context *gpe_context);
void i965_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);
void i965_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);
void gen6_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);
void i965_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);
void i965_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,
                                     int write_enabled);
void i965_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);
void gen7_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);
void gen7_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,
                                     int write_enabled);
void gen7_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);
void gen75_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                          struct i965_gpe_context *gpe_context,
                                          struct object_surface *obj_surface,
                                          unsigned long binding_table_offset,
                                          unsigned long surface_state_offset,
                                          int write_enabled);

extern void gen8_gpe_surface2_setup(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct object_surface *obj_surface,
                                    unsigned long binding_table_offset,
                                    unsigned long surface_state_offset);
extern void gen8_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                            struct i965_gpe_context *gpe_context,
                                            struct object_surface *obj_surface,
                                            unsigned long binding_table_offset,
                                            unsigned long surface_state_offset,
                                            int write_enabled);
extern void gen8_gpe_buffer_suface_setup(VADriverContextP ctx,
                                         struct i965_gpe_context *gpe_context,
                                         struct i965_buffer_surface *buffer_surface,
                                         unsigned long binding_table_offset,
                                         unsigned long surface_state_offset);
extern void gen8_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                                struct i965_gpe_context *gpe_context,
                                                struct object_surface *obj_surface,
                                                unsigned long binding_table_offset,
                                                unsigned long surface_state_offset,
                                                int write_enabled);

void gen8_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);
extern void
gen8_gpe_context_set_dynamic_buffer(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct gpe_dynamic_state_parameter *ds);


void gen8_gpe_context_destroy(struct i965_gpe_context *gpe_context);
void gen8_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);

void gen8_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);

void gen9_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);

void gen9_gpe_pipeline_end(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

Bool i965_allocate_gpe_resource(dri_bufmgr *bufmgr,
                                struct i965_gpe_resource *res,
                                int size,
                                const char *name);

void i965_object_surface_to_2d_gpe_resource(struct i965_gpe_resource *res,
                                            struct object_surface *obj_surface);

void i965_object_surface_to_2d_gpe_resource_with_align(struct i965_gpe_resource *res,
                                                       struct object_surface *obj_surface,
                                                       unsigned int alignment);

void i965_dri_object_to_buffer_gpe_resource(struct i965_gpe_resource *res,
                                            dri_bo *bo);

void i965_dri_object_to_2d_gpe_resource(struct i965_gpe_resource *res,
                                        dri_bo *bo,
                                        unsigned int width,
                                        unsigned int height,
                                        unsigned int pitch);

void i965_zero_gpe_resource(struct i965_gpe_resource *res);

void i965_free_gpe_resource(struct i965_gpe_resource *res);

void *i965_map_gpe_resource(struct i965_gpe_resource *res);

void i965_unmap_gpe_resource(struct i965_gpe_resource *res);
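/*
 * Typical resource lifecycle (illustrative sketch only, using the
 * helpers declared above):
 *
 *     struct i965_gpe_resource res;
 *
 *     if (i965_allocate_gpe_resource(bufmgr, &res, size, "scratch")) {
 *         char *ptr = i965_map_gpe_resource(&res);
 *         if (ptr) {
 *             // fill or read back the buffer contents here
 *             i965_unmap_gpe_resource(&res);
 *         }
 *         i965_free_gpe_resource(&res);
 *     }
 */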

void gen8_gpe_mi_flush_dw(VADriverContextP ctx,
                          struct intel_batchbuffer *batch,
                          struct gpe_mi_flush_dw_parameter *params);

void gen8_gpe_mi_store_data_imm(VADriverContextP ctx,
                                struct intel_batchbuffer *batch,
                                struct gpe_mi_store_data_imm_parameter *params);

void gen8_gpe_mi_store_register_mem(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_store_register_mem_parameter *params);

void gen8_gpe_mi_load_register_mem(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_mem_parameter *params);

void gen8_gpe_mi_load_register_imm(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_imm_parameter *params);

void gen8_gpe_mi_load_register_reg(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_reg_parameter *params);

void gen9_gpe_mi_math(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_mi_math_parameter *params);

void gen9_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                              struct intel_batchbuffer *batch,
                                              struct gpe_mi_conditional_batch_buffer_end_parameter *params);

void gen8_gpe_mi_batch_buffer_start(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_batch_buffer_start_parameter *params);


struct gpe_media_object_inline_data {
    union {
        struct {
            unsigned int x: 8;
            unsigned int y: 8;
            unsigned int reserved: 16;
        };
        unsigned int value;
    };
};

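/*
 * Parameters for dispatching a single MEDIA_OBJECT command: the kernel
 * interface to invoke, optional scoreboard coordinates/mask, and an
 * inline data payload passed to the thread.
 */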
struct gpe_media_object_parameter {
    unsigned int use_scoreboard;
    unsigned int scoreboard_x;
    unsigned int scoreboard_y;
    unsigned int scoreboard_mask;
    unsigned int interface_offset;
    void *pinline_data;
    unsigned int inline_size;
};

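/*
 * Generic description of a surface to add to the binding table via
 * gen8/gen9_gpe_context_add_surface(): the is_* flags select how the
 * underlying gpe_resource is programmed (raw buffer, 2D surface,
 * advanced (e.g. VME) surface, UV plane, media block read/write, ...).
 */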
struct i965_gpe_surface {
    unsigned int is_buffer: 1;
    unsigned int is_2d_surface: 1;
    unsigned int is_adv_surface: 1;
    unsigned int is_uv_surface: 1;
    unsigned int is_media_block_rw: 1;
    unsigned int is_raw_buffer: 1;
    unsigned int is_16bpp     : 1;
    /* use the override_offset for 2d_surface */
    unsigned int is_override_offset : 1;

    unsigned int vert_line_stride_offset;
    unsigned int vert_line_stride;
    unsigned int cacheability_control;
    unsigned int format; // 2d surface only
    unsigned int v_direction; // adv surface only
    unsigned int size; // buffer only
    unsigned int offset;

    struct i965_gpe_resource *gpe_resource;
};

extern void
gen9_gpe_reset_binding_table(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context);
extern
void *i965_gpe_context_map_curbe(struct i965_gpe_context *gpe_context);

extern
void i965_gpe_context_unmap_curbe(struct i965_gpe_context *gpe_context);

extern
void gen8_gpe_setup_interface_data(VADriverContextP ctx,
                                   struct i965_gpe_context *gpe_context);
extern void
gen9_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,
                             int index);

extern bool
i965_gpe_allocate_2d_resource(dri_bufmgr *bufmgr,
                              struct i965_gpe_resource *res,
                              int width,
                              int height,
                              int pitch,
                              const char *name);

struct gpe_walker_xy {
    union {
        struct {
            unsigned int x: 16;
            unsigned int y: 16;
        };
        unsigned int value;
    };
};

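/*
 * Parameters for a MEDIA_OBJECT_WALKER dispatch: the walker's block
 * resolution plus the local/global loop start points, strides and unit
 * vectors that control how threads walk across the frame, along with the
 * inline data handed to each thread.
 */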
struct gpe_media_object_walker_parameter {
    void *pinline_data;
    unsigned int inline_size;
    unsigned int interface_offset;
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int group_id_loop_select;
    unsigned int color_count_minus1;
    unsigned int mid_loop_unit_x;
    unsigned int mid_loop_unit_y;
    unsigned int middle_loop_extra_steps;
    unsigned int local_loop_exec_count;
    unsigned int global_loop_exec_count;
    struct gpe_walker_xy block_resolution;
    struct gpe_walker_xy local_start;
    struct gpe_walker_xy local_end;
    struct gpe_walker_xy local_outer_loop_stride;
    struct gpe_walker_xy local_inner_loop_unit;
    struct gpe_walker_xy global_resolution;
    struct gpe_walker_xy global_start;
    struct gpe_walker_xy global_outer_loop_stride;
    struct gpe_walker_xy global_inner_loop_unit;
};

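/*
 * Simplified walker configuration used by the encoder kernels; the
 * walker_degree selects the scoreboard dependency pattern (none, 45,
 * 26, 26Z or 45Z degree) and i965_init_media_object_walker_parameter()
 * expands this into a full gpe_media_object_walker_parameter.
 */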
enum walker_degree {
    WALKER_NO_DEGREE = 0,
    WALKER_45_DEGREE,
    WALKER_26_DEGREE,
    WALKER_26Z_DEGREE,
    WALKER_45Z_DEGREE,
};
struct gpe_encoder_kernel_walker_parameter {
    unsigned int walker_degree;
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int no_dependency;
    unsigned int resolution_x;
    unsigned int resolution_y;
    unsigned int use_vertical_raster_scan;
};

extern void
gen8_gpe_media_object(VADriverContextP ctx,
                      struct i965_gpe_context *gpe_context,
                      struct intel_batchbuffer *batch,
                      struct gpe_media_object_parameter *param);

extern void
gen8_gpe_media_state_flush(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

extern void
gen8_gpe_media_object_walker(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch,
                             struct gpe_media_object_walker_parameter *param);


struct intel_vpp_kernel_walker_parameter {
    unsigned int                use_scoreboard;
    unsigned int                scoreboard_mask;
    unsigned int                no_dependency;
    unsigned int                resolution_x;
    unsigned int                resolution_y;
};

extern void
intel_vpp_init_media_object_walker_parameter(struct intel_vpp_kernel_walker_parameter *kernel_walker_param,
                                             struct gpe_media_object_walker_parameter *walker_param);
extern void
gen8_gpe_reset_binding_table(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

extern void
gen8_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,
                             int index);

extern void
gen8_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                         struct intel_batchbuffer *batch,
                                         struct gpe_mi_conditional_batch_buffer_end_parameter *param);

extern void
gen8_gpe_pipe_control(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_pipe_control_parameter *param);

extern void
i965_init_media_object_walker_parameter(struct gpe_encoder_kernel_walker_parameter *kernel_walker_param,
                                        struct gpe_media_object_walker_parameter *walker_param);

extern void
i965_add_2d_gpe_surface(VADriverContextP ctx,
                        struct i965_gpe_context *gpe_context,
                        struct object_surface *obj_surface,
                        int is_uv_surface,
                        int is_media_block_rw,
                        unsigned int format,
                        int index);
extern void
i965_add_adv_gpe_surface(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct object_surface *obj_surface,
                         int index);
extern void
i965_add_buffer_gpe_surface(VADriverContextP ctx,
                            struct i965_gpe_context *gpe_context,
                            struct i965_gpe_resource *gpe_buffer,
                            int is_raw_buffer,
                            unsigned int size,
                            unsigned int offset,
                            int index);
extern void
i965_add_buffer_2d_gpe_surface(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct i965_gpe_resource *gpe_buffer,
                               int is_media_block_rw,
                               unsigned int format,
                               int index);
extern void
gen9_add_dri_buffer_gpe_surface(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
                                dri_bo *bo,
                                int is_raw_buffer,
                                unsigned int size,
                                unsigned int offset,
                                int index);

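/*
 * Per-generation dispatch table for the GPE operations above, set up by
 * i965_gpe_table_init() so common encoder/VPP code can emit state and
 * commands without hard-coding a specific gen's helpers.
 */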
struct i965_gpe_table {
    void (*context_init)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context);

    void (*context_destroy)(struct i965_gpe_context *gpe_context);

    void (*context_add_surface)(struct i965_gpe_context *gpe_context,
                                struct i965_gpe_surface *gpe_surface,
                                int index);

    void (*reset_binding_table)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*load_kernels)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct i965_kernel *kernel_list,
                         unsigned int num_kernels);

    void (*setup_interface_data)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*set_dynamic_buffer)(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct gpe_dynamic_state_parameter *ds);

    void (*media_object)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch,
                         struct gpe_media_object_parameter *param);

    void (*media_object_walker)(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
                                struct intel_batchbuffer *batch,
                                struct gpe_media_object_walker_parameter *param);

    void (*media_state_flush)(VADriverContextP ctx,
                              struct i965_gpe_context *gpe_context,
                              struct intel_batchbuffer *batch);


    void (*pipe_control)(VADriverContextP ctx,
                         struct intel_batchbuffer *batch,
                         struct gpe_pipe_control_parameter *param);

    void (*pipeline_end)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch);              // only available on gen9+

    void (*pipeline_setup)(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

    void (*mi_conditional_batch_buffer_end)(VADriverContextP ctx,
                                            struct intel_batchbuffer *batch,
                                            struct gpe_mi_conditional_batch_buffer_end_parameter *param);

    void (*mi_batch_buffer_start)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_batch_buffer_start_parameter *params);

    void (*mi_load_register_reg)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_reg_parameter *params);

    void (*mi_load_register_imm)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_imm_parameter *params);

    void (*mi_load_register_mem)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_mem_parameter *params);


    void (*mi_store_register_mem)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_store_register_mem_parameter *params);

    void (*mi_store_data_imm)(VADriverContextP ctx,
                              struct intel_batchbuffer *batch,
                              struct gpe_mi_store_data_imm_parameter *params);

    void (*mi_flush_dw)(VADriverContextP ctx,
                        struct intel_batchbuffer *batch,
                        struct gpe_mi_flush_dw_parameter *params);

    void (*mi_copy_mem_mem)(VADriverContextP ctx,
                            struct intel_batchbuffer *batch,
                            struct gpe_mi_copy_mem_parameter *params);
};

extern bool
i965_gpe_table_init(VADriverContextP ctx);

extern void
i965_gpe_table_terminate(VADriverContextP ctx);
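
/*
 * Illustrative call sequence through the dispatch table (sketch only;
 * how the driver stores the table after i965_gpe_table_init() is not
 * shown, and "gpe" below is simply a pointer to the initialized table):
 *
 *     const struct i965_gpe_table *gpe = ...;
 *     gpe->context_init(ctx, gpe_context);
 *     gpe->load_kernels(ctx, gpe_context, kernel_list, num_kernels);
 *     gpe->pipeline_setup(ctx, gpe_context, batch);
 *     gpe->media_object_walker(ctx, gpe_context, batch, &walker_param);
 *     gpe->media_state_flush(ctx, gpe_context, batch);
 *     gpe->pipeline_end(ctx, gpe_context, batch);   // gen9+ only
 *     gpe->context_destroy(gpe_context);
 */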

#endif /* _I965_GPE_UTILS_H_ */