/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Xiang Haihao <haihao.xiang@intel.com>
 */

#ifndef _I965_GPE_UTILS_H_
#define _I965_GPE_UTILS_H_

#include <i915_drm.h>
#include <intel_bufmgr.h>

#include "i965_defines.h"
#include "i965_structs.h"

#define MAX_GPE_KERNELS    32

struct i965_buffer_surface
{
    dri_bo *bo;
    unsigned int num_blocks;
    unsigned int size_block;
    unsigned int pitch;
};

enum {
    I965_GPE_RESOURCE_BUFFER = 0,
    I965_GPE_RESOURCE_2D
};

struct i965_gpe_resource
{
    dri_bo *bo;
    char *map;
    uint32_t type;
    uint32_t width;
    uint32_t height;
    uint32_t pitch;
    uint32_t size;
    uint32_t tiling;
    uint32_t cb_cr_pitch;
    uint32_t x_cb_offset;
    uint32_t y_cb_offset;
};

struct gpe_dynamic_state_parameter
{
    dri_bo *bo;
    int bo_size;
    unsigned int curbe_offset;
    unsigned int idrt_offset;
    unsigned int sampler_offset;
};

#define PIPE_CONTROL_FLUSH_NONE         0
#define PIPE_CONTROL_FLUSH_WRITE_CACHE  1
#define PIPE_CONTROL_FLUSH_READ_CACHE   2

struct gpe_pipe_control_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int flush_mode;
    unsigned int disable_cs_stall;
    unsigned int dw0;
    unsigned int dw1;
};

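/*
 * i965_gpe_context bundles the GPU state needed to launch a media kernel:
 * the surface state/binding table, interface descriptors (IDRT), CURBE
 * constants, samplers, VFE/scoreboard configuration, the kernel list and,
 * on gen8+, the instruction/indirect/dynamic state buffers.
 */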
struct i965_gpe_context
{
    struct {
        dri_bo *bo;
        unsigned int length;            /* in bytes */
        unsigned int max_entries;
        unsigned int binding_table_offset;
        unsigned int surface_state_offset;
    } surface_state_binding_table;

    struct {
        dri_bo *bo;
        unsigned int max_entries;
        unsigned int entry_size;        /* in bytes */
        unsigned int offset;
    } idrt;

    struct {
        dri_bo *bo;
        unsigned int length;            /* in bytes */
        unsigned int offset;
    } curbe;

    struct {
        dri_bo *bo;
        unsigned int max_entries;
        unsigned int entry_size;        /* in bytes */
        unsigned int offset;
    } sampler;

    struct {
        unsigned int gpgpu_mode : 1;
        unsigned int pad0 : 7;
        unsigned int max_num_threads : 16;
        unsigned int num_urb_entries : 8;
        unsigned int urb_entry_size : 16;
        unsigned int curbe_allocation_size : 16;
    } vfe_state;

    /* vfe_desc5/6/7 are used to determine whether the HW scoreboard is used.
     * If the scoreboard is not used, don't touch them.
     */
    union {
        unsigned int dword;
        struct {
            unsigned int mask:8;
            unsigned int pad:22;
            unsigned int type:1;
            unsigned int enable:1;
        } scoreboard0;
    } vfe_desc5;

    union {
        unsigned int dword;
        struct {
            int delta_x0:4;
            int delta_y0:4;
            int delta_x1:4;
            int delta_y1:4;
            int delta_x2:4;
            int delta_y2:4;
            int delta_x3:4;
            int delta_y3:4;
        } scoreboard1;
    } vfe_desc6;

    union {
        unsigned int dword;
        struct {
            int delta_x4:4;
            int delta_y4:4;
            int delta_x5:4;
            int delta_y5:4;
            int delta_x6:4;
            int delta_y6:4;
            int delta_x7:4;
            int delta_y7:4;
        } scoreboard2;
    } vfe_desc7;

    unsigned int num_kernels;
    struct i965_kernel kernels[MAX_GPE_KERNELS];

    struct {
        dri_bo *bo;
        int bo_size;
        unsigned int end_offset;
    } instruction_state;

    struct {
        dri_bo *bo;
    } indirect_state;

    struct {
        dri_bo *bo;
        int bo_size;
        unsigned int end_offset;
    } dynamic_state;
};

struct gpe_mi_flush_dw_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int video_pipeline_cache_invalidate;
    unsigned int dw0;
    unsigned int dw1;
};

struct gpe_mi_store_data_imm_parameter
{
    dri_bo *bo;
    unsigned int is_qword;
    unsigned int offset;
    unsigned int dw0;
    unsigned int dw1;
};

struct gpe_mi_store_register_mem_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_mem_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_imm_parameter
{
    unsigned int data;
    unsigned int mmio_offset;
};

struct gpe_mi_load_register_reg_parameter
{
    unsigned int src_mmio_offset;
    unsigned int dst_mmio_offset;
};

struct gpe_mi_math_parameter
{
    unsigned int num_instructions;
    unsigned int *instruction_list;
};

struct gpe_mi_conditional_batch_buffer_end_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int compare_mask_mode_disabled;
    unsigned int compare_data;
};

struct gpe_mi_batch_buffer_start_parameter
{
    dri_bo *bo;
    unsigned int offset;
    unsigned int is_second_level;
    unsigned int use_global_gtt;
};

void i965_gpe_context_destroy(struct i965_gpe_context *gpe_context);
void i965_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);
void i965_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);
void gen6_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);
void i965_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);
void i965_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,
                                     int write_enabled);
void i965_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);
void gen7_gpe_surface2_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct object_surface *obj_surface,
                             unsigned long binding_table_offset,
                             unsigned long surface_state_offset);
void gen7_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                     struct i965_gpe_context *gpe_context,
                                     struct object_surface *obj_surface,
                                     unsigned long binding_table_offset,
                                     unsigned long surface_state_offset,
                                     int write_enabled);
void gen7_gpe_buffer_suface_setup(VADriverContextP ctx,
                                  struct i965_gpe_context *gpe_context,
                                  struct i965_buffer_surface *buffer_surface,
                                  unsigned long binding_table_offset,
                                  unsigned long surface_state_offset);
void gen75_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                          struct i965_gpe_context *gpe_context,
                                          struct object_surface *obj_surface,
                                          unsigned long binding_table_offset,
                                          unsigned long surface_state_offset,
                                          int write_enabled);

extern void gen8_gpe_surface2_setup(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct object_surface *obj_surface,
                                    unsigned long binding_table_offset,
                                    unsigned long surface_state_offset);
extern void gen8_gpe_media_rw_surface_setup(VADriverContextP ctx,
                                            struct i965_gpe_context *gpe_context,
                                            struct object_surface *obj_surface,
                                            unsigned long binding_table_offset,
                                            unsigned long surface_state_offset,
                                            int write_enabled);
extern void gen8_gpe_buffer_suface_setup(VADriverContextP ctx,
                                         struct i965_gpe_context *gpe_context,
                                         struct i965_buffer_surface *buffer_surface,
                                         unsigned long binding_table_offset,
                                         unsigned long surface_state_offset);
extern void gen8_gpe_media_chroma_surface_setup(VADriverContextP ctx,
                                                struct i965_gpe_context *gpe_context,
                                                struct object_surface *obj_surface,
                                                unsigned long binding_table_offset,
                                                unsigned long surface_state_offset,
                                                int write_enabled);

void gen8_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);
extern void
gen8_gpe_context_set_dynamic_buffer(VADriverContextP ctx,
                                    struct i965_gpe_context *gpe_context,
                                    struct gpe_dynamic_state_parameter *ds);

void gen8_gpe_context_destroy(struct i965_gpe_context *gpe_context);
void gen8_gpe_context_init(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context);

void gen8_gpe_load_kernels(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct i965_kernel *kernel_list,
                           unsigned int num_kernels);

void gen9_gpe_pipeline_setup(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch);

void gen9_gpe_pipeline_end(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

Bool i965_allocate_gpe_resource(dri_bufmgr *bufmgr,
                                struct i965_gpe_resource *res,
                                int size,
                                const char *name);

void i965_object_surface_to_2d_gpe_resource(struct i965_gpe_resource *res,
                                            struct object_surface *obj_surface);

void i965_object_surface_to_2d_gpe_resource_with_align(struct i965_gpe_resource *res,
                                                       struct object_surface *obj_surface,
                                                       unsigned int alignment);

void i965_dri_object_to_buffer_gpe_resource(struct i965_gpe_resource *res,
                                            dri_bo *bo);

void i965_dri_object_to_2d_gpe_resource(struct i965_gpe_resource *res,
                                        dri_bo *bo,
                                        unsigned int width,
                                        unsigned int height,
                                        unsigned int pitch);

void i965_zero_gpe_resource(struct i965_gpe_resource *res);

void i965_free_gpe_resource(struct i965_gpe_resource *res);

void *i965_map_gpe_resource(struct i965_gpe_resource *res);

void i965_unmap_gpe_resource(struct i965_gpe_resource *res);
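
/*
 * Rough usage sketch for the i965_gpe_resource helpers declared above
 * (illustrative only; "bufmgr" is assumed to be the driver's dri_bufmgr and
 * error handling is omitted):
 *
 *     struct i965_gpe_resource res;
 *
 *     if (i965_allocate_gpe_resource(bufmgr, &res, 4096, "scratch buffer")) {
 *         char *ptr = i965_map_gpe_resource(&res);
 *
 *         if (ptr) {
 *             memset(ptr, 0, res.size);          // CPU access while mapped
 *             i965_unmap_gpe_resource(&res);
 *         }
 *
 *         i965_free_gpe_resource(&res);          // release the backing bo
 *     }
 */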

void gen8_gpe_mi_flush_dw(VADriverContextP ctx,
                          struct intel_batchbuffer *batch,
                          struct gpe_mi_flush_dw_parameter *params);

void gen8_gpe_mi_store_data_imm(VADriverContextP ctx,
                                struct intel_batchbuffer *batch,
                                struct gpe_mi_store_data_imm_parameter *params);

void gen8_gpe_mi_store_register_mem(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_store_register_mem_parameter *params);

void gen8_gpe_mi_load_register_mem(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_mem_parameter *params);

void gen8_gpe_mi_load_register_imm(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_imm_parameter *params);

void gen8_gpe_mi_load_register_reg(VADriverContextP ctx,
                                   struct intel_batchbuffer *batch,
                                   struct gpe_mi_load_register_reg_parameter *params);

void gen9_gpe_mi_math(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_mi_math_parameter *params);

void gen9_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                              struct intel_batchbuffer *batch,
                                              struct gpe_mi_conditional_batch_buffer_end_parameter *params);

void gen8_gpe_mi_batch_buffer_start(VADriverContextP ctx,
                                    struct intel_batchbuffer *batch,
                                    struct gpe_mi_batch_buffer_start_parameter *params);

struct gpe_media_object_inline_data
{
    union {
        struct {
            unsigned int x:8;
            unsigned int y:8;
            unsigned int reserved:16;
        };
        unsigned int value;
    };
};

struct gpe_media_object_parameter
{
    unsigned int use_scoreboard;
    unsigned int scoreboard_x;
    unsigned int scoreboard_y;
    unsigned int scoreboard_mask;
    unsigned int interface_offset;
    void *pinline_data;
    unsigned int inline_size;
};

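/*
 * Describes one surface to be placed in the binding table via
 * gen8/gen9_gpe_context_add_surface().  The is_* flags select which kind of
 * surface state gets programmed (buffer, 2D, advanced, UV plane, ...).
 */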
struct i965_gpe_surface
{
    unsigned int is_buffer:1;
    unsigned int is_2d_surface:1;
    unsigned int is_adv_surface:1;
    unsigned int is_uv_surface:1;
    unsigned int is_media_block_rw:1;
    unsigned int is_raw_buffer:1;
    unsigned int is_16bpp:1;
    /* use the override_offset for 2d_surface */
    unsigned int is_override_offset:1;

    unsigned int vert_line_stride_offset;
    unsigned int vert_line_stride;
    unsigned int cacheability_control;
    unsigned int format;      // 2d surface only
    unsigned int v_direction; // adv surface only
    unsigned int size;        // buffer only
    unsigned int offset;

    struct i965_gpe_resource *gpe_resource;
};

extern void
gen9_gpe_reset_binding_table(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context);
extern
void *i965_gpe_context_map_curbe(struct i965_gpe_context *gpe_context);

extern
void i965_gpe_context_unmap_curbe(struct i965_gpe_context *gpe_context);

extern
void gen8_gpe_setup_interface_data(VADriverContextP ctx,
                                   struct i965_gpe_context *gpe_context);
extern void
gen9_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,
                             int index);

extern bool
i965_gpe_allocate_2d_resource(dri_bufmgr *bufmgr,
                              struct i965_gpe_resource *res,
                              int width,
                              int height,
                              int pitch,
                              const char *name);

struct gpe_walker_xy
{
    union {
        struct {
            unsigned int x:16;
            unsigned int y:16;
        };
        unsigned int value;
    };
};

struct gpe_media_object_walker_parameter
{
    void *pinline_data;
    unsigned int inline_size;
    unsigned int interface_offset;
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int group_id_loop_select;
    unsigned int color_count_minus1;
    unsigned int mid_loop_unit_x;
    unsigned int mid_loop_unit_y;
    unsigned int middle_loop_extra_steps;
    unsigned int local_loop_exec_count;
    unsigned int global_loop_exec_count;
    struct gpe_walker_xy block_resolution;
    struct gpe_walker_xy local_start;
    struct gpe_walker_xy local_end;
    struct gpe_walker_xy local_outer_loop_stride;
    struct gpe_walker_xy local_inner_loop_unit;
    struct gpe_walker_xy global_resolution;
    struct gpe_walker_xy global_start;
    struct gpe_walker_xy global_outer_loop_stride;
    struct gpe_walker_xy global_inner_loop_unit;
};

enum walker_degree
{
    WALKER_NO_DEGREE = 0,
    WALKER_45_DEGREE,
    WALKER_26_DEGREE,
    WALKER_26Z_DEGREE,
    WALKER_45Z_DEGREE,
};

struct gpe_encoder_kernel_walker_parameter
{
    unsigned int walker_degree;
    unsigned int use_scoreboard;
    unsigned int scoreboard_mask;
    unsigned int no_dependency;
    unsigned int resolution_x;
    unsigned int resolution_y;
    unsigned int use_vertical_raster_scan;
};

extern void
gen8_gpe_media_object(VADriverContextP ctx,
                      struct i965_gpe_context *gpe_context,
                      struct intel_batchbuffer *batch,
                      struct gpe_media_object_parameter *param);

extern void
gen8_gpe_media_state_flush(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

extern void
gen8_gpe_media_object_walker(VADriverContextP ctx,
                             struct i965_gpe_context *gpe_context,
                             struct intel_batchbuffer *batch,
                             struct gpe_media_object_walker_parameter *param);

struct intel_vpp_kernel_walker_parameter
{
    unsigned int                use_scoreboard;
    unsigned int                scoreboard_mask;
    unsigned int                no_dependency;
    unsigned int                resolution_x;
    unsigned int                resolution_y;
};

extern void
intel_vpp_init_media_object_walker_parameter(struct intel_vpp_kernel_walker_parameter *kernel_walker_param,
                                             struct gpe_media_object_walker_parameter *walker_param);
extern void
gen8_gpe_reset_binding_table(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

extern void
gen8_gpe_context_add_surface(struct i965_gpe_context *gpe_context,
                             struct i965_gpe_surface *gpe_surface,
                             int index);

extern void
gen8_gpe_mi_conditional_batch_buffer_end(VADriverContextP ctx,
                                         struct intel_batchbuffer *batch,
                                         struct gpe_mi_conditional_batch_buffer_end_parameter *param);

extern void
gen8_gpe_pipe_control(VADriverContextP ctx,
                      struct intel_batchbuffer *batch,
                      struct gpe_pipe_control_parameter *param);

extern void
i965_init_media_object_walker_parameter(struct gpe_encoder_kernel_walker_parameter *kernel_walker_param,
                                        struct gpe_media_object_walker_parameter *walker_param);
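
/*
 * Rough sketch of launching a kernel with the hardware walker (illustrative
 * only; the kernel index, 16x16 block math and scoreboard settings are
 * assumptions made for the sketch, not requirements of this API):
 *
 *     struct gpe_encoder_kernel_walker_parameter kernel_param;
 *     struct gpe_media_object_walker_parameter walker_param;
 *
 *     memset(&kernel_param, 0, sizeof(kernel_param));
 *     kernel_param.resolution_x = ALIGN(frame_width, 16) / 16;   // walker units, e.g. macroblocks
 *     kernel_param.resolution_y = ALIGN(frame_height, 16) / 16;
 *     kernel_param.no_dependency = 1;                            // no scoreboard dependency
 *
 *     i965_init_media_object_walker_parameter(&kernel_param, &walker_param);
 *     walker_param.interface_offset = kernel_index;              // which interface descriptor to run
 *
 *     gen8_gpe_media_object_walker(ctx, gpe_context, batch, &walker_param);
 *     gen8_gpe_media_state_flush(ctx, gpe_context, batch);
 */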

extern void
gen9_add_2d_gpe_surface(VADriverContextP ctx,
                        struct i965_gpe_context *gpe_context,
                        struct object_surface *obj_surface,
                        int is_uv_surface,
                        int is_media_block_rw,
                        unsigned int format,
                        int index);
extern void
gen9_add_adv_gpe_surface(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct object_surface *obj_surface,
                         int index);
extern void
gen9_add_buffer_gpe_surface(VADriverContextP ctx,
                            struct i965_gpe_context *gpe_context,
                            struct i965_gpe_resource *gpe_buffer,
                            int is_raw_buffer,
                            unsigned int size,
                            unsigned int offset,
                            int index);
extern void
gen9_add_buffer_2d_gpe_surface(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct i965_gpe_resource *gpe_buffer,
                               int is_media_block_rw,
                               unsigned int format,
                               int index);
extern void
gen9_add_dri_buffer_gpe_surface(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
                                dri_bo *bo,
                                int is_raw_buffer,
                                unsigned int size,
                                unsigned int offset,
                                int index);
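
/*
 * Example of binding surfaces before a kernel launch (illustrative only; the
 * binding-table indices and the I965_SURFACEFORMAT_R8_UNORM format are
 * assumptions chosen for the sketch, not values required by this API):
 *
 *     // frame surface at binding-table index 0, as a media block R/W surface
 *     gen9_add_2d_gpe_surface(ctx, gpe_context, obj_surface,
 *                             0, 1, I965_SURFACEFORMAT_R8_UNORM, 0);
 *
 *     // raw buffer resource (e.g. statistics output) at index 1
 *     gen9_add_buffer_gpe_surface(ctx, gpe_context, &stats_buffer,
 *                                 1, stats_buffer.size, 0, 1);
 */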

/* Per-generation dispatch table for the GPE helpers (see i965_gpe_table_init()). */
struct i965_gpe_table
{
    void (*context_init)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context);

    void (*context_destroy)(struct i965_gpe_context *gpe_context);

    void (*context_add_surface)(struct i965_gpe_context *gpe_context,
                                struct i965_gpe_surface *gpe_surface,
                                int index);

    void (*reset_binding_table)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*load_kernels)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct i965_kernel *kernel_list,
                         unsigned int num_kernels);

    void (*setup_interface_data)(VADriverContextP ctx, struct i965_gpe_context *gpe_context);

    void (*set_dynamic_buffer)(VADriverContextP ctx,
                               struct i965_gpe_context *gpe_context,
                               struct gpe_dynamic_state_parameter *ds);

    void (*media_object)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch,
                         struct gpe_media_object_parameter *param);

    void (*media_object_walker)(VADriverContextP ctx,
                                struct i965_gpe_context *gpe_context,
                                struct intel_batchbuffer *batch,
                                struct gpe_media_object_walker_parameter *param);

    void (*media_state_flush)(VADriverContextP ctx,
                              struct i965_gpe_context *gpe_context,
                              struct intel_batchbuffer *batch);

    void (*pipe_control)(VADriverContextP ctx,
                         struct intel_batchbuffer *batch,
                         struct gpe_pipe_control_parameter *param);

    void (*pipeline_end)(VADriverContextP ctx,
                         struct i965_gpe_context *gpe_context,
                         struct intel_batchbuffer *batch);              // only available on gen9+

    void (*pipeline_setup)(VADriverContextP ctx,
                           struct i965_gpe_context *gpe_context,
                           struct intel_batchbuffer *batch);

    void (*mi_conditional_batch_buffer_end)(VADriverContextP ctx,
                                            struct intel_batchbuffer *batch,
                                            struct gpe_mi_conditional_batch_buffer_end_parameter *param);

    void (*mi_batch_buffer_start)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_batch_buffer_start_parameter *params);

    void (*mi_load_register_reg)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_reg_parameter *params);

    void (*mi_load_register_imm)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_imm_parameter *params);

    void (*mi_load_register_mem)(VADriverContextP ctx,
                                 struct intel_batchbuffer *batch,
                                 struct gpe_mi_load_register_mem_parameter *params);

    void (*mi_store_register_mem)(VADriverContextP ctx,
                                  struct intel_batchbuffer *batch,
                                  struct gpe_mi_store_register_mem_parameter *params);

    void (*mi_store_data_imm)(VADriverContextP ctx,
                              struct intel_batchbuffer *batch,
                              struct gpe_mi_store_data_imm_parameter *params);

    void (*mi_flush_dw)(VADriverContextP ctx,
                        struct intel_batchbuffer *batch,
                        struct gpe_mi_flush_dw_parameter *params);
};
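
/*
 * Minimal sketch of driving a kernel through the dispatch table (illustrative
 * only; "gpe" is assumed to point at the table set up by i965_gpe_table_init(),
 * and curbe_data/kernel_list/walker_param are placeholders):
 *
 *     const struct i965_gpe_table *gpe = ...;    // obtained from the driver data
 *
 *     gpe->context_init(ctx, gpe_context);
 *     gpe->load_kernels(ctx, gpe_context, kernel_list, num_kernels);
 *
 *     char *curbe = i965_gpe_context_map_curbe(gpe_context);
 *     if (curbe) {
 *         memcpy(curbe, curbe_data, curbe_size);  // upload kernel constants
 *         i965_gpe_context_unmap_curbe(gpe_context);
 *     }
 *
 *     gpe->setup_interface_data(ctx, gpe_context);
 *     gpe->pipeline_setup(ctx, gpe_context, batch);
 *     gpe->media_object_walker(ctx, gpe_context, batch, &walker_param);
 *     gpe->media_state_flush(ctx, gpe_context, batch);
 *
 *     if (gpe->pipeline_end)                      // gen9+ only
 *         gpe->pipeline_end(ctx, gpe_context, batch);
 *
 *     gpe->context_destroy(gpe_context);
 */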

extern bool
i965_gpe_table_init(VADriverContextP ctx);

extern void
i965_gpe_table_terminate(VADriverContextP ctx);

#endif /* _I965_GPE_UTILS_H_ */