OSDN Git Service

Add vdenc common commands for CNL
[android-x86/hardware-intel-common-vaapi.git] / src / gen7_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <stdbool.h>
32 #include <string.h>
33 #include <assert.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_encoder.h"
41 #include "gen6_vme.h"
42 #include "gen6_mfc.h"
43 #ifdef SURFACE_STATE_PADDED_SIZE
44 #undef SURFACE_STATE_PADDED_SIZE
45 #endif
46
47 #define VME_MSG_LENGTH      32
48
49 #define SURFACE_STATE_PADDED_SIZE               SURFACE_STATE_PADDED_SIZE_GEN7
50 #define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
51 #define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)
52
53 #define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
54 #define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
55 #define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
56
/* Codec families handled by this GEN7 VME module */
enum VIDEO_CODING_TYPE {
    VIDEO_CODING_AVC = 0,
    VIDEO_CODING_MPEG2,
    VIDEO_CODING_SUM    /* number of codec families, not a real codec */
};
62
/* Indices into gen7_vme_kernels[]; order must match that table */
enum AVC_VME_KERNEL_TYPE {
    AVC_VME_INTRA_SHADER = 0,   /* intra-frame motion estimation */
    AVC_VME_INTER_SHADER,       /* P-frame (single list) inter search */
    AVC_VME_BATCHBUFFER,        /* kernel that emits the second-level batch */
    AVC_VME_BINTER_SHADER,      /* B-frame (bi-directional) inter search */
    AVC_VME_KERNEL_SUM          /* number of AVC kernels */
};
70
/* Indices into gen7_vme_mpeg2_kernels[]; order must match that table */
enum MPEG2_VME_KERNEL_TYPE {
    MPEG2_VME_INTER_SHADER = 0, /* MPEG-2 inter search */
    MPEG2_VME_BATCHBUFFER,      /* kernel that emits the second-level batch */
    MPEG2_VME_KERNEL_SUM        /* number of MPEG-2 kernels */
};
76
77
/* Pre-compiled GEN7 (Ivybridge) EU shader binaries for the AVC VME
 * kernels.  Each .g7b file is a comma-separated dump of 128-bit
 * instructions, hence the [][4] uint32_t layout. */
static const uint32_t gen7_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_ivb.g7b"
};

static const uint32_t gen7_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_ivb.g7b"
};

static const uint32_t gen7_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

static const uint32_t gen7_vme_binter_frame[][4] = {
#include "shaders/vme/inter_bframe_ivb.g7b"
};
93
/* AVC kernel dispatch table, indexed by enum AVC_VME_KERNEL_TYPE.
 * Fields: name, interface index, shader binary, binary size, bo
 * (bo is filled in when the kernels are uploaded). */
static struct i965_kernel gen7_vme_kernels[] = {
    {
        "AVC VME Intra Frame",
        AVC_VME_INTRA_SHADER,           /*index*/
        gen7_vme_intra_frame,
        sizeof(gen7_vme_intra_frame),
        NULL
    },
    {
        "AVC VME inter Frame",
        AVC_VME_INTER_SHADER,
        gen7_vme_inter_frame,
        sizeof(gen7_vme_inter_frame),
        NULL
    },
    {
        "AVC VME BATCHBUFFER",
        AVC_VME_BATCHBUFFER,
        gen7_vme_batchbuffer,
        sizeof(gen7_vme_batchbuffer),
        NULL
    },
    {
        "AVC VME binter Frame",
        AVC_VME_BINTER_SHADER,
        gen7_vme_binter_frame,
        sizeof(gen7_vme_binter_frame),
        NULL
    }
};
124
/* Pre-compiled GEN7 shader binaries for the MPEG-2 VME kernels.
 * The batchbuffer kernel is shared with the AVC path (same .g7b). */
static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_ivb.g7b"
};

static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};
132
/* MPEG-2 kernel dispatch table, indexed by enum MPEG2_VME_KERNEL_TYPE */
static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
    {
        "MPEG2 VME inter Frame",
        MPEG2_VME_INTER_SHADER,
        gen7_vme_mpeg2_inter_frame,
        sizeof(gen7_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "MPEG2 VME BATCHBUFFER",
        MPEG2_VME_BATCHBUFFER,
        gen7_vme_mpeg2_batchbuffer,
        sizeof(gen7_vme_mpeg2_batchbuffer),
        NULL
    },
};
149
150 /* only used for VME source surface state */
151 static void
152 gen7_vme_source_surface_state(VADriverContextP ctx,
153                               int index,
154                               struct object_surface *obj_surface,
155                               struct intel_encoder_context *encoder_context)
156 {
157     struct gen6_vme_context *vme_context = encoder_context->vme_context;
158
159     vme_context->vme_surface2_setup(ctx,
160                                     &vme_context->gpe_context,
161                                     obj_surface,
162                                     BINDING_TABLE_OFFSET(index),
163                                     SURFACE_STATE_OFFSET(index));
164 }
165
166 static void
167 gen7_vme_media_source_surface_state(VADriverContextP ctx,
168                                     int index,
169                                     struct object_surface *obj_surface,
170                                     struct intel_encoder_context *encoder_context)
171 {
172     struct gen6_vme_context *vme_context = encoder_context->vme_context;
173
174     vme_context->vme_media_rw_surface_setup(ctx,
175                                             &vme_context->gpe_context,
176                                             obj_surface,
177                                             BINDING_TABLE_OFFSET(index),
178                                             SURFACE_STATE_OFFSET(index),
179                                             0);
180 }
181
182 static void
183 gen7_vme_output_buffer_setup(VADriverContextP ctx,
184                              struct encode_state *encode_state,
185                              int index,
186                              struct intel_encoder_context *encoder_context)
187
188 {
189     struct i965_driver_data *i965 = i965_driver_data(ctx);
190     struct gen6_vme_context *vme_context = encoder_context->vme_context;
191     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
192     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
193     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
194     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
195     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
196
197     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
198     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
199
200     if (is_intra)
201         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
202     else
203         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
204
205     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
206                                               "VME output buffer",
207                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
208                                               0x1000);
209     assert(vme_context->vme_output.bo);
210     vme_context->vme_buffer_suface_setup(ctx,
211                                          &vme_context->gpe_context,
212                                          &vme_context->vme_output,
213                                          BINDING_TABLE_OFFSET(index),
214                                          SURFACE_STATE_OFFSET(index));
215 }
216
217 static void
218 gen7_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
219                                       struct encode_state *encode_state,
220                                       int index,
221                                       struct intel_encoder_context *encoder_context)
222
223 {
224     struct i965_driver_data *i965 = i965_driver_data(ctx);
225     struct gen6_vme_context *vme_context = encoder_context->vme_context;
226     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
227     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
228     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
229
230     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
231     vme_context->vme_batchbuffer.size_block = 64; /* 4 OWORDs */
232     vme_context->vme_batchbuffer.pitch = 16;
233     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr,
234                                                    "VME batchbuffer",
235                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
236                                                    0x1000);
237     vme_context->vme_buffer_suface_setup(ctx,
238                                          &vme_context->gpe_context,
239                                          &vme_context->vme_batchbuffer,
240                                          BINDING_TABLE_OFFSET(index),
241                                          SURFACE_STATE_OFFSET(index));
242 }
243
244 static VAStatus
245 gen7_vme_surface_setup(VADriverContextP ctx,
246                        struct encode_state *encode_state,
247                        int is_intra,
248                        struct intel_encoder_context *encoder_context)
249 {
250     struct object_surface *obj_surface;
251
252     /*Setup surfaces state*/
253     /* current picture for encoding */
254     obj_surface = encode_state->input_yuv_object;
255     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
256     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
257
258     if (!is_intra) {
259         VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
260         int slice_type;
261
262         slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
263         assert(slice_type != SLICE_TYPE_I && slice_type != SLICE_TYPE_SI);
264
265         intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 0, 1, gen7_vme_source_surface_state);
266
267         if (slice_type == SLICE_TYPE_B)
268             intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 1, 2, gen7_vme_source_surface_state);
269     }
270
271     /* VME output */
272     gen7_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
273     gen7_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
274     intel_h264_setup_cost_surface(ctx, encode_state, encoder_context,
275                                   BINDING_TABLE_OFFSET(INTEL_COST_TABLE_OFFSET),
276                                   SURFACE_STATE_OFFSET(INTEL_COST_TABLE_OFFSET));
277
278     return VA_STATUS_SUCCESS;
279 }
280
281 static VAStatus gen7_vme_interface_setup(VADriverContextP ctx,
282                                          struct encode_state *encode_state,
283                                          struct intel_encoder_context *encoder_context)
284 {
285     struct gen6_vme_context *vme_context = encoder_context->vme_context;
286     struct gen6_interface_descriptor_data *desc;
287     int i;
288     dri_bo *bo;
289
290     bo = vme_context->gpe_context.idrt.bo;
291     dri_bo_map(bo, 1);
292     assert(bo->virtual);
293     desc = bo->virtual;
294
295     for (i = 0; i < vme_context->vme_kernel_sum; i++) {
296         struct i965_kernel *kernel;
297         kernel = &vme_context->gpe_context.kernels[i];
298         assert(sizeof(*desc) == 32);
299         /*Setup the descritor table*/
300         memset(desc, 0, sizeof(*desc));
301         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
302         desc->desc2.sampler_count = 1; /* FIXME: */
303         desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5);
304         desc->desc3.binding_table_entry_count = 1; /* FIXME: */
305         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
306         desc->desc4.constant_urb_entry_read_offset = 0;
307         desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
308
309         /*kernel start*/
310         dri_bo_emit_reloc(bo,
311                           I915_GEM_DOMAIN_INSTRUCTION, 0,
312                           0,
313                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
314                           kernel->bo);
315         /*Sampler State(VME state pointer)*/
316         dri_bo_emit_reloc(bo,
317                           I915_GEM_DOMAIN_INSTRUCTION, 0,
318                           (1 << 2),                                 //
319                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
320                           vme_context->vme_state.bo);
321         desc++;
322     }
323     dri_bo_unmap(bo);
324
325     return VA_STATUS_SUCCESS;
326 }
327
328 static VAStatus gen7_vme_constant_setup(VADriverContextP ctx,
329                                         struct encode_state *encode_state,
330                                         struct intel_encoder_context *encoder_context)
331 {
332     struct gen6_vme_context *vme_context = encoder_context->vme_context;
333     unsigned char *constant_buffer;
334     unsigned int *vme_state_message;
335     int mv_num;
336
337     vme_state_message = (unsigned int *)vme_context->vme_state_message;
338     mv_num = 32;
339
340     if (encoder_context->codec == CODEC_H264) {
341         if (vme_context->h264_level >= 30) {
342             mv_num = 16;
343
344             if (vme_context->h264_level >= 31)
345                 mv_num = 8;
346         }
347     } else if (encoder_context->codec == CODEC_MPEG2) {
348         mv_num = 2;
349     }
350
351
352     vme_state_message[31] = mv_num;
353
354     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
355     assert(vme_context->gpe_context.curbe.bo->virtual);
356     constant_buffer = vme_context->gpe_context.curbe.bo->virtual;
357
358     /* Pass the required constant info into the constant buffer */
359     memcpy(constant_buffer, (char *)vme_context->vme_state_message, 128);
360
361     dri_bo_unmap(vme_context->gpe_context.curbe.bo);
362
363     return VA_STATUS_SUCCESS;
364 }
365
366
367 static VAStatus gen7_vme_avc_state_setup(VADriverContextP ctx,
368                                          struct encode_state *encode_state,
369                                          int is_intra,
370                                          struct intel_encoder_context *encoder_context)
371 {
372     struct gen6_vme_context *vme_context = encoder_context->vme_context;
373     unsigned int *vme_state_message;
374     unsigned int *mb_cost_table;
375     int i;
376     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
377     unsigned int is_low_quality = (encoder_context->quality_level == ENCODER_LOW_QUALITY);
378     dri_bo *cost_bo;
379     int slice_type;
380     uint8_t *cost_ptr;
381     int qp;
382
383     slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
384
385     if (slice_type == SLICE_TYPE_I) {
386         cost_bo = vme_context->i_qp_cost_table;
387     } else if (slice_type == SLICE_TYPE_P) {
388         cost_bo = vme_context->p_qp_cost_table;
389     } else {
390         cost_bo = vme_context->b_qp_cost_table;
391     }
392
393     mb_cost_table = (unsigned int *)vme_context->vme_state_message;
394     dri_bo_map(vme_context->vme_state.bo, 1);
395     dri_bo_map(cost_bo, 0);
396     assert(vme_context->vme_state.bo->virtual);
397     assert(cost_bo->virtual);
398     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
399
400     cost_ptr = (uint8_t *)cost_bo->virtual;
401
402     /* up to 8 VME_SEARCH_PATH_LUT is supported */
403     /* Two subsequent qp will share the same mode/motion-vector cost table */
404     /* the range is from 0-51 */
405     for (i = 0; i < 8; i++)  {
406
407         vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual +
408                             i * 32;
409         if ((slice_type == SLICE_TYPE_P) && !is_low_quality) {
410             vme_state_message[0] = 0x01010101;
411             vme_state_message[1] = 0x10010101;
412             vme_state_message[2] = 0x0F0F0F0F;
413             vme_state_message[3] = 0x100F0F0F;
414             vme_state_message[4] = 0x01010101;
415             vme_state_message[5] = 0x10010101;
416             vme_state_message[6] = 0x0F0F0F0F;
417             vme_state_message[7] = 0x100F0F0F;
418             vme_state_message[8] = 0x01010101;
419             vme_state_message[9] = 0x10010101;
420             vme_state_message[10] = 0x0F0F0F0F;
421             vme_state_message[11] = 0x000F0F0F;
422             vme_state_message[12] = 0x00;
423             vme_state_message[13] = 0x00;
424         } else {
425             vme_state_message[0] = 0x10010101;
426             vme_state_message[1] = 0x100F0F0F;
427             vme_state_message[2] = 0x10010101;
428             vme_state_message[3] = 0x000F0F0F;
429             vme_state_message[4] = 0;
430             vme_state_message[5] = 0;
431             vme_state_message[6] = 0;
432             vme_state_message[7] = 0;
433             vme_state_message[8] = 0;
434             vme_state_message[9] = 0;
435             vme_state_message[10] = 0;
436             vme_state_message[11] = 0;
437             vme_state_message[12] = 0;
438             vme_state_message[13] = 0;
439         }
440
441         qp = 8 * i;
442
443         /* when qp is greater than 51, use the cost_table of qp=51 to fulfill */
444         if (qp > 51) {
445             qp = 51;
446         }
447         /* Setup the four LUT sets for MbMV cost */
448         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
449         vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
450         vme_state_message[16] = mb_cost_table[0];
451         vme_state_message[17] = mb_cost_table[1];
452         vme_state_message[18] = mb_cost_table[3];
453         vme_state_message[19] = mb_cost_table[4];
454
455         qp += 2;
456         if (qp > 51) {
457             qp = 51;
458         }
459         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
460         vme_state_message[14] |= ((mb_cost_table[2] & 0xFFFF) << 16);
461         vme_state_message[20] = mb_cost_table[0];
462         vme_state_message[21] = mb_cost_table[1];
463         vme_state_message[22] = mb_cost_table[3];
464         vme_state_message[23] = mb_cost_table[4];
465
466         qp += 2;
467         if (qp > 51) {
468             qp = 51;
469         }
470         vme_state_message[15] = (mb_cost_table[2] & 0xFFFF);
471         vme_state_message[24] = mb_cost_table[0];
472         vme_state_message[25] = mb_cost_table[1];
473         vme_state_message[26] = mb_cost_table[3];
474         vme_state_message[27] = mb_cost_table[4];
475
476         qp += 2;
477         if (qp > 51) {
478             qp = 51;
479         }
480         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
481         vme_state_message[15] |= ((mb_cost_table[2] & 0xFFFF) << 16);
482         vme_state_message[28] = mb_cost_table[0];
483         vme_state_message[29] = mb_cost_table[1];
484         vme_state_message[30] = mb_cost_table[3];
485         vme_state_message[31] = mb_cost_table[4];
486     }
487
488     dri_bo_unmap(cost_bo);
489     dri_bo_unmap(vme_context->vme_state.bo);
490     return VA_STATUS_SUCCESS;
491 }
492
493 static VAStatus gen7_vme_mpeg2_state_setup(VADriverContextP ctx,
494                                            struct encode_state *encode_state,
495                                            int is_intra,
496                                            struct intel_encoder_context *encoder_context)
497 {
498     struct gen6_vme_context *vme_context = encoder_context->vme_context;
499     unsigned int *vme_state_message;
500     int i;
501     unsigned int *mb_cost_table;
502
503     mb_cost_table = (unsigned int *)vme_context->vme_state_message;
504
505     //building VME state message
506     dri_bo_map(vme_context->vme_state.bo, 1);
507     assert(vme_context->vme_state.bo->virtual);
508     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
509
510     vme_state_message[0] = 0x01010101;
511     vme_state_message[1] = 0x10010101;
512     vme_state_message[2] = 0x0F0F0F0F;
513     vme_state_message[3] = 0x100F0F0F;
514     vme_state_message[4] = 0x01010101;
515     vme_state_message[5] = 0x10010101;
516     vme_state_message[6] = 0x0F0F0F0F;
517     vme_state_message[7] = 0x100F0F0F;
518     vme_state_message[8] = 0x01010101;
519     vme_state_message[9] = 0x10010101;
520     vme_state_message[10] = 0x0F0F0F0F;
521     vme_state_message[11] = 0x000F0F0F;
522     vme_state_message[12] = 0x00;
523     vme_state_message[13] = 0x00;
524
525     vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
526     vme_state_message[15] = 0;
527     vme_state_message[16] = mb_cost_table[0];
528     vme_state_message[17] = 0;
529     vme_state_message[18] = mb_cost_table[3];
530     vme_state_message[19] = mb_cost_table[4];
531
532     for (i = 20; i < 32; i++) {
533         vme_state_message[i] = 0;
534     }
535     //vme_state_message[16] = 0x42424242;           //cost function LUT set 0 for Intra
536
537     dri_bo_unmap(vme_context->vme_state.bo);
538     return VA_STATUS_SUCCESS;
539 }
540
/* Fill the second-level batchbuffer with one MEDIA_OBJECT command per
 * macroblock.  Each command carries inline data: MB position, quality /
 * transform flags, the intra-prediction neighbor-availability mask, and
 * the per-MB QP (from the ROI map when enabled, otherwise the slice QP). */
static void
gen7_vme_fill_vme_batchbuffer(VADriverContextP ctx,
                              struct encode_state *encode_state,
                              int mb_width, int mb_height,
                              int kernel,
                              int transform_8x8_mode_flag,
                              struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
    int qp;
    int slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
    int qp_mb, qp_index;

    /* CQP: QP comes straight from the parameters; otherwise use the
     * BRC-computed QP for this temporal layer and slice type. */
    if (encoder_context->rate_control_mode == VA_RC_CQP)
        qp = pic_param->pic_init_qp + slice_param->slice_qp_delta;
    else
        qp = mfc_context->brc.qp_prime_y[encoder_context->layer.curr_frame_layer_id][slice_type];

    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        /* NOTE(review): this is the AVC path, yet the per-slice buffer is
         * cast to VAEncSliceParameterBufferMPEG2.  Only macroblock_address
         * and num_macroblocks are read, but the slice_param++ stride below
         * uses the MPEG-2 struct size — verify against va.h that this is
         * intended for num_elements > 1. */
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;
            /* non-zero when the slice starts mid-row */
            int slice_mb_x = slice_param->macroblock_address % mb_width;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* Neighbor availability within the picture ... */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width - 1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                /* ... restricted for MBs in the slice's first row, which
                 * cannot predict from MBs outside this slice */
                if (i < mb_width) {
                    if (i == 0)
                        mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);

                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);

                    if ((i == (mb_width - 1)) && slice_mb_x) {
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                    }
                }

                if ((i == mb_width) && slice_mb_x) {
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
                }

                /* MEDIA_OBJECT header: 9 total DWords -> length field 9-2 */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (9 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;

                /*inline data */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ((encoder_context->quality_level << 24) | (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                /* per-MB QP from the ROI map when enabled */
                if (vme_context->roi_enabled) {
                    qp_index = mb_y * mb_width + mb_x;
                    qp_mb = *(vme_context->qp_per_mb + qp_index);
                } else
                    qp_mb = qp;
                *command_ptr++ = qp_mb;

                i += 1;
            }

            slice_param++;
        }
    }

    /* terminate the second-level batch */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
643
644
645 static void gen7_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
646 {
647     struct i965_driver_data *i965 = i965_driver_data(ctx);
648     struct gen6_vme_context *vme_context = encoder_context->vme_context;
649     dri_bo *bo;
650
651     i965_gpe_context_init(ctx, &vme_context->gpe_context);
652
653     /* VME output buffer */
654     dri_bo_unreference(vme_context->vme_output.bo);
655     vme_context->vme_output.bo = NULL;
656
657     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
658     vme_context->vme_batchbuffer.bo = NULL;
659
660     /* VME state */
661     dri_bo_unreference(vme_context->vme_state.bo);
662     bo = dri_bo_alloc(i965->intel.bufmgr,
663                       "Buffer",
664                       1024 * 16, 64);
665     assert(bo);
666     vme_context->vme_state.bo = bo;
667 }
668
669 static void gen7_vme_pipeline_programing(VADriverContextP ctx,
670                                          struct encode_state *encode_state,
671                                          struct intel_encoder_context *encoder_context)
672 {
673     struct gen6_vme_context *vme_context = encoder_context->vme_context;
674     struct intel_batchbuffer *batch = encoder_context->base.batch;
675     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
676     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
677     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
678     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
679     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
680     int s;
681     bool allow_hwscore = true;
682     int kernel_shader;
683     unsigned int is_low_quality = (encoder_context->quality_level == ENCODER_LOW_QUALITY);
684
685     if (is_low_quality)
686         allow_hwscore = false;
687     else {
688         for (s = 0; s < encode_state->num_slice_params_ext; s++) {
689             pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer;
690             if ((pSliceParameter->macroblock_address % width_in_mbs)) {
691                 allow_hwscore = false;
692                 break;
693             }
694         }
695     }
696
697     if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
698         (pSliceParameter->slice_type == SLICE_TYPE_SI)) {
699         kernel_shader = AVC_VME_INTRA_SHADER;
700     } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
701                (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
702         kernel_shader = AVC_VME_INTER_SHADER;
703     } else {
704         kernel_shader = AVC_VME_BINTER_SHADER;
705         if (!allow_hwscore)
706             kernel_shader = AVC_VME_INTER_SHADER;
707     }
708
709     if (allow_hwscore)
710         gen7_vme_walker_fill_vme_batchbuffer(ctx,
711                                              encode_state,
712                                              width_in_mbs, height_in_mbs,
713                                              kernel_shader,
714                                              pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
715                                              encoder_context);
716
717     else
718         gen7_vme_fill_vme_batchbuffer(ctx,
719                                       encode_state,
720                                       width_in_mbs, height_in_mbs,
721                                       kernel_shader,
722                                       pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
723                                       encoder_context);
724
725     intel_batchbuffer_start_atomic(batch, 0x1000);
726     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
727     BEGIN_BATCH(batch, 2);
728     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
729     OUT_RELOC(batch,
730               vme_context->vme_batchbuffer.bo,
731               I915_GEM_DOMAIN_COMMAND, 0,
732               0);
733     ADVANCE_BATCH(batch);
734
735     intel_batchbuffer_end_atomic(batch);
736 }
737
738 static VAStatus gen7_vme_prepare(VADriverContextP ctx,
739                                  struct encode_state *encode_state,
740                                  struct intel_encoder_context *encoder_context)
741 {
742     VAStatus vaStatus = VA_STATUS_SUCCESS;
743     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
744     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
745     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
746     struct gen6_vme_context *vme_context = encoder_context->vme_context;
747
748     if (!vme_context->h264_level ||
749         (vme_context->h264_level != pSequenceParameter->level_idc)) {
750         vme_context->h264_level = pSequenceParameter->level_idc;
751     }
752
753     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
754     intel_h264_initialize_mbmv_cost(ctx, encode_state, encoder_context);
755     intel_h264_enc_roi_config(ctx, encode_state, encoder_context);
756
757     /*Setup all the memory object*/
758     gen7_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
759     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
760     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
761     gen7_vme_avc_state_setup(ctx, encode_state, is_intra, encoder_context);
762
763     /*Programing media pipeline*/
764     gen7_vme_pipeline_programing(ctx, encode_state, encoder_context);
765
766     return vaStatus;
767 }
768
769 static VAStatus gen7_vme_run(VADriverContextP ctx,
770                              struct encode_state *encode_state,
771                              struct intel_encoder_context *encoder_context)
772 {
773     struct intel_batchbuffer *batch = encoder_context->base.batch;
774
775     intel_batchbuffer_flush(batch);
776
777     return VA_STATUS_SUCCESS;
778 }
779
780 static VAStatus gen7_vme_stop(VADriverContextP ctx,
781                               struct encode_state *encode_state,
782                               struct intel_encoder_context *encoder_context)
783 {
784     return VA_STATUS_SUCCESS;
785 }
786
787 static VAStatus
788 gen7_vme_pipeline(VADriverContextP ctx,
789                   VAProfile profile,
790                   struct encode_state *encode_state,
791                   struct intel_encoder_context *encoder_context)
792 {
793     gen7_vme_media_init(ctx, encoder_context);
794     gen7_vme_prepare(ctx, encode_state, encoder_context);
795     gen7_vme_run(ctx, encode_state, encoder_context);
796     gen7_vme_stop(ctx, encode_state, encoder_context);
797
798     return VA_STATUS_SUCCESS;
799 }
800
801 static void
802 gen7_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
803                                    struct encode_state *encode_state,
804                                    int index,
805                                    int is_intra,
806                                    struct intel_encoder_context *encoder_context)
807
808 {
809     struct i965_driver_data *i965 = i965_driver_data(ctx);
810     struct gen6_vme_context *vme_context = encoder_context->vme_context;
811     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
812     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
813     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
814
815     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
816     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
817
818     if (is_intra)
819         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
820     else
821         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
822
823     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
824                                               "VME output buffer",
825                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
826                                               0x1000);
827     assert(vme_context->vme_output.bo);
828     vme_context->vme_buffer_suface_setup(ctx,
829                                          &vme_context->gpe_context,
830                                          &vme_context->vme_output,
831                                          BINDING_TABLE_OFFSET(index),
832                                          SURFACE_STATE_OFFSET(index));
833 }
834
835 static void
836 gen7_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
837                                             struct encode_state *encode_state,
838                                             int index,
839                                             struct intel_encoder_context *encoder_context)
840
841 {
842     struct i965_driver_data *i965 = i965_driver_data(ctx);
843     struct gen6_vme_context *vme_context = encoder_context->vme_context;
844     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
845     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
846     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
847
848     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
849     vme_context->vme_batchbuffer.size_block = 32; /* 4 OWORDs */
850     vme_context->vme_batchbuffer.pitch = 16;
851     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr,
852                                                    "VME batchbuffer",
853                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
854                                                    0x1000);
855     vme_context->vme_buffer_suface_setup(ctx,
856                                          &vme_context->gpe_context,
857                                          &vme_context->vme_batchbuffer,
858                                          BINDING_TABLE_OFFSET(index),
859                                          SURFACE_STATE_OFFSET(index));
860 }
861
862 static VAStatus
863 gen7_vme_mpeg2_surface_setup(VADriverContextP ctx,
864                              struct encode_state *encode_state,
865                              int is_intra,
866                              struct intel_encoder_context *encoder_context)
867 {
868     struct object_surface *obj_surface;
869
870     /*Setup surfaces state*/
871     /* current picture for encoding */
872     obj_surface = encode_state->input_yuv_object;
873     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
874     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
875
876     if (!is_intra) {
877         /* reference 0 */
878         obj_surface = encode_state->reference_objects[0];
879         if (obj_surface->bo != NULL)
880             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
881
882         /* reference 1 */
883         obj_surface = encode_state->reference_objects[1];
884         if (obj_surface && obj_surface->bo != NULL)
885             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
886     }
887
888     /* VME output */
889     gen7_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
890     gen7_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
891
892     return VA_STATUS_SUCCESS;
893 }
894
895 static void
896 gen7_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
897                                     struct encode_state *encode_state,
898                                     int mb_width, int mb_height,
899                                     int kernel,
900                                     int transform_8x8_mode_flag,
901                                     struct intel_encoder_context *encoder_context)
902 {
903     struct gen6_vme_context *vme_context = encoder_context->vme_context;
904     int mb_x = 0, mb_y = 0;
905     int i, s, j;
906     unsigned int *command_ptr;
907
908     dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
909     command_ptr = vme_context->vme_batchbuffer.bo->virtual;
910
911     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
912         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
913
914         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
915             int slice_mb_begin = slice_param->macroblock_address;
916             int slice_mb_number = slice_param->num_macroblocks;
917             unsigned int mb_intra_ub;
918
919             for (i = 0; i < slice_mb_number;) {
920                 int mb_count = i + slice_mb_begin;
921
922                 mb_x = mb_count % mb_width;
923                 mb_y = mb_count / mb_width;
924                 mb_intra_ub = 0;
925
926                 if (mb_x != 0) {
927                     mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
928                 }
929
930                 if (mb_y != 0) {
931                     mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;
932
933                     if (mb_x != 0)
934                         mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;
935
936                     if (mb_x != (mb_width - 1))
937                         mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
938                 }
939
940
941
942                 *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
943                 *command_ptr++ = kernel;
944                 *command_ptr++ = 0;
945                 *command_ptr++ = 0;
946                 *command_ptr++ = 0;
947                 *command_ptr++ = 0;
948
949                 /*inline data */
950                 *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
951                 *command_ptr++ = ((1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));
952
953                 i += 1;
954             }
955
956             slice_param++;
957         }
958     }
959
960     *command_ptr++ = 0;
961     *command_ptr++ = MI_BATCH_BUFFER_END;
962
963     dri_bo_unmap(vme_context->vme_batchbuffer.bo);
964 }
965
966 static void
967 gen7_vme_mpeg2_pipeline_programing(VADriverContextP ctx,
968                                    struct encode_state *encode_state,
969                                    int is_intra,
970                                    struct intel_encoder_context *encoder_context)
971 {
972     struct gen6_vme_context *vme_context = encoder_context->vme_context;
973     struct intel_batchbuffer *batch = encoder_context->base.batch;
974     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
975     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
976     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
977
978     bool allow_hwscore = true;
979     int s;
980
981     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
982         int j;
983         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
984
985         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
986             if (slice_param->macroblock_address % width_in_mbs) {
987                 allow_hwscore = false;
988                 break;
989             }
990         }
991     }
992
993     if (allow_hwscore)
994         gen7_vme_mpeg2_walker_fill_vme_batchbuffer(ctx,
995                                                    encode_state,
996                                                    width_in_mbs, height_in_mbs,
997                                                    MPEG2_VME_INTER_SHADER,
998                                                    encoder_context);
999     else
1000         gen7_vme_mpeg2_fill_vme_batchbuffer(ctx,
1001                                             encode_state,
1002                                             width_in_mbs, height_in_mbs,
1003                                             MPEG2_VME_INTER_SHADER,
1004                                             0,
1005                                             encoder_context);
1006
1007     intel_batchbuffer_start_atomic(batch, 0x1000);
1008     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
1009     BEGIN_BATCH(batch, 2);
1010     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
1011     OUT_RELOC(batch,
1012               vme_context->vme_batchbuffer.bo,
1013               I915_GEM_DOMAIN_COMMAND, 0,
1014               0);
1015     ADVANCE_BATCH(batch);
1016
1017     intel_batchbuffer_end_atomic(batch);
1018 }
1019
1020 static VAStatus
1021 gen7_vme_mpeg2_prepare(VADriverContextP ctx,
1022                        struct encode_state *encode_state,
1023                        struct intel_encoder_context *encoder_context)
1024 {
1025     VAStatus vaStatus = VA_STATUS_SUCCESS;
1026     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
1027     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1028
1029     if ((!vme_context->mpeg2_level) ||
1030         (vme_context->mpeg2_level != (seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK))) {
1031         vme_context->mpeg2_level = seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK;
1032     }
1033
1034     /*Setup all the memory object*/
1035
1036     intel_vme_mpeg2_state_setup(ctx, encode_state, encoder_context);
1037     gen7_vme_mpeg2_surface_setup(ctx, encode_state, 0, encoder_context);
1038     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
1039     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
1040     gen7_vme_mpeg2_state_setup(ctx, encode_state, 0, encoder_context);
1041
1042     /*Programing media pipeline*/
1043     gen7_vme_mpeg2_pipeline_programing(ctx, encode_state, 0, encoder_context);
1044
1045     return vaStatus;
1046 }
1047
1048 static VAStatus
1049 gen7_vme_mpeg2_pipeline(VADriverContextP ctx,
1050                         VAProfile profile,
1051                         struct encode_state *encode_state,
1052                         struct intel_encoder_context *encoder_context)
1053 {
1054     struct i965_driver_data *i965 = i965_driver_data(ctx);
1055     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1056     VAEncSliceParameterBufferMPEG2 *slice_param =
1057         (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
1058     VAEncSequenceParameterBufferMPEG2 *seq_param =
1059         (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
1060
1061     /*No need of to exec VME for Intra slice */
1062     if (slice_param->is_intra_slice) {
1063         if (!vme_context->vme_output.bo) {
1064             int w_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
1065             int h_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
1066
1067             vme_context->vme_output.num_blocks = w_in_mbs * h_in_mbs;
1068             vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
1069             vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
1070             vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
1071                                                       "MPEG2 VME output buffer",
1072                                                       vme_context->vme_output.num_blocks
1073                                                       * vme_context->vme_output.size_block,
1074                                                       0x1000);
1075         }
1076
1077         return VA_STATUS_SUCCESS;
1078     }
1079
1080     gen7_vme_media_init(ctx, encoder_context);
1081     gen7_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
1082     gen7_vme_run(ctx, encode_state, encoder_context);
1083     gen7_vme_stop(ctx, encode_state, encoder_context);
1084
1085     return VA_STATUS_SUCCESS;
1086 }
1087
1088 static void
1089 gen7_vme_context_destroy(void *context)
1090 {
1091     struct gen6_vme_context *vme_context = context;
1092
1093     i965_gpe_context_destroy(&vme_context->gpe_context);
1094
1095     dri_bo_unreference(vme_context->vme_output.bo);
1096     vme_context->vme_output.bo = NULL;
1097
1098     dri_bo_unreference(vme_context->vme_state.bo);
1099     vme_context->vme_state.bo = NULL;
1100
1101     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1102     vme_context->vme_batchbuffer.bo = NULL;
1103
1104     free(vme_context->vme_state_message);
1105     vme_context->vme_state_message = NULL;
1106
1107     dri_bo_unreference(vme_context->i_qp_cost_table);
1108     vme_context->i_qp_cost_table = NULL;
1109
1110     dri_bo_unreference(vme_context->p_qp_cost_table);
1111     vme_context->p_qp_cost_table = NULL;
1112
1113     dri_bo_unreference(vme_context->b_qp_cost_table);
1114     vme_context->b_qp_cost_table = NULL;
1115
1116     free(vme_context->qp_per_mb);
1117     vme_context->qp_per_mb = NULL;
1118
1119     free(vme_context);
1120 }
1121
1122 Bool gen7_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1123 {
1124     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1125     struct i965_kernel *vme_kernel_list = NULL;
1126
1127     assert(vme_context);
1128     vme_context->gpe_context.surface_state_binding_table.length =
1129         (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1130
1131     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1132     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1133     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1134
1135     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1136     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
1137     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1138     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1139     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1140
1141     gen7_vme_scoreboard_init(ctx, vme_context);
1142
1143     if (encoder_context->codec == CODEC_H264) {
1144         vme_kernel_list = gen7_vme_kernels;
1145         vme_context->video_coding_type = VIDEO_CODING_AVC;
1146         vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM;
1147         encoder_context->vme_pipeline = gen7_vme_pipeline;
1148     } else if (encoder_context->codec == CODEC_MPEG2) {
1149         vme_kernel_list = gen7_vme_mpeg2_kernels;
1150         vme_context->video_coding_type = VIDEO_CODING_MPEG2;
1151         vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM;
1152         encoder_context->vme_pipeline = gen7_vme_mpeg2_pipeline;
1153     } else {
1154         /* Unsupported codec */
1155         assert(0);
1156     }
1157
1158     i965_gpe_load_kernels(ctx,
1159                           &vme_context->gpe_context,
1160                           vme_kernel_list,
1161                           vme_context->vme_kernel_sum);
1162
1163     vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1164     vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1165     vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1166
1167     encoder_context->vme_context = vme_context;
1168     encoder_context->vme_context_destroy = gen7_vme_context_destroy;
1169     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1170
1171     return True;
1172 }