OSDN Git Service

intel-vaapi-driver 1.8.1.pre1
[android-x86/hardware-intel-common-vaapi.git] / src / gen7_vme.c
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <stdbool.h>
32 #include <string.h>
33 #include <assert.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_encoder.h"
41 #include "gen6_vme.h"
42 #include "gen6_mfc.h"
43 #ifdef SURFACE_STATE_PADDED_SIZE
44 #undef SURFACE_STATE_PADDED_SIZE
45 #endif
46
47 #define VME_MSG_LENGTH          32
48
49 #define SURFACE_STATE_PADDED_SIZE               SURFACE_STATE_PADDED_SIZE_GEN7
50 #define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
51 #define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)
52
53 #define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
54 #define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
55 #define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */
56
/* Video coding standards handled by this VME module. */
enum VIDEO_CODING_TYPE{
    VIDEO_CODING_AVC = 0,
    VIDEO_CODING_MPEG2,
    VIDEO_CODING_SUM
};

/* AVC VME media kernels; values index gen7_vme_kernels[]. */
enum AVC_VME_KERNEL_TYPE{ 
    AVC_VME_INTRA_SHADER = 0,
    AVC_VME_INTER_SHADER,
    AVC_VME_BATCHBUFFER,
    AVC_VME_BINTER_SHADER,
    AVC_VME_KERNEL_SUM
};

/* MPEG-2 VME media kernels; values index gen7_vme_mpeg2_kernels[]. */
enum MPEG2_VME_KERNEL_TYPE{
    MPEG2_VME_INTER_SHADER = 0,
    MPEG2_VME_BATCHBUFFER,
    MPEG2_VME_KERNEL_SUM
};
76  
77
/* Pre-compiled IVB kernel binaries (generated offline into .g7b files). */
static const uint32_t gen7_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_ivb.g7b"
};

static const uint32_t gen7_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_ivb.g7b"
};

static const uint32_t gen7_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

static const uint32_t gen7_vme_binter_frame[][4] = {
#include "shaders/vme/inter_bframe_ivb.g7b"
};

/* AVC kernel table; entry order must match enum AVC_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_kernels[] = {
    {
        "AVC VME Intra Frame",
        AVC_VME_INTRA_SHADER,                   /*index*/
        gen7_vme_intra_frame,                   
        sizeof(gen7_vme_intra_frame),           
        NULL
    },
    {
        "AVC VME inter Frame",
        AVC_VME_INTER_SHADER,
        gen7_vme_inter_frame,
        sizeof(gen7_vme_inter_frame),
        NULL
    },
    {
        "AVC VME BATCHBUFFER",
        AVC_VME_BATCHBUFFER,
        gen7_vme_batchbuffer,
        sizeof(gen7_vme_batchbuffer),
        NULL
    },
    {
        "AVC VME binter Frame",
        AVC_VME_BINTER_SHADER,
        gen7_vme_binter_frame,
        sizeof(gen7_vme_binter_frame),
        NULL
    }
};

static const uint32_t gen7_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_ivb.g7b"
};

/* MPEG-2 reuses the same batchbuffer kernel binary as AVC. */
static const uint32_t gen7_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g7b"
};

/* MPEG-2 kernel table; entry order must match enum MPEG2_VME_KERNEL_TYPE. */
static struct i965_kernel gen7_vme_mpeg2_kernels[] = {
    {
        "MPEG2 VME inter Frame",
        MPEG2_VME_INTER_SHADER,
        gen7_vme_mpeg2_inter_frame,
        sizeof(gen7_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "MPEG2 VME BATCHBUFFER",
        MPEG2_VME_BATCHBUFFER,
        gen7_vme_mpeg2_batchbuffer,
        sizeof(gen7_vme_mpeg2_batchbuffer),
        NULL
    },
};
149
150 /* only used for VME source surface state */
151 static void 
152 gen7_vme_source_surface_state(VADriverContextP ctx,
153                               int index,
154                               struct object_surface *obj_surface,
155                               struct intel_encoder_context *encoder_context)
156 {
157     struct gen6_vme_context *vme_context = encoder_context->vme_context;
158
159     vme_context->vme_surface2_setup(ctx,
160                                     &vme_context->gpe_context,
161                                     obj_surface,
162                                     BINDING_TABLE_OFFSET(index),
163                                     SURFACE_STATE_OFFSET(index));
164 }
165
166 static void
167 gen7_vme_media_source_surface_state(VADriverContextP ctx,
168                                     int index,
169                                     struct object_surface *obj_surface,
170                                     struct intel_encoder_context *encoder_context)
171 {
172     struct gen6_vme_context *vme_context = encoder_context->vme_context;
173
174     vme_context->vme_media_rw_surface_setup(ctx,
175                                             &vme_context->gpe_context,
176                                             obj_surface,
177                                             BINDING_TABLE_OFFSET(index),
178                                             SURFACE_STATE_OFFSET(index),
179                                             0);
180 }
181
182 static void
183 gen7_vme_output_buffer_setup(VADriverContextP ctx,
184                              struct encode_state *encode_state,
185                              int index,
186                              struct intel_encoder_context *encoder_context)
187
188 {
189     struct i965_driver_data *i965 = i965_driver_data(ctx);
190     struct gen6_vme_context *vme_context = encoder_context->vme_context;
191     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
192     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
193     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
194     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
195     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
196
197     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
198     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
199
200     if (is_intra)
201         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
202     else
203         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
204
205     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr, 
206                                               "VME output buffer",
207                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
208                                               0x1000);
209     assert(vme_context->vme_output.bo);
210     vme_context->vme_buffer_suface_setup(ctx,
211                                          &vme_context->gpe_context,
212                                          &vme_context->vme_output,
213                                          BINDING_TABLE_OFFSET(index),
214                                          SURFACE_STATE_OFFSET(index));
215 }
216
217 static void
218 gen7_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
219                                       struct encode_state *encode_state,
220                                       int index,
221                                       struct intel_encoder_context *encoder_context)
222
223 {
224     struct i965_driver_data *i965 = i965_driver_data(ctx);
225     struct gen6_vme_context *vme_context = encoder_context->vme_context;
226     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
227     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
228     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
229
230     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
231     vme_context->vme_batchbuffer.size_block = 64; /* 4 OWORDs */
232     vme_context->vme_batchbuffer.pitch = 16;
233     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
234                                                    "VME batchbuffer",
235                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
236                                                    0x1000);
237     vme_context->vme_buffer_suface_setup(ctx,
238                                          &vme_context->gpe_context,
239                                          &vme_context->vme_batchbuffer,
240                                          BINDING_TABLE_OFFSET(index),
241                                          SURFACE_STATE_OFFSET(index));
242 }
243
244 static VAStatus
245 gen7_vme_surface_setup(VADriverContextP ctx, 
246                        struct encode_state *encode_state,
247                        int is_intra,
248                        struct intel_encoder_context *encoder_context)
249 {
250     struct object_surface *obj_surface;
251
252     /*Setup surfaces state*/
253     /* current picture for encoding */
254     obj_surface = encode_state->input_yuv_object;
255     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
256     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
257
258     if (!is_intra) {
259         VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
260         int slice_type;
261
262         slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
263         assert(slice_type != SLICE_TYPE_I && slice_type != SLICE_TYPE_SI);
264
265         intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 0, 1, gen7_vme_source_surface_state);
266
267         if (slice_type == SLICE_TYPE_B)
268             intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 1, 2, gen7_vme_source_surface_state);
269     }
270
271     /* VME output */
272     gen7_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
273     gen7_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
274     intel_h264_setup_cost_surface(ctx, encode_state, encoder_context,
275                                   BINDING_TABLE_OFFSET(INTEL_COST_TABLE_OFFSET),
276                                   SURFACE_STATE_OFFSET(INTEL_COST_TABLE_OFFSET));
277
278     return VA_STATUS_SUCCESS;
279 }
280
/*
 * Fill the Interface Descriptor Remap Table (IDRT): one 32-byte
 * descriptor per VME kernel, each pointing at its kernel binary, the
 * shared VME state buffer (exposed as "sampler state"), the common
 * binding table, and the CURBE read window.
 */
static VAStatus gen7_vme_interface_setup(VADriverContextP ctx, 
                                         struct encode_state *encode_state,
                                         struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct gen6_interface_descriptor_data *desc;   
    int i;
    dri_bo *bo;

    bo = vme_context->gpe_context.idrt.bo;
    dri_bo_map(bo, 1);
    assert(bo->virtual);
    desc = bo->virtual;

    for (i = 0; i < vme_context->vme_kernel_sum; i++) {
        struct i965_kernel *kernel;
        kernel = &vme_context->gpe_context.kernels[i];
        /* Descriptors are packed back to back; reloc offsets below assume
         * exactly 32 bytes per entry. */
        assert(sizeof(*desc) == 32);
        /* Set up the descriptor table entry. */
        memset(desc, 0, sizeof(*desc));
        desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
        desc->desc2.sampler_count = 1; /* FIXME: */
        desc->desc2.sampler_state_pointer = (vme_context->vme_state.bo->offset >> 5);
        desc->desc3.binding_table_entry_count = 1; /* FIXME: */
        desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
        desc->desc4.constant_urb_entry_read_offset = 0;
        desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
                
        /* Relocation for the kernel start pointer (desc0). */
        dri_bo_emit_reloc(bo,   
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0,
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
                          kernel->bo);
        /* Relocation for the sampler state (VME state) pointer in desc2.
         * NOTE(review): the delta (1 << 2) appears to preserve the
         * sampler_count field in the relocated dword — confirm against
         * the gen6_interface_descriptor_data desc2 bit layout. */
        dri_bo_emit_reloc(bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          (1 << 2),                                                                     //
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc2),
                          vme_context->vme_state.bo);
        desc++;
    }
    dri_bo_unmap(bo);

    return VA_STATUS_SUCCESS;
}
327
328 static VAStatus gen7_vme_constant_setup(VADriverContextP ctx, 
329                                         struct encode_state *encode_state,
330                                         struct intel_encoder_context *encoder_context)
331 {
332     struct gen6_vme_context *vme_context = encoder_context->vme_context;
333     unsigned char *constant_buffer;
334     unsigned int *vme_state_message;
335     int mv_num;
336
337     vme_state_message = (unsigned int *)vme_context->vme_state_message;
338     mv_num = 32;
339
340     if (encoder_context->codec == CODEC_H264) {
341         if (vme_context->h264_level >= 30) {
342             mv_num = 16;
343         
344             if (vme_context->h264_level >= 31)
345                 mv_num = 8;
346         }
347     } else if (encoder_context->codec == CODEC_MPEG2) { 
348         mv_num = 2;
349     }
350
351
352     vme_state_message[31] = mv_num;
353
354     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
355     assert(vme_context->gpe_context.curbe.bo->virtual);
356     constant_buffer = vme_context->gpe_context.curbe.bo->virtual;
357
358     /* Pass the required constant info into the constant buffer */
359     memcpy(constant_buffer, (char *)vme_context->vme_state_message, 128);
360         
361     dri_bo_unmap( vme_context->gpe_context.curbe.bo);
362
363     return VA_STATUS_SUCCESS;
364 }
365
366
367 static VAStatus gen7_vme_avc_state_setup(VADriverContextP ctx,
368                                          struct encode_state *encode_state,
369                                          int is_intra,
370                                          struct intel_encoder_context *encoder_context)
371 {
372     struct gen6_vme_context *vme_context = encoder_context->vme_context;
373     unsigned int *vme_state_message;
374     unsigned int *mb_cost_table;
375     int i;
376     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
377     unsigned int is_low_quality = (encoder_context->quality_level == ENCODER_LOW_QUALITY);
378     dri_bo *cost_bo;
379     int slice_type;
380     uint8_t *cost_ptr;
381     int qp;
382
383     slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
384
385     if (slice_type == SLICE_TYPE_I) {
386         cost_bo = vme_context->i_qp_cost_table;
387     } else if (slice_type == SLICE_TYPE_P) {
388         cost_bo = vme_context->p_qp_cost_table;
389     } else {
390         cost_bo = vme_context->b_qp_cost_table;
391     }
392
393     mb_cost_table = (unsigned int *)vme_context->vme_state_message;
394     dri_bo_map(vme_context->vme_state.bo, 1);
395     dri_bo_map(cost_bo, 0);
396     assert(vme_context->vme_state.bo->virtual);
397     assert(cost_bo->virtual);
398     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
399
400     cost_ptr = (uint8_t *)cost_bo->virtual;
401
402     /* up to 8 VME_SEARCH_PATH_LUT is supported */
403     /* Two subsequent qp will share the same mode/motion-vector cost table */
404     /* the range is from 0-51 */
405     for (i = 0; i < 8; i++)  {
406
407         vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual +
408                              i * 32;
409         if ((slice_type == SLICE_TYPE_P) && !is_low_quality) {
410             vme_state_message[0] = 0x01010101;
411             vme_state_message[1] = 0x10010101;
412             vme_state_message[2] = 0x0F0F0F0F;
413             vme_state_message[3] = 0x100F0F0F;
414             vme_state_message[4] = 0x01010101;
415             vme_state_message[5] = 0x10010101;
416             vme_state_message[6] = 0x0F0F0F0F;
417             vme_state_message[7] = 0x100F0F0F;
418             vme_state_message[8] = 0x01010101;
419             vme_state_message[9] = 0x10010101;
420             vme_state_message[10] = 0x0F0F0F0F;
421             vme_state_message[11] = 0x000F0F0F;
422             vme_state_message[12] = 0x00;
423             vme_state_message[13] = 0x00;
424         } else {
425             vme_state_message[0] = 0x10010101;
426             vme_state_message[1] = 0x100F0F0F;
427             vme_state_message[2] = 0x10010101;
428             vme_state_message[3] = 0x000F0F0F;
429             vme_state_message[4] = 0;
430             vme_state_message[5] = 0;
431             vme_state_message[6] = 0;
432             vme_state_message[7] = 0;
433             vme_state_message[8] = 0;
434             vme_state_message[9] = 0;
435             vme_state_message[10] = 0;
436             vme_state_message[11] = 0;
437             vme_state_message[12] = 0;
438             vme_state_message[13] = 0;
439         }
440
441         qp = 8 * i;
442
443         /* when qp is greater than 51, use the cost_table of qp=51 to fulfill */
444         if (qp > 51) {
445             qp = 51;
446         }
447         /* Setup the four LUT sets for MbMV cost */
448         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
449         vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
450         vme_state_message[16] = mb_cost_table[0];
451         vme_state_message[17] = mb_cost_table[1];
452         vme_state_message[18] = mb_cost_table[3];
453         vme_state_message[19] = mb_cost_table[4];
454
455         qp += 2;
456         if (qp > 51) {
457             qp = 51;
458         }
459         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
460         vme_state_message[14] |= ((mb_cost_table[2] & 0xFFFF) << 16);
461         vme_state_message[20] = mb_cost_table[0];
462         vme_state_message[21] = mb_cost_table[1];
463         vme_state_message[22] = mb_cost_table[3];
464         vme_state_message[23] = mb_cost_table[4];
465
466         qp += 2;
467         if (qp > 51) {
468             qp = 51;
469         }
470         vme_state_message[15] = (mb_cost_table[2] & 0xFFFF);
471         vme_state_message[24] = mb_cost_table[0];
472         vme_state_message[25] = mb_cost_table[1];
473         vme_state_message[26] = mb_cost_table[3];
474         vme_state_message[27] = mb_cost_table[4];
475
476         qp += 2;
477         if (qp > 51) {
478             qp = 51;
479         }
480         mb_cost_table = (unsigned int *)(cost_ptr + qp * 32);
481         vme_state_message[15] |= ((mb_cost_table[2] & 0xFFFF) << 16);
482         vme_state_message[28] = mb_cost_table[0];
483         vme_state_message[29] = mb_cost_table[1];
484         vme_state_message[30] = mb_cost_table[3];
485         vme_state_message[31] = mb_cost_table[4];
486     }
487
488     dri_bo_unmap(cost_bo);
489     dri_bo_unmap( vme_context->vme_state.bo);
490     return VA_STATUS_SUCCESS;
491 }
492
493 static VAStatus gen7_vme_mpeg2_state_setup(VADriverContextP ctx,
494                                            struct encode_state *encode_state,
495                                            int is_intra,
496                                            struct intel_encoder_context *encoder_context)
497 {
498     struct gen6_vme_context *vme_context = encoder_context->vme_context;
499     unsigned int *vme_state_message;
500     int i;
501     unsigned int *mb_cost_table;
502
503     mb_cost_table = (unsigned int *)vme_context->vme_state_message;
504         
505     //building VME state message
506     dri_bo_map(vme_context->vme_state.bo, 1);
507     assert(vme_context->vme_state.bo->virtual);
508     vme_state_message = (unsigned int *)vme_context->vme_state.bo->virtual;
509
510     vme_state_message[0] = 0x01010101;
511     vme_state_message[1] = 0x10010101;
512     vme_state_message[2] = 0x0F0F0F0F;
513     vme_state_message[3] = 0x100F0F0F;
514     vme_state_message[4] = 0x01010101;
515     vme_state_message[5] = 0x10010101;
516     vme_state_message[6] = 0x0F0F0F0F;
517     vme_state_message[7] = 0x100F0F0F;
518     vme_state_message[8] = 0x01010101;
519     vme_state_message[9] = 0x10010101;
520     vme_state_message[10] = 0x0F0F0F0F;
521     vme_state_message[11] = 0x000F0F0F;
522     vme_state_message[12] = 0x00;
523     vme_state_message[13] = 0x00;
524
525     vme_state_message[14] = (mb_cost_table[2] & 0xFFFF);
526     vme_state_message[15] = 0;
527     vme_state_message[16] = mb_cost_table[0];
528     vme_state_message[17] = 0;
529     vme_state_message[18] = mb_cost_table[3];
530     vme_state_message[19] = mb_cost_table[4];
531
532     for(i = 20; i < 32; i++) {
533         vme_state_message[i] = 0;
534     }
535     //vme_state_message[16] = 0x42424242;                       //cost function LUT set 0 for Intra
536
537     dri_bo_unmap( vme_context->vme_state.bo);
538     return VA_STATUS_SUCCESS;
539 }
540
541 static void
542 gen7_vme_fill_vme_batchbuffer(VADriverContextP ctx, 
543                               struct encode_state *encode_state,
544                               int mb_width, int mb_height,
545                               int kernel,
546                               int transform_8x8_mode_flag,
547                               struct intel_encoder_context *encoder_context)
548 {
549     struct gen6_vme_context *vme_context = encoder_context->vme_context;
550     int mb_x = 0, mb_y = 0;
551     int i, s, j;
552     unsigned int *command_ptr;
553     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
554     VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
555     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
556     int qp;
557     int slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
558     int qp_mb, qp_index;
559
560     if (encoder_context->rate_control_mode == VA_RC_CQP)
561         qp = pic_param->pic_init_qp + slice_param->slice_qp_delta;
562     else
563         qp = mfc_context->brc.qp_prime_y[encoder_context->layer.curr_frame_layer_id][slice_type];
564
565     dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
566     command_ptr = vme_context->vme_batchbuffer.bo->virtual;
567
568     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
569         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
570
571         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
572             int slice_mb_begin = slice_param->macroblock_address;
573             int slice_mb_number = slice_param->num_macroblocks;
574             unsigned int mb_intra_ub;
575             int slice_mb_x = slice_param->macroblock_address % mb_width;
576
577             for (i = 0; i < slice_mb_number;) {
578                 int mb_count = i + slice_mb_begin;    
579
580                 mb_x = mb_count % mb_width;
581                 mb_y = mb_count / mb_width;
582                 mb_intra_ub = 0;
583
584                 if (mb_x != 0) {
585                     mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
586                 }
587
588                 if (mb_y != 0) {
589                     mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;
590
591                     if (mb_x != 0)
592                         mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;
593
594                     if (mb_x != (mb_width -1))
595                         mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
596                 }
597
598                 if (i < mb_width) {
599                     if (i == 0)
600                         mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);
601
602                     mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);
603
604                     if ((i == (mb_width - 1)) && slice_mb_x) {
605                         mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
606                     }
607                 }
608                 
609                 if ((i == mb_width) && slice_mb_x) {
610                     mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
611                 }
612
613                 *command_ptr++ = (CMD_MEDIA_OBJECT | (9 - 2));
614                 *command_ptr++ = kernel;
615                 *command_ptr++ = 0;
616                 *command_ptr++ = 0;
617                 *command_ptr++ = 0;
618                 *command_ptr++ = 0;
619    
620                 /*inline data */
621                 *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
622                 *command_ptr++ = ((encoder_context->quality_level << 24) | (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));
623
624                 if (vme_context->roi_enabled) {
625                     qp_index = mb_y * mb_width + mb_x;
626                     qp_mb = *(vme_context->qp_per_mb + qp_index);
627                 } else
628                     qp_mb = qp;
629                 *command_ptr++ = qp_mb;
630
631                 i += 1;
632             }
633
634             slice_param++;
635         }
636     }
637
638     *command_ptr++ = 0;
639     *command_ptr++ = MI_BATCH_BUFFER_END;
640
641     dri_bo_unmap(vme_context->vme_batchbuffer.bo);
642 }
643
644
645 static void gen7_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
646 {
647     struct i965_driver_data *i965 = i965_driver_data(ctx);
648     struct gen6_vme_context *vme_context = encoder_context->vme_context;
649     dri_bo *bo;
650
651     i965_gpe_context_init(ctx, &vme_context->gpe_context);
652
653     /* VME output buffer */
654     dri_bo_unreference(vme_context->vme_output.bo);
655     vme_context->vme_output.bo = NULL;
656
657     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
658     vme_context->vme_batchbuffer.bo = NULL;
659
660     /* VME state */
661     dri_bo_unreference(vme_context->vme_state.bo);
662     bo = dri_bo_alloc(i965->intel.bufmgr,
663                       "Buffer",
664                       1024*16, 64);
665     assert(bo);
666     vme_context->vme_state.bo = bo;
667 }
668
/*
 * Choose the VME kernel for this frame, fill the second-level VME
 * batchbuffer (hardware-scoreboard walker when every slice starts at a
 * row boundary and quality is not "low", software path otherwise), and
 * emit the first-level batch that chains into it.
 */
static void gen7_vme_pipeline_programing(VADriverContextP ctx, 
                                         struct encode_state *encode_state,
                                         struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
    int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
    int s;
    bool allow_hwscore = true;
    int kernel_shader;
    unsigned int is_low_quality = (encoder_context->quality_level == ENCODER_LOW_QUALITY);

    /* The HW-scoreboard walker requires every slice to begin at the
     * start of a macroblock row. */
    if (is_low_quality)
        allow_hwscore = false;
    else {
        for (s = 0; s < encode_state->num_slice_params_ext; s++) {
            pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer; 
            if ((pSliceParameter->macroblock_address % width_in_mbs)) {
                allow_hwscore = false;
                break;
            }
        }
    }

    /* NOTE(review): after the loop above, pSliceParameter points at the
     * last slice examined, not necessarily slice 0 — the slice_type test
     * below assumes all slices of the frame share one type; confirm. */
    if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
        (pSliceParameter->slice_type == SLICE_TYPE_SI)) {
        kernel_shader = AVC_VME_INTRA_SHADER;
    } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
               (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
        kernel_shader = AVC_VME_INTER_SHADER;
    } else {
        kernel_shader = AVC_VME_BINTER_SHADER;
        /* The B-frame kernel is only usable on the walker path. */
        if (!allow_hwscore)
            kernel_shader = AVC_VME_INTER_SHADER;
    }

    if (allow_hwscore)
        gen7_vme_walker_fill_vme_batchbuffer(ctx, 
                                             encode_state,
                                             width_in_mbs, height_in_mbs,
                                             kernel_shader,
                                             pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
                                             encoder_context);

    else
        gen7_vme_fill_vme_batchbuffer(ctx, 
                                      encode_state,
                                      width_in_mbs, height_in_mbs,
                                      kernel_shader,
                                      pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
                                      encoder_context);

    /* First-level batch: set up the GPE pipeline, then chain into the
     * second-level VME batchbuffer built above. */
    intel_batchbuffer_start_atomic(batch, 0x1000);
    gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
    BEGIN_BATCH(batch, 2);
    OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
    OUT_RELOC(batch,
              vme_context->vme_batchbuffer.bo,
              I915_GEM_DOMAIN_COMMAND, 0, 
              0);
    ADVANCE_BATCH(batch);

    intel_batchbuffer_end_atomic(batch);        
}
737
738 static VAStatus gen7_vme_prepare(VADriverContextP ctx, 
739                                  struct encode_state *encode_state,
740                                  struct intel_encoder_context *encoder_context)
741 {
742     VAStatus vaStatus = VA_STATUS_SUCCESS;
743     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
744     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
745     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
746     struct gen6_vme_context *vme_context = encoder_context->vme_context;
747
748     if (!vme_context->h264_level ||
749         (vme_context->h264_level != pSequenceParameter->level_idc)) {
750         vme_context->h264_level = pSequenceParameter->level_idc;        
751     }
752
753     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
754     intel_h264_initialize_mbmv_cost(ctx, encode_state, encoder_context);
755
756     /*Setup all the memory object*/
757     gen7_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
758     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
759     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
760     gen7_vme_avc_state_setup(ctx, encode_state, is_intra, encoder_context);
761
762     /*Programing media pipeline*/
763     gen7_vme_pipeline_programing(ctx, encode_state, encoder_context);
764
765     return vaStatus;
766 }
767
768 static VAStatus gen7_vme_run(VADriverContextP ctx, 
769                              struct encode_state *encode_state,
770                              struct intel_encoder_context *encoder_context)
771 {
772     struct intel_batchbuffer *batch = encoder_context->base.batch;
773
774     intel_batchbuffer_flush(batch);
775
776     return VA_STATUS_SUCCESS;
777 }
778
/*
 * VME stage "stop" hook.  Intentionally a no-op on gen7; it only exists
 * to satisfy the prepare/run/stop pipeline contract.
 */
static VAStatus gen7_vme_stop(VADriverContextP ctx, 
                              struct encode_state *encode_state,
                              struct intel_encoder_context *encoder_context)
{
    return VA_STATUS_SUCCESS;
}
785
/*
 * Top-level H.264 VME pipeline entry point: (re)initialize the media
 * state, prepare all surfaces/state, submit the batch, then run the
 * (no-op) stop hook.  Return codes of the individual stages are ignored
 * and success is always reported.
 */
static VAStatus
gen7_vme_pipeline(VADriverContextP ctx,
                  VAProfile profile,
                  struct encode_state *encode_state,
                  struct intel_encoder_context *encoder_context)
{
    gen7_vme_media_init(ctx, encoder_context);
    gen7_vme_prepare(ctx, encode_state, encoder_context);
    gen7_vme_run(ctx, encode_state, encoder_context);
    gen7_vme_stop(ctx, encode_state, encoder_context);

    return VA_STATUS_SUCCESS;
}
799
800 static void
801 gen7_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
802                                    struct encode_state *encode_state,
803                                    int index,
804                                    int is_intra,
805                                    struct intel_encoder_context *encoder_context)
806
807 {
808     struct i965_driver_data *i965 = i965_driver_data(ctx);
809     struct gen6_vme_context *vme_context = encoder_context->vme_context;
810     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
811     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
812     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
813
814     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
815     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
816
817     if (is_intra)
818         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
819     else
820         vme_context->vme_output.size_block = INTER_VME_OUTPUT_IN_BYTES;
821
822     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
823                                               "VME output buffer",
824                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
825                                               0x1000);
826     assert(vme_context->vme_output.bo);
827     vme_context->vme_buffer_suface_setup(ctx,
828                                          &vme_context->gpe_context,
829                                          &vme_context->vme_output,
830                                          BINDING_TABLE_OFFSET(index),
831                                          SURFACE_STATE_OFFSET(index));
832 }
833
834 static void
835 gen7_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
836                                             struct encode_state *encode_state,
837                                             int index,
838                                             struct intel_encoder_context *encoder_context)
839
840 {
841     struct i965_driver_data *i965 = i965_driver_data(ctx);
842     struct gen6_vme_context *vme_context = encoder_context->vme_context;
843     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
844     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
845     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
846
847     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
848     vme_context->vme_batchbuffer.size_block = 32; /* 4 OWORDs */
849     vme_context->vme_batchbuffer.pitch = 16;
850     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr, 
851                                                    "VME batchbuffer",
852                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
853                                                    0x1000);
854     vme_context->vme_buffer_suface_setup(ctx,
855                                          &vme_context->gpe_context,
856                                          &vme_context->vme_batchbuffer,
857                                          BINDING_TABLE_OFFSET(index),
858                                          SURFACE_STATE_OFFSET(index));
859 }
860
861 static VAStatus
862 gen7_vme_mpeg2_surface_setup(VADriverContextP ctx, 
863                              struct encode_state *encode_state,
864                              int is_intra,
865                              struct intel_encoder_context *encoder_context)
866 {
867     struct object_surface *obj_surface;
868
869     /*Setup surfaces state*/
870     /* current picture for encoding */
871     obj_surface = encode_state->input_yuv_object;
872     gen7_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
873     gen7_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
874
875     if (!is_intra) {
876         /* reference 0 */
877         obj_surface = encode_state->reference_objects[0];
878         if (obj_surface->bo != NULL)
879             gen7_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
880
881         /* reference 1 */
882         obj_surface = encode_state->reference_objects[1];
883         if (obj_surface && obj_surface->bo != NULL) 
884             gen7_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
885     }
886
887     /* VME output */
888     gen7_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
889     gen7_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
890
891     return VA_STATUS_SUCCESS;
892 }
893
/*
 * CPU-fill the second-level batch buffer with one MEDIA_OBJECT command
 * per macroblock for the MPEG-2 VME kernel, walking every slice element
 * in every slice parameter buffer, then terminate the batch with
 * MI_BATCH_BUFFER_END.  Each MEDIA_OBJECT carries 2 DWORDs of inline
 * data: the MB position and the intra-prediction availability flags.
 */
static void
gen7_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
                                    struct encode_state *encode_state,
                                    int mb_width, int mb_height,
                                    int kernel,
                                    int transform_8x8_mode_flag,
                                    struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;

    /* Map for CPU write; commands are streamed sequentially. */
    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;    

                /* Convert the linear MB address into (x, y). */
                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* Neighbour availability for intra prediction:
                 * AE = left, B = top, D = top-left, C = top-right. */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width -1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                

                /* MEDIA_OBJECT header: total length 8 DWORDs (bias -2),
                 * interface descriptor = kernel index, no indirect data. */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
   
                /*inline data */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ( (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                i += 1;
            }

            /* Advance to the next slice element in this buffer. */
            slice_param++;
        }
    }

    /* Pad and terminate the second-level batch. */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
964
965 static void
966 gen7_vme_mpeg2_pipeline_programing(VADriverContextP ctx, 
967                                    struct encode_state *encode_state,
968                                    int is_intra,
969                                    struct intel_encoder_context *encoder_context)
970 {
971     struct gen6_vme_context *vme_context = encoder_context->vme_context;
972     struct intel_batchbuffer *batch = encoder_context->base.batch;
973     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
974     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
975     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
976
977     bool allow_hwscore = true;
978     int s;
979
980     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
981         int j;
982         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
983
984         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
985             if (slice_param->macroblock_address % width_in_mbs) {
986                 allow_hwscore = false;
987                 break;
988             }
989         }
990     }
991
992     if (allow_hwscore) 
993         gen7_vme_mpeg2_walker_fill_vme_batchbuffer(ctx,
994                                                    encode_state,
995                                                    width_in_mbs, height_in_mbs,
996                                                    MPEG2_VME_INTER_SHADER,
997                                                    encoder_context);
998     else
999         gen7_vme_mpeg2_fill_vme_batchbuffer(ctx, 
1000                                             encode_state,
1001                                             width_in_mbs, height_in_mbs,
1002                                             MPEG2_VME_INTER_SHADER,
1003                                             0,
1004                                             encoder_context);
1005
1006     intel_batchbuffer_start_atomic(batch, 0x1000);
1007     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
1008     BEGIN_BATCH(batch, 2);
1009     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
1010     OUT_RELOC(batch,
1011               vme_context->vme_batchbuffer.bo,
1012               I915_GEM_DOMAIN_COMMAND, 0, 
1013               0);
1014     ADVANCE_BATCH(batch);
1015
1016     intel_batchbuffer_end_atomic(batch);
1017 }
1018
1019 static VAStatus
1020 gen7_vme_mpeg2_prepare(VADriverContextP ctx, 
1021                        struct encode_state *encode_state,
1022                        struct intel_encoder_context *encoder_context)
1023 {
1024     VAStatus vaStatus = VA_STATUS_SUCCESS;
1025     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
1026     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1027
1028     if ((!vme_context->mpeg2_level) ||
1029         (vme_context->mpeg2_level != (seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK))) {
1030         vme_context->mpeg2_level = seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK;
1031     }
1032
1033     /*Setup all the memory object*/
1034
1035     intel_vme_mpeg2_state_setup(ctx, encode_state, encoder_context);
1036     gen7_vme_mpeg2_surface_setup(ctx, encode_state, 0, encoder_context);
1037     gen7_vme_interface_setup(ctx, encode_state, encoder_context);
1038     gen7_vme_constant_setup(ctx, encode_state, encoder_context);
1039     gen7_vme_mpeg2_state_setup(ctx, encode_state, 0, encoder_context);
1040
1041     /*Programing media pipeline*/
1042     gen7_vme_mpeg2_pipeline_programing(ctx, encode_state, 0, encoder_context);
1043
1044     return vaStatus;
1045 }
1046
1047 static VAStatus
1048 gen7_vme_mpeg2_pipeline(VADriverContextP ctx,
1049                         VAProfile profile,
1050                         struct encode_state *encode_state,
1051                         struct intel_encoder_context *encoder_context)
1052 {
1053     struct i965_driver_data *i965 = i965_driver_data(ctx);
1054     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1055     VAEncSliceParameterBufferMPEG2 *slice_param = 
1056         (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
1057     VAEncSequenceParameterBufferMPEG2 *seq_param = 
1058         (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
1059  
1060     /*No need of to exec VME for Intra slice */
1061     if (slice_param->is_intra_slice) {
1062         if(!vme_context->vme_output.bo) {
1063             int w_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
1064             int h_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
1065
1066             vme_context->vme_output.num_blocks = w_in_mbs * h_in_mbs;
1067             vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
1068             vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES;
1069             vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
1070                                                       "MPEG2 VME output buffer",
1071                                                       vme_context->vme_output.num_blocks
1072                                                       * vme_context->vme_output.size_block,
1073                                                       0x1000);
1074         }
1075
1076         return VA_STATUS_SUCCESS;
1077     }
1078
1079     gen7_vme_media_init(ctx, encoder_context);
1080     gen7_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
1081     gen7_vme_run(ctx, encode_state, encoder_context);
1082     gen7_vme_stop(ctx, encode_state, encoder_context);
1083
1084     return VA_STATUS_SUCCESS;
1085 }
1086
1087 static void
1088 gen7_vme_context_destroy(void *context)
1089 {
1090     struct gen6_vme_context *vme_context = context;
1091
1092     i965_gpe_context_destroy(&vme_context->gpe_context);
1093
1094     dri_bo_unreference(vme_context->vme_output.bo);
1095     vme_context->vme_output.bo = NULL;
1096
1097     dri_bo_unreference(vme_context->vme_state.bo);
1098     vme_context->vme_state.bo = NULL;
1099
1100     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1101     vme_context->vme_batchbuffer.bo = NULL;
1102
1103     free(vme_context->vme_state_message);
1104     vme_context->vme_state_message = NULL;
1105
1106     dri_bo_unreference(vme_context->i_qp_cost_table);
1107     vme_context->i_qp_cost_table = NULL;
1108
1109     dri_bo_unreference(vme_context->p_qp_cost_table);
1110     vme_context->p_qp_cost_table = NULL;
1111
1112     dri_bo_unreference(vme_context->b_qp_cost_table);
1113     vme_context->b_qp_cost_table = NULL;
1114
1115     free(vme_context->qp_per_mb);
1116     vme_context->qp_per_mb = NULL;
1117
1118     free(vme_context);
1119 }
1120
1121 Bool gen7_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1122 {
1123     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1124     struct i965_kernel *vme_kernel_list = NULL;
1125
1126     assert(vme_context);
1127     vme_context->gpe_context.surface_state_binding_table.length =
1128         (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1129
1130     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1131     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1132     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1133
1134     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1135     vme_context->gpe_context.vfe_state.num_urb_entries = 16;
1136     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1137     vme_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1138     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1139
1140     gen7_vme_scoreboard_init(ctx, vme_context);
1141
1142     if (encoder_context->codec == CODEC_H264) {
1143         vme_kernel_list = gen7_vme_kernels;
1144         vme_context->video_coding_type = VIDEO_CODING_AVC;
1145         vme_context->vme_kernel_sum = AVC_VME_KERNEL_SUM; 
1146         encoder_context->vme_pipeline = gen7_vme_pipeline; 
1147     } else if (encoder_context->codec == CODEC_MPEG2) {
1148         vme_kernel_list = gen7_vme_mpeg2_kernels;
1149         vme_context->video_coding_type = VIDEO_CODING_MPEG2;
1150         vme_context->vme_kernel_sum = MPEG2_VME_KERNEL_SUM;
1151         encoder_context->vme_pipeline = gen7_vme_mpeg2_pipeline;
1152     } else {
1153         /* Unsupported codec */
1154         assert(0);
1155     }
1156
1157     i965_gpe_load_kernels(ctx,
1158                           &vme_context->gpe_context,
1159                           vme_kernel_list,
1160                           vme_context->vme_kernel_sum);
1161
1162     vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1163     vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1164     vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1165
1166     encoder_context->vme_context = vme_context;
1167     encoder_context->vme_context_destroy = gen7_vme_context_destroy;
1168     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1169
1170     return True;
1171 }