OSDN Git Service

build: automake options in configure.ac
[android-x86/hardware-intel-common-vaapi.git] / src / gen75_vme.c
1 /*
2  * Copyright © 2010-2012 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhao Yakui <yakui.zhao@intel.com>
26  *    Xiang Haihao <haihao.xiang@intel.com>
27  *
28  */
29
30 #include "sysdeps.h"
31
32 #include "intel_batchbuffer.h"
33 #include "intel_driver.h"
34
35 #include "i965_defines.h"
36 #include "i965_drv_video.h"
37 #include "i965_encoder.h"
38 #include "gen6_vme.h"
39 #include "gen6_mfc.h"
40
/*
 * Use the larger of the Gen6/Gen7 padded surface-state sizes so one binding
 * table layout works for both state formats; the binding table itself is
 * placed immediately after the last padded surface-state slot.
 */
#define SURFACE_STATE_PADDED_SIZE               MAX(SURFACE_STATE_PADDED_SIZE_GEN6, SURFACE_STATE_PADDED_SIZE_GEN7)
#define SURFACE_STATE_OFFSET(index)             (SURFACE_STATE_PADDED_SIZE * index)
#define BINDING_TABLE_OFFSET(index)             (SURFACE_STATE_OFFSET(MAX_MEDIA_SURFACES_GEN6) + sizeof(unsigned int) * index)

/*
 * Kernel indices; each value matches the entry's position in
 * gen75_vme_kernels[] below (intra=0, inter=1, batchbuffer=2, B-inter=3),
 * which is how the interface descriptors are laid out.
 */
#define VME_INTRA_SHADER        0
#define VME_INTER_SHADER        1
#define VME_BINTER_SHADER   3
#define VME_BATCHBUFFER         2

#define CURBE_ALLOCATION_SIZE   37              /* in 256-bit */
#define CURBE_TOTAL_DATA_LENGTH (4 * 32)        /* in byte, it should be less than or equal to CURBE_ALLOCATION_SIZE * 32 */
#define CURBE_URB_ENTRY_LENGTH  4               /* in 256-bit, it should be less than or equal to CURBE_TOTAL_DATA_LENGTH / 32 */

/* VME message length, in GRF registers — TODO confirm unit against shader sources */
#define VME_MSG_LENGTH      32
55
/*
 * Pre-compiled Haswell EU shader binaries for H.264 VME, generated from the
 * sources under shaders/vme/ (.g75b files).  Each row is four dwords.
 */
static const uint32_t gen75_vme_intra_frame[][4] = {
#include "shaders/vme/intra_frame_haswell.g75b"
};

static const uint32_t gen75_vme_inter_frame[][4] = {
#include "shaders/vme/inter_frame_haswell.g75b"
};

static const uint32_t gen75_vme_inter_bframe[][4] = {
#include "shaders/vme/inter_bframe_haswell.g75b"
};

/* Kernel that builds the second-level batch buffer of MEDIA_OBJECT commands. */
static const uint32_t gen75_vme_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g75b"
};
71
/*
 * H.264 VME kernel table.  Entry order must match the VME_*_SHADER /
 * VME_BATCHBUFFER index values: the table position is used as the
 * interface descriptor index (see gen75_vme_interface_setup()).
 */
static struct i965_kernel gen75_vme_kernels[] = {
    {
        "VME Intra Frame",
        VME_INTRA_SHADER, /*index*/
        gen75_vme_intra_frame,
        sizeof(gen75_vme_intra_frame),
        NULL              /* bo — presumably filled at kernel-load time; confirm i965_kernel usage */
    },
    {
        "VME inter Frame",
        VME_INTER_SHADER,
        gen75_vme_inter_frame,
        sizeof(gen75_vme_inter_frame),
        NULL
    },
    {
        "VME BATCHBUFFER",
        VME_BATCHBUFFER,
        gen75_vme_batchbuffer,
        sizeof(gen75_vme_batchbuffer),
        NULL
    },
    {
        "VME inter BFrame",
        VME_BINTER_SHADER,
        gen75_vme_inter_bframe,
        sizeof(gen75_vme_inter_bframe),
        NULL
    }
};
102
/*
 * MPEG-2 VME shader binaries.  Intra and batchbuffer kernels are shared
 * with the H.264 path; only the inter kernel is MPEG-2 specific.
 */
static const uint32_t gen75_vme_mpeg2_intra_frame[][4] = {
#include "shaders/vme/intra_frame_haswell.g75b"
};

static const uint32_t gen75_vme_mpeg2_inter_frame[][4] = {
#include "shaders/vme/mpeg2_inter_haswell.g75b"
};

static const uint32_t gen75_vme_mpeg2_batchbuffer[][4] = {
#include "shaders/vme/batchbuffer.g75b"
};
114
/*
 * MPEG-2 VME kernel table; same ordering contract as gen75_vme_kernels[]
 * (table position == interface descriptor index).  No B-inter kernel here.
 */
static struct i965_kernel gen75_vme_mpeg2_kernels[] = {
    {
        "VME Intra Frame",
        VME_INTRA_SHADER, /*index*/
        gen75_vme_mpeg2_intra_frame,
        sizeof(gen75_vme_mpeg2_intra_frame),
        NULL
    },
    {
        "VME inter Frame",
        VME_INTER_SHADER,
        gen75_vme_mpeg2_inter_frame,
        sizeof(gen75_vme_mpeg2_inter_frame),
        NULL
    },
    {
        "VME BATCHBUFFER",
        VME_BATCHBUFFER,
        gen75_vme_mpeg2_batchbuffer,
        sizeof(gen75_vme_mpeg2_batchbuffer),
        NULL
    },
};
138
139 /* only used for VME source surface state */
140 static void
141 gen75_vme_source_surface_state(VADriverContextP ctx,
142                                int index,
143                                struct object_surface *obj_surface,
144                                struct intel_encoder_context *encoder_context)
145 {
146     struct gen6_vme_context *vme_context = encoder_context->vme_context;
147
148     vme_context->vme_surface2_setup(ctx,
149                                     &vme_context->gpe_context,
150                                     obj_surface,
151                                     BINDING_TABLE_OFFSET(index),
152                                     SURFACE_STATE_OFFSET(index));
153 }
154
155 static void
156 gen75_vme_media_source_surface_state(VADriverContextP ctx,
157                                      int index,
158                                      struct object_surface *obj_surface,
159                                      struct intel_encoder_context *encoder_context)
160 {
161     struct gen6_vme_context *vme_context = encoder_context->vme_context;
162
163     vme_context->vme_media_rw_surface_setup(ctx,
164                                             &vme_context->gpe_context,
165                                             obj_surface,
166                                             BINDING_TABLE_OFFSET(index),
167                                             SURFACE_STATE_OFFSET(index),
168                                             0);
169 }
170
171 static void
172 gen75_vme_media_chroma_source_surface_state(VADriverContextP ctx,
173                                             int index,
174                                             struct object_surface *obj_surface,
175                                             struct intel_encoder_context *encoder_context)
176 {
177     struct gen6_vme_context *vme_context = encoder_context->vme_context;
178
179     vme_context->vme_media_chroma_surface_setup(ctx,
180                                                 &vme_context->gpe_context,
181                                                 obj_surface,
182                                                 BINDING_TABLE_OFFSET(index),
183                                                 SURFACE_STATE_OFFSET(index),
184                                                 0);
185 }
186
187 static void
188 gen75_vme_output_buffer_setup(VADriverContextP ctx,
189                               struct encode_state *encode_state,
190                               int index,
191                               struct intel_encoder_context *encoder_context)
192
193 {
194     struct i965_driver_data *i965 = i965_driver_data(ctx);
195     struct gen6_vme_context *vme_context = encoder_context->vme_context;
196     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
197     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
198     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
199     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
200     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
201
202     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
203     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
204
205     if (is_intra)
206         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES * 2;
207     else
208         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES * 24;
209     /*
210      * Inter MV . 32-byte Intra search + 16 IME info + 128 IME MV + 32 IME Ref
211      * + 16 FBR Info + 128 FBR MV + 32 FBR Ref.
212      * 16 * (2 + 2 * (1 + 8 + 2))= 16 * 24.
213      */
214
215     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
216                                               "VME output buffer",
217                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
218                                               0x1000);
219     assert(vme_context->vme_output.bo);
220     vme_context->vme_buffer_suface_setup(ctx,
221                                          &vme_context->gpe_context,
222                                          &vme_context->vme_output,
223                                          BINDING_TABLE_OFFSET(index),
224                                          SURFACE_STATE_OFFSET(index));
225 }
226
227 static void
228 gen75_vme_output_vme_batchbuffer_setup(VADriverContextP ctx,
229                                        struct encode_state *encode_state,
230                                        int index,
231                                        struct intel_encoder_context *encoder_context)
232
233 {
234     struct i965_driver_data *i965 = i965_driver_data(ctx);
235     struct gen6_vme_context *vme_context = encoder_context->vme_context;
236     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
237     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
238     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
239
240     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
241     vme_context->vme_batchbuffer.size_block = 64; /* 4 OWORDs */
242     vme_context->vme_batchbuffer.pitch = 16;
243     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr,
244                                                    "VME batchbuffer",
245                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
246                                                    0x1000);
247     vme_context->vme_buffer_suface_setup(ctx,
248                                          &vme_context->gpe_context,
249                                          &vme_context->vme_batchbuffer,
250                                          BINDING_TABLE_OFFSET(index),
251                                          SURFACE_STATE_OFFSET(index));
252 }
253
/*
 * Bind all surfaces needed by the H.264 VME kernels.
 * Binding-table layout used here:
 *   0       - source picture (VME surface2 view)
 *   1, 2    - list0 / list1 reference pictures (set below for inter slices)
 *   3       - VME output buffer
 *   4       - source picture (media RW, luma)
 *   5       - second-level VME batch buffer
 *   6       - source picture (media RW, chroma)
 *   INTEL_COST_TABLE_OFFSET - MV/MB cost table surface
 */
static VAStatus
gen75_vme_surface_setup(VADriverContextP ctx,
                        struct encode_state *encode_state,
                        int is_intra,
                        struct intel_encoder_context *encoder_context)
{
    struct object_surface *obj_surface;

    /*Setup surfaces state*/
    /* current picture for encoding */
    obj_surface = encode_state->input_yuv_object;
    gen75_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
    gen75_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
    gen75_vme_media_chroma_source_surface_state(ctx, 6, obj_surface, encoder_context);

    if (!is_intra) {
        VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
        int slice_type;

        slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
        assert(slice_type != SLICE_TYPE_I && slice_type != SLICE_TYPE_SI);

        /* list0 reference at slot 1; list1 at slot 2 for B slices */
        intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 0, 1, gen75_vme_source_surface_state);

        if (slice_type == SLICE_TYPE_B)
            intel_avc_vme_reference_state(ctx, encode_state, encoder_context, 1, 2, gen75_vme_source_surface_state);
    }

    /* VME output */
    gen75_vme_output_buffer_setup(ctx, encode_state, 3, encoder_context);
    gen75_vme_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
    intel_h264_setup_cost_surface(ctx, encode_state, encoder_context,
                                  BINDING_TABLE_OFFSET(INTEL_COST_TABLE_OFFSET),
                                  SURFACE_STATE_OFFSET(INTEL_COST_TABLE_OFFSET));

    return VA_STATUS_SUCCESS;
}
291
292 static VAStatus gen75_vme_interface_setup(VADriverContextP ctx,
293                                           struct encode_state *encode_state,
294                                           struct intel_encoder_context *encoder_context)
295 {
296     struct gen6_vme_context *vme_context = encoder_context->vme_context;
297     struct gen6_interface_descriptor_data *desc;
298     int i;
299     dri_bo *bo;
300
301     bo = vme_context->gpe_context.idrt.bo;
302     dri_bo_map(bo, 1);
303     assert(bo->virtual);
304     desc = bo->virtual;
305
306     for (i = 0; i < vme_context->vme_kernel_sum; i++) {
307         struct i965_kernel *kernel;
308         kernel = &vme_context->gpe_context.kernels[i];
309         assert(sizeof(*desc) == 32);
310         /*Setup the descritor table*/
311         memset(desc, 0, sizeof(*desc));
312         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
313         desc->desc2.sampler_count = 0; /* FIXME: */
314         desc->desc2.sampler_state_pointer = 0;
315         desc->desc3.binding_table_entry_count = 1; /* FIXME: */
316         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
317         desc->desc4.constant_urb_entry_read_offset = 0;
318         desc->desc4.constant_urb_entry_read_length = CURBE_URB_ENTRY_LENGTH;
319
320         /*kernel start*/
321         dri_bo_emit_reloc(bo,
322                           I915_GEM_DOMAIN_INSTRUCTION, 0,
323                           0,
324                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
325                           kernel->bo);
326         desc++;
327     }
328     dri_bo_unmap(bo);
329
330     return VA_STATUS_SUCCESS;
331 }
332
333 static VAStatus gen75_vme_constant_setup(VADriverContextP ctx,
334                                          struct encode_state *encode_state,
335                                          struct intel_encoder_context *encoder_context,
336                                          int denom)
337 {
338     struct gen6_vme_context *vme_context = encoder_context->vme_context;
339     unsigned char *constant_buffer;
340     unsigned int *vme_state_message;
341     int mv_num = 32;
342
343     vme_state_message = (unsigned int *)vme_context->vme_state_message;
344
345     if (encoder_context->codec == CODEC_H264 ||
346         encoder_context->codec == CODEC_H264_MVC) {
347         if (vme_context->h264_level >= 30) {
348             mv_num = 16 / denom;
349
350             if (vme_context->h264_level >= 31)
351                 mv_num = 8 / denom;
352         }
353     } else if (encoder_context->codec == CODEC_MPEG2) {
354         mv_num = 2 / denom;
355     }
356
357     vme_state_message[31] = mv_num;
358
359     dri_bo_map(vme_context->gpe_context.curbe.bo, 1);
360     assert(vme_context->gpe_context.curbe.bo->virtual);
361     constant_buffer = vme_context->gpe_context.curbe.bo->virtual;
362
363     /* VME MV/Mb cost table is passed by using const buffer */
364     /* Now it uses the fixed search path. So it is constructed directly
365      * in the GPU shader.
366      */
367     memcpy(constant_buffer, (char *)vme_context->vme_state_message, 128);
368
369     dri_bo_unmap(vme_context->gpe_context.curbe.bo);
370
371     return VA_STATUS_SUCCESS;
372 }
373
/*
 * Intra MB mode cost, one packed dword per QP (52 entries, QP 0..51).
 * Written into vme_state_message[0] by gen75_vme_state_setup_fixup().
 */
static const unsigned int intra_mb_mode_cost_table[] = {
    0x31110001, // for qp0
    0x09110001, // for qp1
    0x15030001, // for qp2
    0x0b030001, // for qp3
    0x0d030011, // for qp4
    0x17210011, // for qp5
    0x41210011, // for qp6
    0x19210011, // for qp7
    0x25050003, // for qp8
    0x1b130003, // for qp9
    0x1d130003, // for qp10
    0x27070021, // for qp11
    0x51310021, // for qp12
    0x29090021, // for qp13
    0x35150005, // for qp14
    0x2b0b0013, // for qp15
    0x2d0d0013, // for qp16
    0x37170007, // for qp17
    0x61410031, // for qp18
    0x39190009, // for qp19
    0x45250015, // for qp20
    0x3b1b000b, // for qp21
    0x3d1d000d, // for qp22
    0x47270017, // for qp23
    0x71510041, // for qp24 ! center for qp=0..30
    0x49290019, // for qp25
    0x55350025, // for qp26
    0x4b2b001b, // for qp27
    0x4d2d001d, // for qp28
    0x57370027, // for qp29
    0x81610051, // for qp30
    0x57270017, // for qp31
    0x81510041, // for qp32 ! center for qp=31..51
    0x59290019, // for qp33
    0x65350025, // for qp34
    0x5b2b001b, // for qp35
    0x5d2d001d, // for qp36
    0x67370027, // for qp37
    0x91610051, // for qp38
    0x69390029, // for qp39
    0x75450035, // for qp40
    0x6b3b002b, // for qp41
    0x6d3d002d, // for qp42
    0x77470037, // for qp43
    0xa1710061, // for qp44
    0x79490039, // for qp45
    0x85550045, // for qp46
    0x7b4b003b, // for qp47
    0x7d4d003d, // for qp48
    0x87570047, // for qp49
    0xb1810071, // for qp50
    0x89590049  // for qp51
};
428
429 static void gen75_vme_state_setup_fixup(VADriverContextP ctx,
430                                         struct encode_state *encode_state,
431                                         struct intel_encoder_context *encoder_context,
432                                         unsigned int *vme_state_message)
433 {
434     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
435     VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
436     VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
437
438     if (slice_param->slice_type != SLICE_TYPE_I &&
439         slice_param->slice_type != SLICE_TYPE_SI)
440         return;
441     if (encoder_context->rate_control_mode == VA_RC_CQP)
442         vme_state_message[0] = intra_mb_mode_cost_table[pic_param->pic_init_qp + slice_param->slice_qp_delta];
443     else
444         vme_state_message[0] = intra_mb_mode_cost_table[mfc_context->brc.qp_prime_y[encoder_context->layer.curr_frame_layer_id][SLICE_TYPE_I]];
445 }
446
447 static VAStatus gen75_vme_vme_state_setup(VADriverContextP ctx,
448                                           struct encode_state *encode_state,
449                                           int is_intra,
450                                           struct intel_encoder_context *encoder_context)
451 {
452     struct gen6_vme_context *vme_context = encoder_context->vme_context;
453     unsigned int *vme_state_message;
454     int i;
455
456     //pass the MV/Mb cost into VME message on HASWell
457     assert(vme_context->vme_state_message);
458     vme_state_message = (unsigned int *)vme_context->vme_state_message;
459
460     vme_state_message[0] = 0x4a4a4a4a;
461     vme_state_message[1] = 0x4a4a4a4a;
462     vme_state_message[2] = 0x4a4a4a4a;
463     vme_state_message[3] = 0x22120200;
464     vme_state_message[4] = 0x62524232;
465
466     for (i = 5; i < 8; i++) {
467         vme_state_message[i] = 0;
468     }
469
470     switch (encoder_context->codec) {
471     case CODEC_H264:
472     case CODEC_H264_MVC:
473         gen75_vme_state_setup_fixup(ctx, encode_state, encoder_context, vme_state_message);
474
475         break;
476
477     default:
478         /* no fixup */
479         break;
480     }
481
482     return VA_STATUS_SUCCESS;
483 }
484
/*
 * Fill the second-level batch buffer with one MEDIA_OBJECT command per
 * macroblock.  Each command carries 3 dwords of inline data: MB position,
 * flags (quality level, intra-prediction neighbor availability,
 * transform_8x8), and the per-MB QP.
 */
static void
gen75_vme_fill_vme_batchbuffer(VADriverContextP ctx,
                               struct encode_state *encode_state,
                               int mb_width, int mb_height,
                               int kernel,
                               int transform_8x8_mode_flag,
                               struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s;
    unsigned int *command_ptr;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
    int qp;
    int slice_type = intel_avc_enc_slice_type_fixup(slice_param->slice_type);
    int qp_mb, qp_index;

    /* Frame-level QP: from the app in CQP mode, from BRC otherwise */
    if (encoder_context->rate_control_mode == VA_RC_CQP)
        qp = pic_param->pic_init_qp + slice_param->slice_qp_delta;
    else
        qp = mfc_context->brc.qp_prime_y[encoder_context->layer.curr_frame_layer_id][slice_type];

    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer;
        int slice_mb_begin = pSliceParameter->macroblock_address;
        int slice_mb_number = pSliceParameter->num_macroblocks;
        unsigned int mb_intra_ub;
        /* nonzero when the slice does not start at the left edge of a MB row */
        int slice_mb_x = pSliceParameter->macroblock_address % mb_width;
        for (i = 0; i < slice_mb_number;) {
            int mb_count = i + slice_mb_begin;
            mb_x = mb_count % mb_width;
            mb_y = mb_count / mb_width;
            mb_intra_ub = 0;
            /* Neighbor availability for intra prediction, within the picture */
            if (mb_x != 0) {
                mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
            }
            if (mb_y != 0) {
                mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;
                if (mb_x != 0)
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;
                if (mb_x != (mb_width - 1))
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
            }
            /* First MB row of the slice: neighbors above belong to another
             * slice and are not available */
            if (i < mb_width) {
                if (i == 0)
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);
                mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);
                if ((i == (mb_width - 1)) && slice_mb_x) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }
            }

            if ((i == mb_width) && slice_mb_x) {
                mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
            }
            /* MEDIA_OBJECT: 9 total dwords (header len field is dwords - 2) */
            *command_ptr++ = (CMD_MEDIA_OBJECT | (9 - 2));
            *command_ptr++ = kernel;
            *command_ptr++ = 0;
            *command_ptr++ = 0;
            *command_ptr++ = 0;
            *command_ptr++ = 0;

            /*inline data */
            *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
            *command_ptr++ = ((encoder_context->quality_level << 24) | (1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));
            /* qp occupies one byte */
            if (vme_context->roi_enabled) {
                qp_index = mb_y * mb_width + mb_x;
                qp_mb = *(vme_context->qp_per_mb + qp_index);
            } else
                qp_mb = qp;
            *command_ptr++ = qp_mb;

            i += 1;
        }
    }

    /* Terminate the second-level batch */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
572
573 static void gen75_vme_media_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
574 {
575     struct gen6_vme_context *vme_context = encoder_context->vme_context;
576
577     i965_gpe_context_init(ctx, &vme_context->gpe_context);
578
579     /* VME output buffer */
580     dri_bo_unreference(vme_context->vme_output.bo);
581     vme_context->vme_output.bo = NULL;
582
583     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
584     vme_context->vme_batchbuffer.bo = NULL;
585
586     /* VME state */
587     dri_bo_unreference(vme_context->vme_state.bo);
588     vme_context->vme_state.bo = NULL;
589 }
590
/*
 * Select the VME kernel, build the second-level batch (walker-based when
 * hardware scoreboarding is allowed, MEDIA_OBJECT-per-MB otherwise), and
 * chain it from the main batch buffer.
 */
static void gen75_vme_pipeline_programing(VADriverContextP ctx,
                                          struct encode_state *encode_state,
                                          struct intel_encoder_context *encoder_context)
{
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
    int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
    int kernel_shader;
    bool allow_hwscore = true;
    int s;
    unsigned int is_low_quality = (encoder_context->quality_level == ENCODER_LOW_QUALITY);

    /* HW scoreboarding is only used when not in the low-quality preset and
     * every slice starts at the beginning of a macroblock row */
    if (is_low_quality)
        allow_hwscore = false;
    else {
        for (s = 0; s < encode_state->num_slice_params_ext; s++) {
            pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[s]->buffer;
            if ((pSliceParameter->macroblock_address % width_in_mbs)) {
                allow_hwscore = false;
                break;
            }
        }
    }

    /* NOTE(review): the loop above reuses pSliceParameter, so the slice_type
     * checks below read the last slice examined (or slice 0 in the
     * low-quality path), not necessarily slice 0 — confirm this is intended
     * (it is harmless when all slices share one type). */
    if ((pSliceParameter->slice_type == SLICE_TYPE_I) ||
        (pSliceParameter->slice_type == SLICE_TYPE_SI)) {
        kernel_shader = VME_INTRA_SHADER;
    } else if ((pSliceParameter->slice_type == SLICE_TYPE_P) ||
               (pSliceParameter->slice_type == SLICE_TYPE_SP)) {
        kernel_shader = VME_INTER_SHADER;
    } else {
        /* B slices fall back to the P kernel when hwscore is unavailable */
        kernel_shader = VME_BINTER_SHADER;
        if (!allow_hwscore)
            kernel_shader = VME_INTER_SHADER;
    }
    if (allow_hwscore)
        gen7_vme_walker_fill_vme_batchbuffer(ctx,
                                             encode_state,
                                             width_in_mbs, height_in_mbs,
                                             kernel_shader,
                                             pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
                                             encoder_context);
    else
        gen75_vme_fill_vme_batchbuffer(ctx,
                                       encode_state,
                                       width_in_mbs, height_in_mbs,
                                       kernel_shader,
                                       pPicParameter->pic_fields.bits.transform_8x8_mode_flag,
                                       encoder_context);

    /* Chain the second-level batch from the main batch buffer */
    intel_batchbuffer_start_atomic(batch, 0x1000);
    gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
    BEGIN_BATCH(batch, 2);
    OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
    OUT_RELOC(batch,
              vme_context->vme_batchbuffer.bo,
              I915_GEM_DOMAIN_COMMAND, 0,
              0);
    ADVANCE_BATCH(batch);

    intel_batchbuffer_end_atomic(batch);
}
657
658 static VAStatus gen75_vme_prepare(VADriverContextP ctx,
659                                   struct encode_state *encode_state,
660                                   struct intel_encoder_context *encoder_context)
661 {
662     VAStatus vaStatus = VA_STATUS_SUCCESS;
663     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
664     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
665     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
666     struct gen6_vme_context *vme_context = encoder_context->vme_context;
667
668     if (!vme_context->h264_level ||
669         (vme_context->h264_level != pSequenceParameter->level_idc)) {
670         vme_context->h264_level = pSequenceParameter->level_idc;
671     }
672
673     intel_vme_update_mbmv_cost(ctx, encode_state, encoder_context);
674     intel_h264_initialize_mbmv_cost(ctx, encode_state, encoder_context);
675     intel_h264_enc_roi_config(ctx, encode_state, encoder_context);
676
677     /*Setup all the memory object*/
678     gen75_vme_surface_setup(ctx, encode_state, is_intra, encoder_context);
679     gen75_vme_interface_setup(ctx, encode_state, encoder_context);
680     //gen75_vme_vme_state_setup(ctx, encode_state, is_intra, encoder_context);
681     gen75_vme_constant_setup(ctx, encode_state, encoder_context, (pSliceParameter->slice_type == SLICE_TYPE_B) ? 2 : 1);
682
683     /*Programing media pipeline*/
684     gen75_vme_pipeline_programing(ctx, encode_state, encoder_context);
685
686     return vaStatus;
687 }
688
/*
 * Submit the VME work by flushing the accumulated batch buffer to the GPU.
 */
static VAStatus gen75_vme_run(VADriverContextP ctx,
                              struct encode_state *encode_state,
                              struct intel_encoder_context *encoder_context)
{
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    intel_batchbuffer_flush(batch);

    return VA_STATUS_SUCCESS;
}
699
/*
 * Post-run hook; nothing to do on Haswell, kept for pipeline symmetry.
 */
static VAStatus gen75_vme_stop(VADriverContextP ctx,
                               struct encode_state *encode_state,
                               struct intel_encoder_context *encoder_context)
{
    return VA_STATUS_SUCCESS;
}
706
707 static VAStatus
708 gen75_vme_pipeline(VADriverContextP ctx,
709                    VAProfile profile,
710                    struct encode_state *encode_state,
711                    struct intel_encoder_context *encoder_context)
712 {
713     gen75_vme_media_init(ctx, encoder_context);
714     gen75_vme_prepare(ctx, encode_state, encoder_context);
715     gen75_vme_run(ctx, encode_state, encoder_context);
716     gen75_vme_stop(ctx, encode_state, encoder_context);
717
718     return VA_STATUS_SUCCESS;
719 }
720
721 static void
722 gen75_vme_mpeg2_output_buffer_setup(VADriverContextP ctx,
723                                     struct encode_state *encode_state,
724                                     int index,
725                                     int is_intra,
726                                     struct intel_encoder_context *encoder_context)
727
728 {
729     struct i965_driver_data *i965 = i965_driver_data(ctx);
730     struct gen6_vme_context *vme_context = encoder_context->vme_context;
731     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
732     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
733     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
734
735     vme_context->vme_output.num_blocks = width_in_mbs * height_in_mbs;
736     vme_context->vme_output.pitch = 16; /* in bytes, always 16 */
737
738     if (is_intra)
739         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES * 2;
740     else
741         vme_context->vme_output.size_block = INTRA_VME_OUTPUT_IN_BYTES * 24;
742     /*
743      * Inter MV . 32-byte Intra search + 16 IME info + 128 IME MV + 32 IME Ref
744      * + 16 FBR Info + 128 FBR MV + 32 FBR Ref.
745      * 16 * (2 + 2 * (1 + 8 + 2))= 16 * 24.
746      */
747
748     vme_context->vme_output.bo = dri_bo_alloc(i965->intel.bufmgr,
749                                               "VME output buffer",
750                                               vme_context->vme_output.num_blocks * vme_context->vme_output.size_block,
751                                               0x1000);
752     assert(vme_context->vme_output.bo);
753     vme_context->vme_buffer_suface_setup(ctx,
754                                          &vme_context->gpe_context,
755                                          &vme_context->vme_output,
756                                          BINDING_TABLE_OFFSET(index),
757                                          SURFACE_STATE_OFFSET(index));
758 }
759
760 static void
761 gen75_vme_mpeg2_output_vme_batchbuffer_setup(VADriverContextP ctx,
762                                              struct encode_state *encode_state,
763                                              int index,
764                                              struct intel_encoder_context *encoder_context)
765
766 {
767     struct i965_driver_data *i965 = i965_driver_data(ctx);
768     struct gen6_vme_context *vme_context = encoder_context->vme_context;
769     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
770     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
771     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
772
773     vme_context->vme_batchbuffer.num_blocks = width_in_mbs * height_in_mbs + 1;
774     vme_context->vme_batchbuffer.size_block = 64; /* 4 OWORDs */
775     vme_context->vme_batchbuffer.pitch = 16;
776     vme_context->vme_batchbuffer.bo = dri_bo_alloc(i965->intel.bufmgr,
777                                                    "VME batchbuffer",
778                                                    vme_context->vme_batchbuffer.num_blocks * vme_context->vme_batchbuffer.size_block,
779                                                    0x1000);
780     vme_context->vme_buffer_suface_setup(ctx,
781                                          &vme_context->gpe_context,
782                                          &vme_context->vme_batchbuffer,
783                                          BINDING_TABLE_OFFSET(index),
784                                          SURFACE_STATE_OFFSET(index));
785 }
786
787 static VAStatus
788 gen75_vme_mpeg2_surface_setup(VADriverContextP ctx,
789                               struct encode_state *encode_state,
790                               int is_intra,
791                               struct intel_encoder_context *encoder_context)
792 {
793     struct object_surface *obj_surface;
794
795     /*Setup surfaces state*/
796     /* current picture for encoding */
797     obj_surface = encode_state->input_yuv_object;
798     gen75_vme_source_surface_state(ctx, 0, obj_surface, encoder_context);
799     gen75_vme_media_source_surface_state(ctx, 4, obj_surface, encoder_context);
800     gen75_vme_media_chroma_source_surface_state(ctx, 6, obj_surface, encoder_context);
801
802     if (!is_intra) {
803         /* reference 0 */
804         obj_surface = encode_state->reference_objects[0];
805         if (obj_surface->bo != NULL)
806             gen75_vme_source_surface_state(ctx, 1, obj_surface, encoder_context);
807
808         /* reference 1 */
809         obj_surface = encode_state->reference_objects[1];
810         if (obj_surface && obj_surface->bo != NULL)
811             gen75_vme_source_surface_state(ctx, 2, obj_surface, encoder_context);
812     }
813
814     /* VME output */
815     gen75_vme_mpeg2_output_buffer_setup(ctx, encode_state, 3, is_intra, encoder_context);
816     gen75_vme_mpeg2_output_vme_batchbuffer_setup(ctx, encode_state, 5, encoder_context);
817
818     return VA_STATUS_SUCCESS;
819 }
820
static void
gen75_vme_mpeg2_fill_vme_batchbuffer(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     int mb_width, int mb_height,
                                     int kernel,
                                     int transform_8x8_mode_flag,
                                     struct intel_encoder_context *encoder_context)
{
    /*
     * Fill the second-level batch buffer for MPEG-2 VME: one MEDIA_OBJECT
     * command (6 header DWORDs + 2 inline-data DWORDs = 8 DWORDs) per
     * macroblock, walked slice by slice in raster order, terminated with
     * MI_BATCH_BUFFER_END.  The inline data carries the MB position and an
     * intra-neighbor availability mask for the kernel.
     */
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    int mb_x = 0, mb_y = 0;
    int i, s, j;
    unsigned int *command_ptr;


    dri_bo_map(vme_context->vme_batchbuffer.bo, 1);
    command_ptr = vme_context->vme_batchbuffer.bo->virtual;

    for (s = 0; s < encode_state->num_slice_params_ext; s++) {
        VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;

        for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
            int slice_mb_begin = slice_param->macroblock_address;
            int slice_mb_number = slice_param->num_macroblocks;
            unsigned int mb_intra_ub;
            /* Non-zero when the slice does not start at the left edge of a
             * macroblock row. */
            int slice_mb_x = slice_param->macroblock_address % mb_width;

            for (i = 0; i < slice_mb_number;) {
                int mb_count = i + slice_mb_begin;

                mb_x = mb_count % mb_width;
                mb_y = mb_count / mb_width;
                mb_intra_ub = 0;

                /* Frame-level neighbor availability: left (A/E), top (B),
                 * top-left (D), top-right (C). */
                if (mb_x != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_AE;
                }

                if (mb_y != 0) {
                    mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_B;

                    if (mb_x != 0)
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_D;

                    if (mb_x != (mb_width - 1))
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                }

                /* Restrict availability for the first row of MBs within the
                 * slice: neighbors in a previous slice cannot be used. */
                if (i < mb_width) {
                    if (i == 0)
                        mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_AE);

                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_BCD_MASK);

                    if ((i == (mb_width - 1)) && slice_mb_x) {
                        mb_intra_ub |= INTRA_PRED_AVAIL_FLAG_C;
                    }
                }

                /* First MB of the second slice row when the slice is not
                 * row-aligned: the top-left neighbor lies outside the slice. */
                if ((i == mb_width) && slice_mb_x) {
                    mb_intra_ub &= ~(INTRA_PRED_AVAIL_FLAG_D);
                }

                /* MEDIA_OBJECT header; DWord length field is (8 - 2). */
                *command_ptr++ = (CMD_MEDIA_OBJECT | (8 - 2));
                *command_ptr++ = kernel;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;
                *command_ptr++ = 0;

                /*inline data */
                *command_ptr++ = (mb_width << 16 | mb_y << 8 | mb_x);
                *command_ptr++ = ((1 << 16) | transform_8x8_mode_flag | (mb_intra_ub << 8));

                i += 1;
            }

            slice_param++;
        }
    }

    /* Terminate the second-level batch. */
    *command_ptr++ = 0;
    *command_ptr++ = MI_BATCH_BUFFER_END;

    dri_bo_unmap(vme_context->vme_batchbuffer.bo);
}
906
907 static void
908 gen75_vme_mpeg2_pipeline_programing(VADriverContextP ctx,
909                                     struct encode_state *encode_state,
910                                     int is_intra,
911                                     struct intel_encoder_context *encoder_context)
912 {
913     struct gen6_vme_context *vme_context = encoder_context->vme_context;
914     struct intel_batchbuffer *batch = encoder_context->base.batch;
915     VAEncPictureParameterBufferMPEG2 *pic_param = NULL;
916     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
917     int width_in_mbs = ALIGN(seq_param->picture_width, 16) / 16;
918     int height_in_mbs = ALIGN(seq_param->picture_height, 16) / 16;
919     bool allow_hwscore = true;
920     int s;
921     int kernel_shader;
922
923     pic_param = (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
924
925     for (s = 0; s < encode_state->num_slice_params_ext; s++) {
926         int j;
927         VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[s]->buffer;
928
929         for (j = 0; j < encode_state->slice_params_ext[s]->num_elements; j++) {
930             if (slice_param->macroblock_address % width_in_mbs) {
931                 allow_hwscore = false;
932                 break;
933             }
934         }
935     }
936
937     pic_param = (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
938     if (pic_param->picture_type == VAEncPictureTypeIntra) {
939         allow_hwscore = false;
940         kernel_shader = VME_INTRA_SHADER;
941     } else {
942         kernel_shader = VME_INTER_SHADER;
943     }
944
945     if (allow_hwscore)
946         gen7_vme_mpeg2_walker_fill_vme_batchbuffer(ctx,
947                                                    encode_state,
948                                                    width_in_mbs, height_in_mbs,
949                                                    kernel_shader,
950                                                    encoder_context);
951     else
952         gen75_vme_mpeg2_fill_vme_batchbuffer(ctx,
953                                              encode_state,
954                                              width_in_mbs, height_in_mbs,
955                                              kernel_shader,
956                                              0,
957                                              encoder_context);
958
959     intel_batchbuffer_start_atomic(batch, 0x1000);
960     gen6_gpe_pipeline_setup(ctx, &vme_context->gpe_context, batch);
961     BEGIN_BATCH(batch, 2);
962     OUT_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
963     OUT_RELOC(batch,
964               vme_context->vme_batchbuffer.bo,
965               I915_GEM_DOMAIN_COMMAND, 0,
966               0);
967     ADVANCE_BATCH(batch);
968
969     intel_batchbuffer_end_atomic(batch);
970 }
971
972 static VAStatus
973 gen75_vme_mpeg2_prepare(VADriverContextP ctx,
974                         struct encode_state *encode_state,
975                         struct intel_encoder_context *encoder_context)
976 {
977     VAStatus vaStatus = VA_STATUS_SUCCESS;
978     VAEncSliceParameterBufferMPEG2 *slice_param = (VAEncSliceParameterBufferMPEG2 *)encode_state->slice_params_ext[0]->buffer;
979
980     VAEncSequenceParameterBufferMPEG2 *seq_param = (VAEncSequenceParameterBufferMPEG2 *)encode_state->seq_param_ext->buffer;
981     struct gen6_vme_context *vme_context = encoder_context->vme_context;
982
983     if ((!vme_context->mpeg2_level) ||
984         (vme_context->mpeg2_level != (seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK))) {
985         vme_context->mpeg2_level = seq_param->sequence_extension.bits.profile_and_level_indication & MPEG2_LEVEL_MASK;
986     }
987
988     /*Setup all the memory object*/
989     gen75_vme_mpeg2_surface_setup(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
990     gen75_vme_interface_setup(ctx, encode_state, encoder_context);
991     gen75_vme_vme_state_setup(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
992     intel_vme_mpeg2_state_setup(ctx, encode_state, encoder_context);
993     gen75_vme_constant_setup(ctx, encode_state, encoder_context, 1);
994
995     /*Programing media pipeline*/
996     gen75_vme_mpeg2_pipeline_programing(ctx, encode_state, slice_param->is_intra_slice, encoder_context);
997
998     return vaStatus;
999 }
1000
static VAStatus
gen75_vme_mpeg2_pipeline(VADriverContextP ctx,
                         VAProfile profile,
                         struct encode_state *encode_state,
                         struct intel_encoder_context *encoder_context)
{
    /*
     * VME pipeline entry point installed for CODEC_MPEG2 in
     * gen75_vme_context_init: allocate/refresh GPU state, program the
     * MPEG-2 media pipeline, then submit and finish.
     * NOTE: the intermediate return codes are not propagated; this always
     * reports VA_STATUS_SUCCESS.
     */
    gen75_vme_media_init(ctx, encoder_context);
    gen75_vme_mpeg2_prepare(ctx, encode_state, encoder_context);
    gen75_vme_run(ctx, encode_state, encoder_context);
    gen75_vme_stop(ctx, encode_state, encoder_context);

    return VA_STATUS_SUCCESS;
}
1014
1015 static void
1016 gen75_vme_context_destroy(void *context)
1017 {
1018     struct gen6_vme_context *vme_context = context;
1019
1020     i965_gpe_context_destroy(&vme_context->gpe_context);
1021
1022     dri_bo_unreference(vme_context->vme_output.bo);
1023     vme_context->vme_output.bo = NULL;
1024
1025     dri_bo_unreference(vme_context->vme_state.bo);
1026     vme_context->vme_state.bo = NULL;
1027
1028     dri_bo_unreference(vme_context->vme_batchbuffer.bo);
1029     vme_context->vme_batchbuffer.bo = NULL;
1030
1031     free(vme_context->vme_state_message);
1032     vme_context->vme_state_message = NULL;
1033
1034     dri_bo_unreference(vme_context->i_qp_cost_table);
1035     vme_context->i_qp_cost_table = NULL;
1036
1037     dri_bo_unreference(vme_context->p_qp_cost_table);
1038     vme_context->p_qp_cost_table = NULL;
1039
1040     dri_bo_unreference(vme_context->b_qp_cost_table);
1041     vme_context->b_qp_cost_table = NULL;
1042
1043     free(vme_context->qp_per_mb);
1044     vme_context->qp_per_mb = NULL;
1045
1046     free(vme_context);
1047 }
1048
1049 Bool gen75_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1050 {
1051     struct gen6_vme_context *vme_context = calloc(1, sizeof(struct gen6_vme_context));
1052     struct i965_kernel *vme_kernel_list = NULL;
1053     int i965_kernel_num;
1054
1055     switch (encoder_context->codec) {
1056     case CODEC_H264:
1057     case CODEC_H264_MVC:
1058         vme_kernel_list = gen75_vme_kernels;
1059         encoder_context->vme_pipeline = gen75_vme_pipeline;
1060         i965_kernel_num = sizeof(gen75_vme_kernels) / sizeof(struct i965_kernel);
1061         break;
1062
1063     case CODEC_MPEG2:
1064         vme_kernel_list = gen75_vme_mpeg2_kernels;
1065         encoder_context->vme_pipeline = gen75_vme_mpeg2_pipeline;
1066         i965_kernel_num = sizeof(gen75_vme_mpeg2_kernels) / sizeof(struct i965_kernel);
1067
1068         break;
1069
1070     default:
1071         /* never get here */
1072         assert(0);
1073
1074         break;
1075     }
1076
1077     assert(vme_context);
1078     vme_context->vme_kernel_sum = i965_kernel_num;
1079     vme_context->gpe_context.surface_state_binding_table.length = (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1080
1081     vme_context->gpe_context.idrt.max_entries = MAX_INTERFACE_DESC_GEN6;
1082     vme_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1083
1084     vme_context->gpe_context.curbe.length = CURBE_TOTAL_DATA_LENGTH;
1085
1086     vme_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1087     vme_context->gpe_context.vfe_state.num_urb_entries = 64;
1088     vme_context->gpe_context.vfe_state.gpgpu_mode = 0;
1089     vme_context->gpe_context.vfe_state.urb_entry_size = 16;
1090     vme_context->gpe_context.vfe_state.curbe_allocation_size = CURBE_ALLOCATION_SIZE - 1;
1091
1092     gen7_vme_scoreboard_init(ctx, vme_context);
1093
1094     i965_gpe_load_kernels(ctx,
1095                           &vme_context->gpe_context,
1096                           vme_kernel_list,
1097                           i965_kernel_num);
1098     vme_context->vme_surface2_setup = gen7_gpe_surface2_setup;
1099     vme_context->vme_media_rw_surface_setup = gen7_gpe_media_rw_surface_setup;
1100     vme_context->vme_buffer_suface_setup = gen7_gpe_buffer_suface_setup;
1101     vme_context->vme_media_chroma_surface_setup = gen75_gpe_media_chroma_surface_setup;
1102
1103     encoder_context->vme_context = vme_context;
1104     encoder_context->vme_context_destroy = gen75_vme_context_destroy;
1105
1106     vme_context->vme_state_message = malloc(VME_MSG_LENGTH * sizeof(int));
1107
1108     return True;
1109 }