/*
 * Copyright © 2011 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Xiang Haihao <haihao.xiang@intel.com>
 *
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <va/va_dec_jpeg.h>

#include "intel_batchbuffer.h"
#include "intel_driver.h"

#include "i965_defines.h"
#include "i965_drv_video.h"
#include "i965_decoder_utils.h"

#include "gen7_mfd.h"
#include "intel_media.h"

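/*
 * Zigzag scan order for an 8x8 block; gen7_mfd_mpeg2_qm_state() below uses it
 * to reorder the MPEG-2 quantiser matrices before loading them with
 * MFX_QM_STATE.
 */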
static const uint32_t zigzag_direct[64] = {
    0,   1,  8, 16,  9,  2,  3, 10,
    17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};

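/*
 * Keep the driver's reference_surface[] table in sync with the DPB described
 * by the picture parameters: release entries that are no longer referenced,
 * add newly referenced frames, and give each one a stable frame store index.
 */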
static void
gen7_mfd_avc_frame_store_index(VADriverContextP ctx,
                               VAPictureParameterBufferH264 *pic_param,
                               struct gen7_mfd_context *gen7_mfd_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    int i, j;

    assert(ARRAY_ELEMS(gen7_mfd_context->reference_surface) == ARRAY_ELEMS(pic_param->ReferenceFrames));

    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        int found = 0;

        if (gen7_mfd_context->reference_surface[i].surface_id == VA_INVALID_ID)
            continue;

        for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
            VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
            if (ref_pic->flags & VA_PICTURE_H264_INVALID)
                continue;

            if (gen7_mfd_context->reference_surface[i].surface_id == ref_pic->picture_id) {
                found = 1;
                break;
            }
        }

        if (!found) {
            struct object_surface *obj_surface = SURFACE(gen7_mfd_context->reference_surface[i].surface_id);
            obj_surface->flags &= ~SURFACE_REFERENCED;

            if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
                dri_bo_unreference(obj_surface->bo);
                obj_surface->bo = NULL;
                obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
            }

            if (obj_surface->free_private_data)
                obj_surface->free_private_data(&obj_surface->private_data);

            gen7_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
            gen7_mfd_context->reference_surface[i].frame_store_id = -1;
        }
    }

    for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
        VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
        int found = 0;

        if (ref_pic->flags & VA_PICTURE_H264_INVALID)
            continue;

        for (j = 0; j < ARRAY_ELEMS(gen7_mfd_context->reference_surface); j++) {
            if (gen7_mfd_context->reference_surface[j].surface_id == VA_INVALID_ID)
                continue;

            if (gen7_mfd_context->reference_surface[j].surface_id == ref_pic->picture_id) {
                found = 1;
                break;
            }
        }

        if (!found) {
            int frame_idx;
            struct object_surface *obj_surface = SURFACE(ref_pic->picture_id);

            assert(obj_surface);
            i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);

            for (frame_idx = 0; frame_idx < ARRAY_ELEMS(gen7_mfd_context->reference_surface); frame_idx++) {
                for (j = 0; j < ARRAY_ELEMS(gen7_mfd_context->reference_surface); j++) {
                    if (gen7_mfd_context->reference_surface[j].surface_id == VA_INVALID_ID)
                        continue;

                    if (gen7_mfd_context->reference_surface[j].frame_store_id == frame_idx)
                        break;
                }

                if (j == ARRAY_ELEMS(gen7_mfd_context->reference_surface))
                    break;
            }

            assert(frame_idx < ARRAY_ELEMS(gen7_mfd_context->reference_surface));

            for (j = 0; j < ARRAY_ELEMS(gen7_mfd_context->reference_surface); j++) {
                if (gen7_mfd_context->reference_surface[j].surface_id == VA_INVALID_ID) {
                    gen7_mfd_context->reference_surface[j].surface_id = ref_pic->picture_id;
                    gen7_mfd_context->reference_surface[j].frame_store_id = frame_idx;
                    break;
                }
            }
        }
    }

    /* sort */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface) - 1; i++) {
        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
            gen7_mfd_context->reference_surface[i].frame_store_id == i)
            continue;

        for (j = i + 1; j < ARRAY_ELEMS(gen7_mfd_context->reference_surface); j++) {
            if (gen7_mfd_context->reference_surface[j].surface_id != VA_INVALID_ID &&
                gen7_mfd_context->reference_surface[j].frame_store_id == i) {
                VASurfaceID id = gen7_mfd_context->reference_surface[i].surface_id;
                int frame_idx = gen7_mfd_context->reference_surface[i].frame_store_id;

                gen7_mfd_context->reference_surface[i].surface_id = gen7_mfd_context->reference_surface[j].surface_id;
                gen7_mfd_context->reference_surface[i].frame_store_id = gen7_mfd_context->reference_surface[j].frame_store_id;
                gen7_mfd_context->reference_surface[j].surface_id = id;
                gen7_mfd_context->reference_surface[j].frame_store_id = frame_idx;
                break;
            }
        }
    }
}

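/*
 * Allocate the per-surface private data used for AVC decoding: the direct-mode
 * motion vector (DMV) buffers. A separate bottom-field buffer is only needed
 * for field pictures when direct_8x8_inference is disabled.
 */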
static void
gen7_mfd_init_avc_surface(VADriverContextP ctx,
                          VAPictureParameterBufferH264 *pic_param,
                          struct object_surface *obj_surface)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    GenAvcSurface *gen7_avc_surface = obj_surface->private_data;
    int width_in_mbs, height_in_mbs;

    obj_surface->free_private_data = gen_free_avc_surface;
    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (!gen7_avc_surface) {
        gen7_avc_surface = calloc(sizeof(GenAvcSurface), 1);
        assert((obj_surface->size & 0x3f) == 0);
        obj_surface->private_data = gen7_avc_surface;
    }

    gen7_avc_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
                                         !pic_param->seq_fields.bits.direct_8x8_inference_flag);

    if (gen7_avc_surface->dmv_top == NULL) {
        gen7_avc_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
                                                 "direct mv w/r buffer",
                                                 width_in_mbs * height_in_mbs * 64,
                                                 0x1000);
        assert(gen7_avc_surface->dmv_top);
    }

    if (gen7_avc_surface->dmv_bottom_flag &&
        gen7_avc_surface->dmv_bottom == NULL) {
        gen7_avc_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
                                                    "direct mv w/r buffer",
                                                    width_in_mbs * height_in_mbs * 64,
                                                    0x1000);
        assert(gen7_avc_surface->dmv_bottom);
    }
}

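/*
 * MFX_PIPE_MODE_SELECT: put the MFX engine into VLD decode mode for the
 * selected codec and enable the pre- or post-deblocking output, whichever
 * the caller has marked as valid.
 */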
static void
gen7_mfd_pipe_mode_select(VADriverContextP ctx,
                          struct decode_state *decode_state,
                          int standard_select,
                          struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    assert(standard_select == MFX_FORMAT_MPEG2 ||
           standard_select == MFX_FORMAT_AVC ||
           standard_select == MFX_FORMAT_VC1 ||
           standard_select == MFX_FORMAT_JPEG);

    BEGIN_BCS_BATCH(batch, 5);
    OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
    OUT_BCS_BATCH(batch,
                  (MFX_LONG_MODE << 17) | /* Currently only support long format */
                  (MFD_MODE_VLD << 15) | /* VLD mode */
                  (0 << 10) | /* disable Stream-Out */
                  (gen7_mfd_context->post_deblocking_output.valid << 9)  | /* Post Deblocking Output */
                  (gen7_mfd_context->pre_deblocking_output.valid << 8)  | /* Pre Deblocking Output */
                  (0 << 5)  | /* not in stitch mode */
                  (MFX_CODEC_DECODE << 4)  | /* decoding mode */
                  (standard_select << 0));
    OUT_BCS_BATCH(batch,
                  (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
                  (0 << 3)  | /* terminate if AVC mbdata error occurs */
                  (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
                  (0 << 1)  |
                  (0 << 0));
    OUT_BCS_BATCH(batch, 0); /* pic status/error report id */
    OUT_BCS_BATCH(batch, 0); /* reserved */
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_surface_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       int standard_select,
                       struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface = SURFACE(decode_state->current_render_target);
    unsigned int y_cb_offset;
    unsigned int y_cr_offset;

    assert(obj_surface);

    y_cb_offset = obj_surface->y_cb_offset;
    y_cr_offset = obj_surface->y_cr_offset;

    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  ((obj_surface->orig_height - 1) << 18) |
                  ((obj_surface->orig_width - 1) << 4));
    OUT_BCS_BATCH(batch,
                  (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
                  ((standard_select != MFX_FORMAT_JPEG) << 27) | /* interleave chroma, set to 0 for JPEG */
                  (0 << 22) | /* surface object control state, ignored */
                  ((obj_surface->width - 1) << 3) | /* pitch */
                  (0 << 2)  | /* must be 0 */
                  (1 << 1)  | /* must be tiled */
                  (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
    OUT_BCS_BATCH(batch,
                  (0 << 16) | /* X offset for U(Cb), must be 0 */
                  (y_cb_offset << 0)); /* Y offset for U(Cb) */
    OUT_BCS_BATCH(batch,
                  (0 << 16) | /* X offset for V(Cr), must be 0 */
                  (y_cr_offset << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_pipe_buf_addr_state(VADriverContextP ctx,
                             struct decode_state *decode_state,
                             int standard_select,
                             struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    int i;

    BEGIN_BCS_BATCH(batch, 24);
    OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
    if (gen7_mfd_context->pre_deblocking_output.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->pre_deblocking_output.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->post_deblocking_output.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->post_deblocking_output.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
    OUT_BCS_BATCH(batch, 0); /* ignore for decoding */

    if (gen7_mfd_context->intra_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->intra_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    /* DW 7..22 */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        struct object_surface *obj_surface;

        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
            obj_surface = SURFACE(gen7_mfd_context->reference_surface[i].surface_id);
            assert(obj_surface && obj_surface->bo);

            OUT_BCS_RELOC(batch, obj_surface->bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0);
        } else {
            OUT_BCS_BATCH(batch, 0);
        }
    }

    OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_ind_obj_base_addr_state(VADriverContextP ctx,
                                 dri_bo *slice_data_bo,
                                 int standard_select,
                                 struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    BEGIN_BCS_BATCH(batch, 11);
    OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
    OUT_BCS_RELOC(batch, slice_data_bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0); /* MFX Indirect Bitstream Object Base Address */
    OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_bsp_buf_base_addr_state(VADriverContextP ctx,
                                 struct decode_state *decode_state,
                                 int standard_select,
                                 struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    BEGIN_BCS_BATCH(batch, 4);
    OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));

    if (gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->mpr_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->mpr_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->bitplane_read_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->bitplane_read_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

#if 0
static void
gen7_mfd_aes_state(VADriverContextP ctx,
                   struct decode_state *decode_state,
                   int standard_select)
{
    /* FIXME */
}
#endif

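/*
 * MFX_QM_STATE: load one quantiser matrix. The command always carries 16
 * dwords of payload, so the caller's matrix (at most 64 bytes) is staged in
 * a local buffer before being emitted.
 */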
static void
gen7_mfd_qm_state(VADriverContextP ctx,
                  int qm_type,
                  unsigned char *qm,
                  int qm_length,
                  struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    unsigned int qm_buffer[16];

    assert(qm_length <= 16 * 4);
    memcpy(qm_buffer, qm, qm_length);

    BEGIN_BCS_BATCH(batch, 18);
    OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
    OUT_BCS_BATCH(batch, qm_type << 0);
    intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
    ADVANCE_BCS_BATCH(batch);
}

#if 0
static void
gen7_mfd_wait(VADriverContextP ctx,
              struct decode_state *decode_state,
              int standard_select,
              struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    BEGIN_BCS_BATCH(batch, 1);
    OUT_BCS_BATCH(batch, MFX_WAIT | (1 << 8));
    ADVANCE_BCS_BATCH(batch);
}
#endif

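/*
 * MFX_AVC_IMG_STATE: per-picture AVC parameters such as the frame size in
 * macroblocks, chroma QP offsets, field/MBAFF structure and the relevant
 * sequence/picture flags. Only monochrome and 4:2:0 content is expected here.
 */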
static void
gen7_mfd_avc_img_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int img_struct;
    int mbaff_frame_flag;
    unsigned int width_in_mbs, height_in_mbs;
    VAPictureParameterBufferH264 *pic_param;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
        img_struct = 1;
    else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
        img_struct = 3;
    else
        img_struct = 0;

    if ((img_struct & 0x1) == 0x1) {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
    } else {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
    }

    if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
        assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
        assert(pic_param->pic_fields.bits.field_pic_flag == 0);
    } else {
        assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
    }

    mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
                        !pic_param->pic_fields.bits.field_pic_flag);

    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    /* MFX unit doesn't support 4:2:2 and 4:4:4 picture */
    assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
           pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
    assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */

    BEGIN_BCS_BATCH(batch, 16);
    OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
    OUT_BCS_BATCH(batch,
                  width_in_mbs * height_in_mbs);
    OUT_BCS_BATCH(batch,
                  ((height_in_mbs - 1) << 16) |
                  ((width_in_mbs - 1) << 0));
    OUT_BCS_BATCH(batch,
                  ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
                  ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
                  (0 << 14) | /* Max-bit conformance Intra flag ??? FIXME */
                  (0 << 13) | /* Max Macroblock size conformance Inter flag ??? FIXME */
                  (pic_param->pic_fields.bits.weighted_pred_flag << 12) | /* differ from GEN6 */
                  (pic_param->pic_fields.bits.weighted_bipred_idc << 10) |
                  (img_struct << 8));
    OUT_BCS_BATCH(batch,
                  (pic_param->seq_fields.bits.chroma_format_idc << 10) |
                  (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
                  ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
                  (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
                  (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
                  (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
                  (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
                  (mbaff_frame_flag << 1) |
                  (pic_param->pic_fields.bits.field_pic_flag << 0));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_avc_qm_state(VADriverContextP ctx,
                      struct decode_state *decode_state,
                      struct gen7_mfd_context *gen7_mfd_context)
{
    VAIQMatrixBufferH264 *iq_matrix;
    VAPictureParameterBufferH264 *pic_param;

    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
        iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
    else
        iq_matrix = &gen7_mfd_context->iq_matrix.h264;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, &iq_matrix->ScalingList4x4[0][0], 3 * 16, gen7_mfd_context);
    gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, &iq_matrix->ScalingList4x4[3][0], 3 * 16, gen7_mfd_context);

    if (pic_param->pic_fields.bits.transform_8x8_mode_flag) {
        gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, &iq_matrix->ScalingList8x8[0][0], 64, gen7_mfd_context);
        gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, &iq_matrix->ScalingList8x8[1][0], 64, gen7_mfd_context);
    }
}

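/*
 * MFX_AVC_DIRECTMODE_STATE: pass the direct-mode MV buffers of every frame
 * store entry and of the current picture, followed by the POC list (top and
 * bottom field order counts) for the references and the current picture.
 */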
static void
gen7_mfd_avc_directmode_state(VADriverContextP ctx,
                              VAPictureParameterBufferH264 *pic_param,
                              VASliceParameterBufferH264 *slice_param,
                              struct gen7_mfd_context *gen7_mfd_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    struct object_surface *obj_surface;
    GenAvcSurface *gen7_avc_surface;
    VAPictureH264 *va_pic;
    int i, j;

    BEGIN_BCS_BATCH(batch, 69);
    OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));

    /* reference surfaces 0..15 */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
            obj_surface = SURFACE(gen7_mfd_context->reference_surface[i].surface_id);
            assert(obj_surface);
            gen7_avc_surface = obj_surface->private_data;

            if (gen7_avc_surface == NULL) {
                OUT_BCS_BATCH(batch, 0);
                OUT_BCS_BATCH(batch, 0);
            } else {
                OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0);

                if (gen7_avc_surface->dmv_bottom_flag == 1)
                    OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
                                  I915_GEM_DOMAIN_INSTRUCTION, 0,
                                  0);
                else
                    OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                                  I915_GEM_DOMAIN_INSTRUCTION, 0,
                                  0);
            }
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* the current decoding frame/field */
    va_pic = &pic_param->CurrPic;
    assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
    obj_surface = SURFACE(va_pic->picture_id);
    assert(obj_surface && obj_surface->bo && obj_surface->private_data);
    gen7_avc_surface = obj_surface->private_data;

    OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    if (gen7_avc_surface->dmv_bottom_flag == 1)
        OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);

    /* POC List */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
            int found = 0;
            for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
                va_pic = &pic_param->ReferenceFrames[j];

                if (va_pic->flags & VA_PICTURE_H264_INVALID)
                    continue;

                if (va_pic->picture_id == gen7_mfd_context->reference_surface[i].surface_id) {
                    found = 1;
                    break;
                }
            }

            assert(found == 1);
            assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));

            OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
            OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    va_pic = &pic_param->CurrPic;
    OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
    OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);

    ADVANCE_BCS_BATCH(batch);
}

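/*
 * MFX_AVC_SLICE_STATE: per-slice controls, including active reference counts,
 * slice QP, deblocking offsets, and the macroblock coordinates of this slice
 * and of the next one so the hardware knows where the current slice ends.
 */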
static void
gen7_mfd_avc_slice_state(VADriverContextP ctx,
                         VAPictureParameterBufferH264 *pic_param,
                         VASliceParameterBufferH264 *slice_param,
                         VASliceParameterBufferH264 *next_slice_param,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
    int slice_hor_pos, slice_ver_pos, next_slice_hor_pos, next_slice_ver_pos;
    int num_ref_idx_l0, num_ref_idx_l1;
    int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                         pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
    int first_mb_in_slice = 0, first_mb_in_next_slice = 0;
    int slice_type;

    if (slice_param->slice_type == SLICE_TYPE_I ||
        slice_param->slice_type == SLICE_TYPE_SI) {
        slice_type = SLICE_TYPE_I;
    } else if (slice_param->slice_type == SLICE_TYPE_P ||
               slice_param->slice_type == SLICE_TYPE_SP) {
        slice_type = SLICE_TYPE_P;
    } else {
        assert(slice_param->slice_type == SLICE_TYPE_B);
        slice_type = SLICE_TYPE_B;
    }

    if (slice_type == SLICE_TYPE_I) {
        assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
        assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
        num_ref_idx_l0 = 0;
        num_ref_idx_l1 = 0;
    } else if (slice_type == SLICE_TYPE_P) {
        assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
        num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
        num_ref_idx_l1 = 0;
    } else {
        num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
        num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
    }

    first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
    slice_hor_pos = first_mb_in_slice % width_in_mbs;
    slice_ver_pos = first_mb_in_slice / width_in_mbs;

    if (next_slice_param) {
        first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
        next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
        next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
    } else {
        next_slice_hor_pos = 0;
        next_slice_ver_pos = height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
    }

    BEGIN_BCS_BATCH(batch, 11); /* FIXME: is it 10??? */
    OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, slice_type);
    OUT_BCS_BATCH(batch,
                  (num_ref_idx_l1 << 24) |
                  (num_ref_idx_l0 << 16) |
                  (slice_param->chroma_log2_weight_denom << 8) |
                  (slice_param->luma_log2_weight_denom << 0));
    OUT_BCS_BATCH(batch,
                  (slice_param->direct_spatial_mv_pred_flag << 29) |
                  (slice_param->disable_deblocking_filter_idc << 27) |
                  (slice_param->cabac_init_idc << 24) |
                  ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                  ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                  ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
    OUT_BCS_BATCH(batch,
                  (slice_ver_pos << 24) |
                  (slice_hor_pos << 16) |
                  (first_mb_in_slice << 0));
    OUT_BCS_BATCH(batch,
                  (next_slice_ver_pos << 16) |
                  (next_slice_hor_pos << 0));
    OUT_BCS_BATCH(batch,
                  (next_slice_param == NULL) << 19); /* last slice flag */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static inline void
gen7_mfd_avc_ref_idx_state(VADriverContextP ctx,
                           VAPictureParameterBufferH264 *pic_param,
                           VASliceParameterBufferH264 *slice_param,
                           struct gen7_mfd_context *gen7_mfd_context)
{
    gen6_send_avc_ref_idx_state(
        gen7_mfd_context->base.batch,
        slice_param,
        gen7_mfd_context->reference_surface
    );
}

static void
gen7_mfd_avc_weightoffset_state(VADriverContextP ctx,
                                VAPictureParameterBufferH264 *pic_param,
                                VASliceParameterBufferH264 *slice_param,
                                struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int i, j, num_weight_offset_table = 0;
    short weightoffsets[32 * 6];

    if ((slice_param->slice_type == SLICE_TYPE_P ||
         slice_param->slice_type == SLICE_TYPE_SP) &&
        (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
        num_weight_offset_table = 1;
    }

    if ((slice_param->slice_type == SLICE_TYPE_B) &&
        (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
        num_weight_offset_table = 2;
    }

    for (i = 0; i < num_weight_offset_table; i++) {
        BEGIN_BCS_BATCH(batch, 98);
        OUT_BCS_BATCH(batch, MFX_AVC_WEIGHTOFFSET_STATE | (98 - 2));
        OUT_BCS_BATCH(batch, i);

        if (i == 0) {
            for (j = 0; j < 32; j++) {
                weightoffsets[j * 6 + 0] = slice_param->luma_weight_l0[j];
                weightoffsets[j * 6 + 1] = slice_param->luma_offset_l0[j];
                weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l0[j][0];
                weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l0[j][0];
                weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l0[j][1];
                weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l0[j][1];
            }
        } else {
            for (j = 0; j < 32; j++) {
                weightoffsets[j * 6 + 0] = slice_param->luma_weight_l1[j];
                weightoffsets[j * 6 + 1] = slice_param->luma_offset_l1[j];
                weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l1[j][0];
                weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l1[j][0];
                weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l1[j][1];
                weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l1[j][1];
            }
        }

        intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
        ADVANCE_BCS_BATCH(batch);
    }
}

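/*
 * MFD_AVC_BSD_OBJECT: start bitstream decoding for one slice. The bit offset
 * of the first macroblock is computed with avc_get_first_mb_bit_offset(),
 * because the input bitstream format on GEN7 differs from GEN6 (see the
 * comment in the function body).
 */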
static void
gen7_mfd_avc_bsd_object(VADriverContextP ctx,
                        VAPictureParameterBufferH264 *pic_param,
                        VASliceParameterBufferH264 *slice_param,
                        dri_bo *slice_data_bo,
                        VASliceParameterBufferH264 *next_slice_param,
                        struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    unsigned int slice_data_bit_offset;

    slice_data_bit_offset = avc_get_first_mb_bit_offset(
        slice_data_bo,
        slice_param,
        pic_param->pic_fields.bits.entropy_coding_mode_flag
    );

    /* the input bitstream format on GEN7 differs from GEN6 */
    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
    OUT_BCS_BATCH(batch,
                  (slice_param->slice_data_size - slice_param->slice_data_offset));
    OUT_BCS_BATCH(batch, slice_param->slice_data_offset);
    OUT_BCS_BATCH(batch,
                  (0 << 31) |
                  (0 << 14) |
                  (0 << 12) |
                  (0 << 10) |
                  (0 << 8));
    OUT_BCS_BATCH(batch,
                  ((slice_data_bit_offset >> 3) << 16) |
                  (0 << 5)  |
                  (0 << 4)  |
                  ((next_slice_param == NULL) << 3) | /* LastSlice Flag */
                  (slice_data_bit_offset & 0x7));
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static inline void
gen7_mfd_avc_context_init(
    VADriverContextP         ctx,
    struct gen7_mfd_context *gen7_mfd_context
)
{
    /* Initialize flat scaling lists */
    avc_gen_default_iq_matrix(&gen7_mfd_context->iq_matrix.h264);
}

static void
gen7_mfd_avc_decode_init(VADriverContextP ctx,
                         struct decode_state *decode_state,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    VAPictureParameterBufferH264 *pic_param;
    VASliceParameterBufferH264 *slice_param;
    VAPictureH264 *va_pic;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface;
    dri_bo *bo;
    int i, j, enable_avc_ildb = 0;
    unsigned int width_in_mbs, height_in_mbs;

    for (j = 0; j < decode_state->num_slice_params && enable_avc_ildb == 0; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
                   (slice_param->slice_type == SLICE_TYPE_P) ||
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
                   (slice_param->slice_type == SLICE_TYPE_B));

            if (slice_param->disable_deblocking_filter_idc != 1) {
                enable_avc_ildb = 1;
                break;
            }

            slice_param++;
        }
    }

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    gen7_mfd_avc_frame_store_index(ctx, pic_param, gen7_mfd_context);
    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
    assert(width_in_mbs > 0 && width_in_mbs <= 256); /* 4K */
    assert(height_in_mbs > 0 && height_in_mbs <= 256);

    /* Current decoded picture */
    va_pic = &pic_param->CurrPic;
    assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
    obj_surface = SURFACE(va_pic->picture_id);
    assert(obj_surface);
    obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
    obj_surface->flags |= (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
    gen7_mfd_init_avc_surface(ctx, pic_param, obj_surface);

    dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
    gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
    gen7_mfd_context->post_deblocking_output.valid = enable_avc_ildb;

    dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.valid = !enable_avc_ildb;

    dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "intra row store",
                      width_in_mbs * 64,
                      0x1000);
    assert(bo);
    gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "deblocking filter row store",
                      width_in_mbs * 64 * 4,
                      0x1000);
    assert(bo);
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "bsd mpc row store",
                      width_in_mbs * 64 * 2,
                      0x1000);
    assert(bo);
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "mpr row store",
                      width_in_mbs * 64 * 2,
                      0x1000);
    assert(bo);
    gen7_mfd_context->mpr_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 1;

    gen7_mfd_context->bitplane_read_buffer.valid = 0;
}

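/*
 * Top-level AVC decode of one picture: set up the per-picture state, then,
 * for each slice buffer, emit IND_OBJ_BASE_ADDR followed by the per-slice
 * states and one BSD object per slice, all within a single atomic batch.
 */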
static void
gen7_mfd_avc_decode_picture(VADriverContextP ctx,
                            struct decode_state *decode_state,
                            struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferH264 *pic_param;
    VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
    dri_bo *slice_data_bo;
    int i, j;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    gen7_mfd_avc_decode_init(ctx, decode_state, gen7_mfd_context);

    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
    intel_batchbuffer_emit_mi_flush(batch);
    gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_avc_qm_state(ctx, decode_state, gen7_mfd_context);
    gen7_mfd_avc_img_state(ctx, decode_state, gen7_mfd_context);

    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
        slice_data_bo = decode_state->slice_datas[j]->bo;
        gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC, gen7_mfd_context);

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
                   (slice_param->slice_type == SLICE_TYPE_P) ||
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
                   (slice_param->slice_type == SLICE_TYPE_B));

            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            gen7_mfd_avc_directmode_state(ctx, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_ref_idx_state(ctx, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_weightoffset_state(ctx, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
            gen7_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo, next_slice_param, gen7_mfd_context);
            slice_param++;
        }
    }

    intel_batchbuffer_end_atomic(batch);
    intel_batchbuffer_flush(batch);
}

static void
gen7_mfd_mpeg2_decode_init(VADriverContextP ctx,
                           struct decode_state *decode_state,
                           struct gen7_mfd_context *gen7_mfd_context)
{
    VAPictureParameterBufferMPEG2 *pic_param;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface;
    dri_bo *bo;
    unsigned int width_in_mbs;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
    width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;

    mpeg2_set_reference_surfaces(
        ctx,
        gen7_mfd_context->reference_surface,
        decode_state,
        pic_param
    );

    /* Current decoded picture */
    obj_surface = SURFACE(decode_state->current_render_target);
    assert(obj_surface);
    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);

    dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.valid = 1;

    dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "bsd mpc row store",
                      width_in_mbs * 96,
                      0x1000);
    assert(bo);
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;

    gen7_mfd_context->post_deblocking_output.valid = 0;
    gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->bitplane_read_buffer.valid = 0;
}

static void
gen7_mfd_mpeg2_pic_state(VADriverContextP ctx,
                         struct decode_state *decode_state,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferMPEG2 *pic_param;
    unsigned int slice_concealment_disable_bit = 0;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;

    if (IS_HASWELL(i965->intel.device_id)) {
        /* XXX: disable concealment for now */
        slice_concealment_disable_bit = 1;
    }

    BEGIN_BCS_BATCH(batch, 13);
    OUT_BCS_BATCH(batch, MFX_MPEG2_PIC_STATE | (13 - 2));
    OUT_BCS_BATCH(batch,
                  (pic_param->f_code & 0xf) << 28 | /* f_code[1][1] */
                  ((pic_param->f_code >> 4) & 0xf) << 24 | /* f_code[1][0] */
                  ((pic_param->f_code >> 8) & 0xf) << 20 | /* f_code[0][1] */
                  ((pic_param->f_code >> 12) & 0xf) << 16 | /* f_code[0][0] */
                  pic_param->picture_coding_extension.bits.intra_dc_precision << 14 |
                  pic_param->picture_coding_extension.bits.picture_structure << 12 |
                  pic_param->picture_coding_extension.bits.top_field_first << 11 |
                  pic_param->picture_coding_extension.bits.frame_pred_frame_dct << 10 |
                  pic_param->picture_coding_extension.bits.concealment_motion_vectors << 9 |
                  pic_param->picture_coding_extension.bits.q_scale_type << 8 |
                  pic_param->picture_coding_extension.bits.intra_vlc_format << 7 |
                  pic_param->picture_coding_extension.bits.alternate_scan << 6);
    OUT_BCS_BATCH(batch,
                  pic_param->picture_coding_type << 9);
    OUT_BCS_BATCH(batch,
                  (slice_concealment_disable_bit << 31) |
                  ((ALIGN(pic_param->vertical_size, 16) / 16) - 1) << 16 |
                  ((ALIGN(pic_param->horizontal_size, 16) / 16) - 1));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

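/*
 * Cache the MPEG-2 quantiser matrices in the context, reordering them through
 * zigzag_direct[], then load whichever of the intra/non-intra matrices are
 * flagged as present via MFX_QM_STATE.
 */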
static void
gen7_mfd_mpeg2_qm_state(VADriverContextP ctx,
                        struct decode_state *decode_state,
                        struct gen7_mfd_context *gen7_mfd_context)
{
    VAIQMatrixBufferMPEG2 * const gen_iq_matrix = &gen7_mfd_context->iq_matrix.mpeg2;
    int i, j;

    /* Update internal QM state */
    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer) {
        VAIQMatrixBufferMPEG2 * const iq_matrix =
            (VAIQMatrixBufferMPEG2 *)decode_state->iq_matrix->buffer;

        if (gen_iq_matrix->load_intra_quantiser_matrix == -1 ||
            iq_matrix->load_intra_quantiser_matrix) {
            gen_iq_matrix->load_intra_quantiser_matrix =
                iq_matrix->load_intra_quantiser_matrix;
            if (iq_matrix->load_intra_quantiser_matrix) {
                for (j = 0; j < 64; j++)
                    gen_iq_matrix->intra_quantiser_matrix[zigzag_direct[j]] =
                        iq_matrix->intra_quantiser_matrix[j];
            }
        }

        if (gen_iq_matrix->load_non_intra_quantiser_matrix == -1 ||
            iq_matrix->load_non_intra_quantiser_matrix) {
            gen_iq_matrix->load_non_intra_quantiser_matrix =
                iq_matrix->load_non_intra_quantiser_matrix;
            if (iq_matrix->load_non_intra_quantiser_matrix) {
                for (j = 0; j < 64; j++)
                    gen_iq_matrix->non_intra_quantiser_matrix[zigzag_direct[j]] =
                        iq_matrix->non_intra_quantiser_matrix[j];
            }
        }
    }

    /* Commit QM state to HW */
    for (i = 0; i < 2; i++) {
        unsigned char *qm = NULL;
        int qm_type;

        if (i == 0) {
            if (gen_iq_matrix->load_intra_quantiser_matrix) {
                qm = gen_iq_matrix->intra_quantiser_matrix;
                qm_type = MFX_QM_MPEG_INTRA_QUANTIZER_MATRIX;
            }
        } else {
            if (gen_iq_matrix->load_non_intra_quantiser_matrix) {
                qm = gen_iq_matrix->non_intra_quantiser_matrix;
                qm_type = MFX_QM_MPEG_NON_INTRA_QUANTIZER_MATRIX;
            }
        }

        if (!qm)
            continue;

        gen7_mfd_qm_state(ctx, qm_type, qm, 64, gen7_mfd_context);
    }
}

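/*
 * MFD_MPEG2_BSD_OBJECT: decode one MPEG-2 slice. The macroblock count is
 * derived from the start positions of this slice and the next one; for field
 * pictures the slice_vertical_position may be halved, depending on the
 * wa_mpeg2_slice_vertical_position workaround detected at decode time.
 */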
1184 static void
1185 gen7_mfd_mpeg2_bsd_object(VADriverContextP ctx,
1186                           VAPictureParameterBufferMPEG2 *pic_param,
1187                           VASliceParameterBufferMPEG2 *slice_param,
1188                           VASliceParameterBufferMPEG2 *next_slice_param,
1189                           struct gen7_mfd_context *gen7_mfd_context)
1190 {
1191     struct i965_driver_data * const i965 = i965_driver_data(ctx);
1192     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1193     unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1194     int mb_count, vpos0, hpos0, vpos1, hpos1, is_field_pic_wa, is_field_pic = 0;
1195
1196     if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_TOP_FIELD ||
1197         pic_param->picture_coding_extension.bits.picture_structure == MPEG_BOTTOM_FIELD)
1198         is_field_pic = 1;
1199     is_field_pic_wa = is_field_pic &&
1200         gen7_mfd_context->wa_mpeg2_slice_vertical_position > 0;
1201
1202     vpos0 = slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1203     hpos0 = slice_param->slice_horizontal_position;
1204
1205     if (next_slice_param == NULL) {
1206         vpos1 = ALIGN(pic_param->vertical_size, 16) / 16 / (1 + is_field_pic);
1207         hpos1 = 0;
1208     } else {
1209         vpos1 = next_slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1210         hpos1 = next_slice_param->slice_horizontal_position;
1211     }
1212
1213     mb_count = (vpos1 * width_in_mbs + hpos1) - (vpos0 * width_in_mbs + hpos0);
1214
1215     BEGIN_BCS_BATCH(batch, 5);
1216     OUT_BCS_BATCH(batch, MFD_MPEG2_BSD_OBJECT | (5 - 2));
1217     OUT_BCS_BATCH(batch, 
1218                   slice_param->slice_data_size - (slice_param->macroblock_offset >> 3));
1219     OUT_BCS_BATCH(batch, 
1220                   slice_param->slice_data_offset + (slice_param->macroblock_offset >> 3));
1221     OUT_BCS_BATCH(batch,
1222                   hpos0 << 24 |
1223                   vpos0 << 16 |
1224                   mb_count << 8 |
1225                   (next_slice_param == NULL) << 5 |
1226                   (next_slice_param == NULL) << 3 |
1227                   (slice_param->macroblock_offset & 0x7));
1228     OUT_BCS_BATCH(batch,
1229                   (slice_param->quantiser_scale_code << 24) |
1230                   (IS_HASWELL(i965->intel.device_id) ? (vpos1 << 8 | hpos1) : 0));
1231     ADVANCE_BCS_BATCH(batch);
1232 }
1233
1234 static void
1235 gen7_mfd_mpeg2_decode_picture(VADriverContextP ctx,
1236                               struct decode_state *decode_state,
1237                               struct gen7_mfd_context *gen7_mfd_context)
1238 {
1239     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1240     VAPictureParameterBufferMPEG2 *pic_param;
1241     VASliceParameterBufferMPEG2 *slice_param, *next_slice_param, *next_slice_group_param;
1242     dri_bo *slice_data_bo;
1243     int i, j;
1244
1245     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1246     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1247
1248     gen7_mfd_mpeg2_decode_init(ctx, decode_state, gen7_mfd_context);
1249     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1250     intel_batchbuffer_emit_mi_flush(batch);
1251     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1252     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1253     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1254     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1255     gen7_mfd_mpeg2_pic_state(ctx, decode_state, gen7_mfd_context);
1256     gen7_mfd_mpeg2_qm_state(ctx, decode_state, gen7_mfd_context);
1257
1258     if (gen7_mfd_context->wa_mpeg2_slice_vertical_position < 0)
1259         gen7_mfd_context->wa_mpeg2_slice_vertical_position =
1260             mpeg2_wa_slice_vertical_position(decode_state, pic_param);
1261
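    /*
     * Slice parameters arrive as an array of parameter buffers, each holding
     * several elements; chain next_slice_param across buffer boundaries so
     * the last slice of one group points at the first slice of the next.
     */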
1262     for (j = 0; j < decode_state->num_slice_params; j++) {
1263         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1264         slice_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer;
1265         slice_data_bo = decode_state->slice_datas[j]->bo;
1266         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2, gen7_mfd_context);
1267
1268         if (j == decode_state->num_slice_params - 1)
1269             next_slice_group_param = NULL;
1270         else
1271             next_slice_group_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j + 1]->buffer;
1272
1273         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1274             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1275
1276             if (i < decode_state->slice_params[j]->num_elements - 1)
1277                 next_slice_param = slice_param + 1;
1278             else
1279                 next_slice_param = next_slice_group_param;
1280
1281             gen7_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
1282             slice_param++;
1283         }
1284     }
1285
1286     intel_batchbuffer_end_atomic(batch);
1287     intel_batchbuffer_flush(batch);
1288 }
1289
1290 static const int va_to_gen7_vc1_pic_type[5] = {
1291     GEN7_VC1_I_PICTURE,
1292     GEN7_VC1_P_PICTURE,
1293     GEN7_VC1_B_PICTURE,
1294     GEN7_VC1_BI_PICTURE,
1295     GEN7_VC1_P_PICTURE,
1296 };
1297
1298 static const int va_to_gen7_vc1_mv[4] = {
1299     1, /* 1-MV */
1300     2, /* 1-MV half-pel */
1301     3, /* 1-MV half-pel bilinear */
1302     0, /* Mixed MV */
1303 };
1304
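/*
 * Scale factors indexed by the VC-1 BFRACTION code (b_picture_fraction),
 * applied below as an 8-bit fixed-point fraction: (scale_factor * brfd) >> 8.
 */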
1305 static const int b_picture_scale_factor[21] = {
1306     128, 85,  170, 64,  192,
1307     51,  102, 153, 204, 43,
1308     215, 37,  74,  111, 148,
1309     185, 222, 32,  96,  160, 
1310     224,
1311 };
1312
1313 static const int va_to_gen7_vc1_condover[3] = {
1314     0,
1315     2,
1316     3
1317 };
1318
1319 static const int va_to_gen7_vc1_profile[4] = {
1320     GEN7_VC1_SIMPLE_PROFILE,
1321     GEN7_VC1_MAIN_PROFILE,
1322     GEN7_VC1_RESERVED_PROFILE,
1323     GEN7_VC1_ADVANCED_PROFILE
1324 };
1325
1326 static void 
1327 gen7_mfd_free_vc1_surface(void **data)
1328 {
1329     struct gen7_vc1_surface *gen7_vc1_surface = *data;
1330
1331     if (!gen7_vc1_surface)
1332         return;
1333
1334     dri_bo_unreference(gen7_vc1_surface->dmv);
1335     free(gen7_vc1_surface);
1336     *data = NULL;
1337 }
1338
1339 static void
1340 gen7_mfd_init_vc1_surface(VADriverContextP ctx, 
1341                           VAPictureParameterBufferVC1 *pic_param,
1342                           struct object_surface *obj_surface)
1343 {
1344     struct i965_driver_data *i965 = i965_driver_data(ctx);
1345     struct gen7_vc1_surface *gen7_vc1_surface = obj_surface->private_data;
1346     int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1347     int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1348
1349     obj_surface->free_private_data = gen7_mfd_free_vc1_surface;
1350
1351     if (!gen7_vc1_surface) {
1352         gen7_vc1_surface = calloc(1, sizeof(struct gen7_vc1_surface));
1353         assert((obj_surface->size & 0x3f) == 0);
1354         obj_surface->private_data = gen7_vc1_surface;
1355     }
1356
1357     gen7_vc1_surface->picture_type = pic_param->picture_fields.bits.picture_type;
1358
1359     if (gen7_vc1_surface->dmv == NULL) {
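        /* Allocate 64 bytes of direct-mode MV data per macroblock. */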
1360         gen7_vc1_surface->dmv = dri_bo_alloc(i965->intel.bufmgr,
1361                                              "direct mv w/r buffer",
1362                                              width_in_mbs * height_in_mbs * 64,
1363                                              0x1000);
1364     }
1365 }
1366
1367 static void
1368 gen7_mfd_vc1_decode_init(VADriverContextP ctx,
1369                          struct decode_state *decode_state,
1370                          struct gen7_mfd_context *gen7_mfd_context)
1371 {
1372     VAPictureParameterBufferVC1 *pic_param;
1373     struct i965_driver_data *i965 = i965_driver_data(ctx);
1374     struct object_surface *obj_surface;
1375     int i;
1376     dri_bo *bo;
1377     int width_in_mbs;
1378     int picture_type;
1379  
1380     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1381     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1382     width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1383     picture_type = pic_param->picture_fields.bits.picture_type;
1384  
1385     /* reference picture */
1386     obj_surface = SURFACE(pic_param->forward_reference_picture);
1387
1388     if (obj_surface && obj_surface->bo)
1389         gen7_mfd_context->reference_surface[0].surface_id = pic_param->forward_reference_picture;
1390     else
1391         gen7_mfd_context->reference_surface[0].surface_id = VA_INVALID_ID;
1392
1393     obj_surface = SURFACE(pic_param->backward_reference_picture);
1394
1395     if (obj_surface && obj_surface->bo)
1396         gen7_mfd_context->reference_surface[1].surface_id = pic_param->backward_reference_picture;
1397     else
1398         gen7_mfd_context->reference_surface[1].surface_id = pic_param->forward_reference_picture;
1399
1400     /* Fill the remaining slots by alternating the forward/backward references so every entry holds a usable surface. */
1401     for (i = 2; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++)
1402         gen7_mfd_context->reference_surface[i].surface_id = gen7_mfd_context->reference_surface[i % 2].surface_id;
1403
1404     /* Current decoded picture */
1405     obj_surface = SURFACE(decode_state->current_render_target);
1406     assert(obj_surface);
1407     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
1408     gen7_mfd_init_vc1_surface(ctx, pic_param, obj_surface);
1409
1410     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
1411     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
1412     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
1413     gen7_mfd_context->post_deblocking_output.valid = pic_param->entrypoint_fields.bits.loopfilter;
1414
1415     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1416     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1417     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1418     gen7_mfd_context->pre_deblocking_output.valid = !pic_param->entrypoint_fields.bits.loopfilter;
1419
1420     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
1421     bo = dri_bo_alloc(i965->intel.bufmgr,
1422                       "intra row store",
1423                       width_in_mbs * 64,
1424                       0x1000);
1425     assert(bo);
1426     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
1427     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
1428
1429     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
1430     bo = dri_bo_alloc(i965->intel.bufmgr,
1431                       "deblocking filter row store",
1432                       width_in_mbs * 6 * 64,
1433                       0x1000);
1434     assert(bo);
1435     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
1436     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
1437
1438     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1439     bo = dri_bo_alloc(i965->intel.bufmgr,
1440                       "bsd mpc row store",
1441                       width_in_mbs * 96,
1442                       0x1000);
1443     assert(bo);
1444     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1445     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1446
1447     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1448
1449     gen7_mfd_context->bitplane_read_buffer.valid = !!pic_param->bitplane_present.value;
1450     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
1451     
1452     if (gen7_mfd_context->bitplane_read_buffer.valid) {
1453         int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1454         int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1455         int bitplane_width = ALIGN(width_in_mbs, 2) / 2;
1456         int src_w, src_h;
1457         uint8_t *src = NULL, *dst = NULL;
1458
1459         assert(decode_state->bit_plane->buffer);
1460         src = decode_state->bit_plane->buffer;
1461
1462         bo = dri_bo_alloc(i965->intel.bufmgr,
1463                           "VC-1 Bitplane",
1464                           bitplane_width * height_in_mbs,
1465                           0x1000);
1466         assert(bo);
1467         gen7_mfd_context->bitplane_read_buffer.bo = bo;
1468
1469         dri_bo_map(bo, True);
1470         assert(bo->virtual);
1471         dst = bo->virtual;
1472
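        /*
         * Repack the VA bitplane (two macroblocks per byte, raster order
         * across the whole picture) into the per-row layout the hardware
         * expects, padding each row to bitplane_width bytes.  For skipped
         * pictures the skip bit (0x2) is forced on for every macroblock.
         */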
1473         for (src_h = 0; src_h < height_in_mbs; src_h++) {
1474             for(src_w = 0; src_w < width_in_mbs; src_w++) {
1475                 int src_index, dst_index;
1476                 int src_shift;
1477                 uint8_t src_value;
1478
1479                 src_index = (src_h * width_in_mbs + src_w) / 2;
1480                 src_shift = !((src_h * width_in_mbs + src_w) & 1) * 4;
1481                 src_value = ((src[src_index] >> src_shift) & 0xf);
1482
1483                 if (picture_type == GEN7_VC1_SKIPPED_PICTURE){
1484                     src_value |= 0x2;
1485                 }
1486
1487                 dst_index = src_w / 2;
1488                 dst[dst_index] = ((dst[dst_index] >> 4) | (src_value << 4));
1489             }
1490
1491             if (src_w & 1)
1492                 dst[src_w / 2] >>= 4;
1493
1494             dst += bitplane_width;
1495         }
1496
1497         dri_bo_unmap(bo);
1498     } else
1499         gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1500 }
1501
1502 static void
1503 gen7_mfd_vc1_pic_state(VADriverContextP ctx,
1504                        struct decode_state *decode_state,
1505                        struct gen7_mfd_context *gen7_mfd_context)
1506 {
1507     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1508     VAPictureParameterBufferVC1 *pic_param;
1509     struct i965_driver_data *i965 = i965_driver_data(ctx);
1510     struct object_surface *obj_surface;
1511     int alt_pquant_config = 0, alt_pquant_edge_mask = 0, alt_pq;
1512     int dquant, dquantfrm, dqprofile, dqdbedge, dqsbedge, dqbilevel;
1513     int unified_mv_mode;
1514     int ref_field_pic_polarity = 0;
1515     int scale_factor = 0;
1516     int trans_ac_y = 0;
1517     int dmv_surface_valid = 0;
1518     int brfd = 0;
1519     int fcm = 0;
1520     int picture_type;
1521     int profile;
1522     int overlap;
1523     int interpolation_mode = 0;
1524
1525     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1526     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1527
1528     profile = va_to_gen7_vc1_profile[pic_param->sequence_fields.bits.profile];
1529     dquant = pic_param->pic_quantizer_fields.bits.dquant;
1530     dquantfrm = pic_param->pic_quantizer_fields.bits.dq_frame;
1531     dqprofile = pic_param->pic_quantizer_fields.bits.dq_profile;
1532     dqdbedge = pic_param->pic_quantizer_fields.bits.dq_db_edge;
1533     dqsbedge = pic_param->pic_quantizer_fields.bits.dq_sb_edge;
1534     dqbilevel = pic_param->pic_quantizer_fields.bits.dq_binary_level;
1535     alt_pq = pic_param->pic_quantizer_fields.bits.alt_pic_quantizer;
1536
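    /*
     * Map the VC-1 VOPDQUANT syntax (DQUANT, DQUANTFRM, DQPROFILE,
     * DQDBEDGE/DQSBEDGE, DQBILEVEL) onto the ALTPQUANT configuration and
     * edge-mask fields of MFX_VC1_PIC_STATE.
     */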
1537     if (dquant == 0) {
1538         alt_pquant_config = 0;
1539         alt_pquant_edge_mask = 0;
1540     } else if (dquant == 2) {
1541         alt_pquant_config = 1;
1542         alt_pquant_edge_mask = 0xf;
1543     } else {
1544         assert(dquant == 1);
1545         if (dquantfrm == 0) {
1546             alt_pquant_config = 0;
1547             alt_pquant_edge_mask = 0;
1548             alt_pq = 0;
1549         } else {
1550             assert(dquantfrm == 1);
1551             alt_pquant_config = 1;
1552
1553             switch (dqprofile) {
1554             case 3:
1555                 if (dqbilevel == 0) {
1556                     alt_pquant_config = 2;
1557                     alt_pquant_edge_mask = 0;
1558                 } else {
1559                     assert(dqbilevel == 1);
1560                     alt_pquant_config = 3;
1561                     alt_pquant_edge_mask = 0;
1562                 }
1563                 break;
1564                 
1565             case 0:
1566                 alt_pquant_edge_mask = 0xf;
1567                 break;
1568
1569             case 1:
1570                 if (dqdbedge == 3)
1571                     alt_pquant_edge_mask = 0x9;
1572                 else
1573                     alt_pquant_edge_mask = (0x3 << dqdbedge);
1574
1575                 break;
1576
1577             case 2:
1578                 alt_pquant_edge_mask = (0x1 << dqsbedge);
1579                 break;
1580
1581             default:
1582                 assert(0);
1583             }
1584         }
1585     }
1586
1587     if (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation) {
1588         assert(pic_param->mv_fields.bits.mv_mode2 < 4);
1589         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode2];
1590     } else {
1591         assert(pic_param->mv_fields.bits.mv_mode < 4);
1592         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode];
1593     }
1594
1595     if (pic_param->sequence_fields.bits.interlace == 1 &&
1596         pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1597         /* FIXME: calculate reference field picture polarity */
1598         assert(0);
1599         ref_field_pic_polarity = 0;
1600     }
1601
1602     if (pic_param->b_picture_fraction < 21)
1603         scale_factor = b_picture_scale_factor[pic_param->b_picture_fraction];
1604
1605     picture_type = va_to_gen7_vc1_pic_type[pic_param->picture_fields.bits.picture_type];
1606     
1607     if (profile == GEN7_VC1_ADVANCED_PROFILE && 
1608         picture_type == GEN7_VC1_I_PICTURE)
1609         picture_type = GEN7_VC1_BI_PICTURE;
1610
1611     if (picture_type == GEN7_VC1_I_PICTURE || picture_type == GEN7_VC1_BI_PICTURE) /* I picture */
1612         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx2;
1613     else {
1614         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx1;
1615         /*
1616          * 8.3.6.2.1 Transform Type Selection
1617          * If variable-sized transform coding is not enabled,
1618          * then the 8x8 transform shall be used for all blocks.
1619          * it is also MFX_VC1_PIC_STATE requirement.
1620          */
1621         if (pic_param->transform_fields.bits.variable_sized_transform_flag == 0) {
1622             pic_param->transform_fields.bits.mb_level_transform_type_flag   = 1;
1623             pic_param->transform_fields.bits.frame_level_transform_type     = 0;
1624         }
1625     }
1626
1627
1628     if (picture_type == GEN7_VC1_B_PICTURE) {
1629         struct gen7_vc1_surface *gen7_vc1_surface = NULL;
1630
1631         obj_surface = SURFACE(pic_param->backward_reference_picture);
1632         assert(obj_surface);
1633         gen7_vc1_surface = obj_surface->private_data;
1634
1635         if (!gen7_vc1_surface || 
1636             (va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_I_PICTURE ||
1637              va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_BI_PICTURE))
1638             dmv_surface_valid = 0;
1639         else
1640             dmv_surface_valid = 1;
1641     }
1642
1643     assert(pic_param->picture_fields.bits.frame_coding_mode < 3);
1644
1645     if (pic_param->picture_fields.bits.frame_coding_mode < 2)
1646         fcm = pic_param->picture_fields.bits.frame_coding_mode;
1647     else {
1648         if (pic_param->picture_fields.bits.top_field_first)
1649             fcm = 2;
1650         else
1651             fcm = 3;
1652     }
1653
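    /*
     * For B pictures, derive the backward reference frame distance (BRFD)
     * from the reference distance scaled by the BFRACTION factor
     * ((scale_factor * brfd) >> 8), clamping the result at zero.
     */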
1654     if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_B_PICTURE) { /* B picture */
1655         brfd = pic_param->reference_fields.bits.reference_distance;
1656         brfd = (scale_factor * brfd) >> 8;
1657         brfd = pic_param->reference_fields.bits.reference_distance - brfd - 1;
1658
1659         if (brfd < 0)
1660             brfd = 0;
1661     }
1662
1663     overlap = 0;
1664     if (profile != GEN7_VC1_ADVANCED_PROFILE) {
1665         if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1666             overlap = 1;
1667         }
1668     } else {
1669         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_P_PICTURE &&
1670             pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1671             overlap = 1;
1672         }
1673         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_I_PICTURE ||
1674             pic_param->picture_fields.bits.picture_type == GEN7_VC1_BI_PICTURE) {
1675             if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1676                 overlap = 1;
1677             } else if (pic_param->conditional_overlap_flag == 2 ||
1678                        pic_param->conditional_overlap_flag == 3) {
1679                 overlap = 1;
1680             }
1681         }
1682     }
1683
1684     assert(pic_param->conditional_overlap_flag < 3);
1685     assert(pic_param->mv_fields.bits.mv_table < 4); /* FIXME: interlace mode */
1686
1687     if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPelBilinear ||
1688         (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1689          pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPelBilinear))
1690         interpolation_mode = 9; /* Half-pel bilinear */
1691     else if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPel ||
1692              (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1693               pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPel))
1694         interpolation_mode = 1; /* Half-pel bicubic */
1695     else
1696         interpolation_mode = 0; /* Quarter-pel bicubic */
1697
1698     BEGIN_BCS_BATCH(batch, 6);
1699     OUT_BCS_BATCH(batch, MFD_VC1_LONG_PIC_STATE | (6 - 2));
1700     OUT_BCS_BATCH(batch,
1701                   (((ALIGN(pic_param->coded_height, 16) / 16) - 1) << 16) |
1702                   ((ALIGN(pic_param->coded_width, 16) / 16) - 1));
1703     OUT_BCS_BATCH(batch,
1704                   ((ALIGN(pic_param->coded_width, 16) / 16 + 1) / 2 - 1) << 24 |
1705                   dmv_surface_valid << 15 |
1706                   (pic_param->pic_quantizer_fields.bits.quantizer == 0) << 14 | /* implicit quantizer */
1707                   pic_param->rounding_control << 13 |
1708                   pic_param->sequence_fields.bits.syncmarker << 12 |
1709                   interpolation_mode << 8 |
1710                   0 << 7 | /* FIXME: scale up or down ??? */
1711                   pic_param->range_reduction_frame << 6 |
1712                   pic_param->entrypoint_fields.bits.loopfilter << 5 |
1713                   overlap << 4 |
1714                   !pic_param->picture_fields.bits.is_first_field << 3 |
1715                   (pic_param->sequence_fields.bits.profile == 3) << 0);
1716     OUT_BCS_BATCH(batch,
1717                   va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] << 29 |
1718                   picture_type << 26 |
1719                   fcm << 24 |
1720                   alt_pq << 16 |
1721                   pic_param->pic_quantizer_fields.bits.pic_quantizer_scale << 8 |
1722                   scale_factor << 0);
1723     OUT_BCS_BATCH(batch,
1724                   unified_mv_mode << 28 |
1725                   pic_param->mv_fields.bits.four_mv_switch << 27 |
1726                   pic_param->fast_uvmc_flag << 26 |
1727                   ref_field_pic_polarity << 25 |
1728                   pic_param->reference_fields.bits.num_reference_pictures << 24 |
1729                   pic_param->reference_fields.bits.reference_distance << 20 |
1730                   pic_param->reference_fields.bits.reference_distance << 16 | /* FIXME: ??? */
1731                   pic_param->mv_fields.bits.extended_dmv_range << 10 |
1732                   pic_param->mv_fields.bits.extended_mv_range << 8 |
1733                   alt_pquant_edge_mask << 4 |
1734                   alt_pquant_config << 2 |
1735                   pic_param->pic_quantizer_fields.bits.half_qp << 1 |                  
1736                   pic_param->pic_quantizer_fields.bits.pic_quantizer_type << 0);
1737     OUT_BCS_BATCH(batch,
1738                   !!pic_param->bitplane_present.value << 31 |
1739                   !pic_param->bitplane_present.flags.bp_forward_mb << 30 |
1740                   !pic_param->bitplane_present.flags.bp_mv_type_mb << 29 |
1741                   !pic_param->bitplane_present.flags.bp_skip_mb << 28 |
1742                   !pic_param->bitplane_present.flags.bp_direct_mb << 27 |
1743                   !pic_param->bitplane_present.flags.bp_overflags << 26 |
1744                   !pic_param->bitplane_present.flags.bp_ac_pred << 25 |
1745                   !pic_param->bitplane_present.flags.bp_field_tx << 24 |
1746                   pic_param->mv_fields.bits.mv_table << 20 |
1747                   pic_param->mv_fields.bits.four_mv_block_pattern_table << 18 |
1748                   pic_param->mv_fields.bits.two_mv_block_pattern_table << 16 |
1749                   pic_param->transform_fields.bits.frame_level_transform_type << 12 |                  
1750                   pic_param->transform_fields.bits.mb_level_transform_type_flag << 11 |
1751                   pic_param->mb_mode_table << 8 |
1752                   trans_ac_y << 6 |
1753                   pic_param->transform_fields.bits.transform_ac_codingset_idx1 << 4 |
1754                   pic_param->transform_fields.bits.intra_transform_dc_table << 3 |
1755                   pic_param->cbp_table << 0);
1756     ADVANCE_BCS_BATCH(batch);
1757 }
1758
1759 static void
1760 gen7_mfd_vc1_pred_pipe_state(VADriverContextP ctx,
1761                              struct decode_state *decode_state,
1762                              struct gen7_mfd_context *gen7_mfd_context)
1763 {
1764     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1765     VAPictureParameterBufferVC1 *pic_param;
1766     int intensitycomp_single;
1767
1768     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1769     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1770
1773     intensitycomp_single = (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation);
1774
1775     BEGIN_BCS_BATCH(batch, 6);
1776     OUT_BCS_BATCH(batch, MFX_VC1_PRED_PIPE_STATE | (6 - 2));
1777     OUT_BCS_BATCH(batch,
1778                   0 << 14 | /* FIXME: double ??? */
1779                   0 << 12 |
1780                   intensitycomp_single << 10 |
1781                   intensitycomp_single << 8 |
1782                   0 << 4 | /* FIXME: interlace mode */
1783                   0);
1784     OUT_BCS_BATCH(batch,
1785                   pic_param->luma_shift << 16 |
1786                   pic_param->luma_scale << 0); /* FIXME: Luma Scaling */
1787     OUT_BCS_BATCH(batch, 0);
1788     OUT_BCS_BATCH(batch, 0);
1789     OUT_BCS_BATCH(batch, 0);
1790     ADVANCE_BCS_BATCH(batch);
1791 }
1792
1793
1794 static void
1795 gen7_mfd_vc1_directmode_state(VADriverContextP ctx,
1796                               struct decode_state *decode_state,
1797                               struct gen7_mfd_context *gen7_mfd_context)
1798 {
1799     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1800     VAPictureParameterBufferVC1 *pic_param;
1801     struct i965_driver_data *i965 = i965_driver_data(ctx);
1802     struct object_surface *obj_surface;
1803     dri_bo *dmv_read_buffer = NULL, *dmv_write_buffer = NULL;
1804
1805     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1806     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1807
1808     obj_surface = SURFACE(decode_state->current_render_target);
1809
1810     if (obj_surface && obj_surface->private_data) {
1811         dmv_write_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1812     }
1813
1814     obj_surface = SURFACE(pic_param->backward_reference_picture);
1815
1816     if (obj_surface && obj_surface->private_data) {
1817         dmv_read_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1818     }
1819
1820     BEGIN_BCS_BATCH(batch, 3);
1821     OUT_BCS_BATCH(batch, MFX_VC1_DIRECTMODE_STATE | (3 - 2));
1822
1823     if (dmv_write_buffer)
1824         OUT_BCS_RELOC(batch, dmv_write_buffer,
1825                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
1826                       0);
1827     else
1828         OUT_BCS_BATCH(batch, 0);
1829
1830     if (dmv_read_buffer)
1831         OUT_BCS_RELOC(batch, dmv_read_buffer,
1832                       I915_GEM_DOMAIN_INSTRUCTION, 0,
1833                       0);
1834     else
1835         OUT_BCS_BATCH(batch, 0);
1836                   
1837     ADVANCE_BCS_BATCH(batch);
1838 }
1839
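/*
 * For the advanced profile (profile == 3), the slice header may contain
 * 0x00 0x00 0x03 emulation-prevention sequences; count those extra bytes
 * and fold them into the returned bit offset so it indexes the raw buffer
 * that the hardware reads.
 */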
1840 static int
1841 gen7_mfd_vc1_get_macroblock_bit_offset(uint8_t *buf, int in_slice_data_bit_offset, int profile)
1842 {
1843     int out_slice_data_bit_offset;
1844     int slice_header_size = in_slice_data_bit_offset / 8;
1845     int i, j;
1846
1847     if (profile != 3)
1848         out_slice_data_bit_offset = in_slice_data_bit_offset;
1849     else {
1850         for (i = 0, j = 0; i < slice_header_size; i++, j++) {
1851             if (!buf[j] && !buf[j + 1] && buf[j + 2] == 3 && buf[j + 3] < 4) {
1852                 i++, j += 2;
1853             }
1854         }
1855
1856         out_slice_data_bit_offset = 8 * j + in_slice_data_bit_offset % 8;
1857     }
1858
1859     return out_slice_data_bit_offset;
1860 }
1861
1862 static void
1863 gen7_mfd_vc1_bsd_object(VADriverContextP ctx,
1864                         VAPictureParameterBufferVC1 *pic_param,
1865                         VASliceParameterBufferVC1 *slice_param,
1866                         VASliceParameterBufferVC1 *next_slice_param,
1867                         dri_bo *slice_data_bo,
1868                         struct gen7_mfd_context *gen7_mfd_context)
1869 {
1870     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1871     int next_slice_start_vert_pos;
1872     int macroblock_offset;
1873     uint8_t *slice_data = NULL;
1874
1875     dri_bo_map(slice_data_bo, 0);
1876     slice_data = (uint8_t *)(slice_data_bo->virtual + slice_param->slice_data_offset);
1877     macroblock_offset = gen7_mfd_vc1_get_macroblock_bit_offset(slice_data, 
1878                                                                slice_param->macroblock_offset,
1879                                                                pic_param->sequence_fields.bits.profile);
1880     dri_bo_unmap(slice_data_bo);
1881
1882     if (next_slice_param)
1883         next_slice_start_vert_pos = next_slice_param->slice_vertical_position;
1884     else
1885         next_slice_start_vert_pos = ALIGN(pic_param->coded_height, 16) / 16;
1886
1887     BEGIN_BCS_BATCH(batch, 5);
1888     OUT_BCS_BATCH(batch, MFD_VC1_BSD_OBJECT | (5 - 2));
1889     OUT_BCS_BATCH(batch, 
1890                   slice_param->slice_data_size - (macroblock_offset >> 3));
1891     OUT_BCS_BATCH(batch, 
1892                   slice_param->slice_data_offset + (macroblock_offset >> 3));
1893     OUT_BCS_BATCH(batch,
1894                   slice_param->slice_vertical_position << 16 |
1895                   next_slice_start_vert_pos << 0);
1896     OUT_BCS_BATCH(batch,
1897                   (macroblock_offset & 0x7));
1898     ADVANCE_BCS_BATCH(batch);
1899 }
1900
1901 static void
1902 gen7_mfd_vc1_decode_picture(VADriverContextP ctx,
1903                             struct decode_state *decode_state,
1904                             struct gen7_mfd_context *gen7_mfd_context)
1905 {
1906     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1907     VAPictureParameterBufferVC1 *pic_param;
1908     VASliceParameterBufferVC1 *slice_param, *next_slice_param, *next_slice_group_param;
1909     dri_bo *slice_data_bo;
1910     int i, j;
1911
1912     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1913     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1914
1915     gen7_mfd_vc1_decode_init(ctx, decode_state, gen7_mfd_context);
1916     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1917     intel_batchbuffer_emit_mi_flush(batch);
1918     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1919     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1920     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1921     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1922     gen7_mfd_vc1_pic_state(ctx, decode_state, gen7_mfd_context);
1923     gen7_mfd_vc1_pred_pipe_state(ctx, decode_state, gen7_mfd_context);
1924     gen7_mfd_vc1_directmode_state(ctx, decode_state, gen7_mfd_context);
1925
1926     for (j = 0; j < decode_state->num_slice_params; j++) {
1927         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1928         slice_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j]->buffer;
1929         slice_data_bo = decode_state->slice_datas[j]->bo;
1930         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1, gen7_mfd_context);
1931
1932         if (j == decode_state->num_slice_params - 1)
1933             next_slice_group_param = NULL;
1934         else
1935             next_slice_group_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j + 1]->buffer;
1936
1937         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1938             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1939
1940             if (i < decode_state->slice_params[j]->num_elements - 1)
1941                 next_slice_param = slice_param + 1;
1942             else
1943                 next_slice_param = next_slice_group_param;
1944
1945             gen7_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
1946             slice_param++;
1947         }
1948     }
1949
1950     intel_batchbuffer_end_atomic(batch);
1951     intel_batchbuffer_flush(batch);
1952 }
1953
1954 static void
1955 gen7_mfd_jpeg_decode_init(VADriverContextP ctx,
1956                           struct decode_state *decode_state,
1957                           struct gen7_mfd_context *gen7_mfd_context)
1958 {
1959     struct i965_driver_data *i965 = i965_driver_data(ctx);
1960     struct object_surface *obj_surface;
1961     VAPictureParameterBufferJPEGBaseline *pic_param;
1962     int subsampling = SUBSAMPLE_YUV420;
1963
1964     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
1965
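    /*
     * Derive the chroma subsampling from the per-component sampling factors
     * in the frame header; only baseline 1- and 3-component layouts are
     * supported.
     */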
1966     if (pic_param->num_components == 1)
1967         subsampling = SUBSAMPLE_YUV400;
1968     else if (pic_param->num_components == 3) {
1969         int h1 = pic_param->components[0].h_sampling_factor;
1970         int h2 = pic_param->components[1].h_sampling_factor;
1971         int h3 = pic_param->components[2].h_sampling_factor;
1972         int v1 = pic_param->components[0].v_sampling_factor;
1973         int v2 = pic_param->components[1].v_sampling_factor;
1974         int v3 = pic_param->components[2].v_sampling_factor;
1975
1976         if (h1 == 2 && h2 == 1 && h3 == 1 &&
1977             v1 == 2 && v2 == 1 && v3 == 1)
1978             subsampling = SUBSAMPLE_YUV420;
1979         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1980                  v1 == 1 && v2 == 1 && v3 == 1)
1981             subsampling = SUBSAMPLE_YUV422H;
1982         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1983                  v1 == 1 && v2 == 1 && v3 == 1)
1984             subsampling = SUBSAMPLE_YUV444;
1985         else if (h1 == 4 && h2 == 1 && h3 == 1 &&
1986                  v1 == 1 && v2 == 1 && v3 == 1)
1987             subsampling = SUBSAMPLE_YUV411;
1988         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1989                  v1 == 2 && v2 == 1 && v3 == 1)
1990             subsampling = SUBSAMPLE_YUV422V;
1991         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1992                  v1 == 2 && v2 == 2 && v3 == 2)
1993             subsampling = SUBSAMPLE_YUV422H;
1994         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
1995                  v1 == 2 && v2 == 1 && v3 == 1)
1996             subsampling = SUBSAMPLE_YUV422V;
1997         else
1998             assert(0);
1999     } else {
2000         assert(0);
2001     }
2002
2003     /* Current decoded picture */
2004     obj_surface = SURFACE(decode_state->current_render_target);
2005     assert(obj_surface);
2006     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('I','M','C','1'), subsampling);
2007
2008     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
2009     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
2010     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
2011     gen7_mfd_context->pre_deblocking_output.valid = 1;
2012
2013     gen7_mfd_context->post_deblocking_output.bo = NULL;
2014     gen7_mfd_context->post_deblocking_output.valid = 0;
2015
2016     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
2017     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
2018
2019     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
2020     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
2021
2022     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
2023     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 0;
2024
2025     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
2026     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
2027
2028     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
2029     gen7_mfd_context->bitplane_read_buffer.valid = 0;
2030 }
2031
2032 static const int va_to_gen7_jpeg_rotation[4] = {
2033     GEN7_JPEG_ROTATION_0,
2034     GEN7_JPEG_ROTATION_90,
2035     GEN7_JPEG_ROTATION_180,
2036     GEN7_JPEG_ROTATION_270
2037 };
2038
2039 static void
2040 gen7_mfd_jpeg_pic_state(VADriverContextP ctx,
2041                         struct decode_state *decode_state,
2042                         struct gen7_mfd_context *gen7_mfd_context)
2043 {
2044     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2045     VAPictureParameterBufferJPEGBaseline *pic_param;
2046     int chroma_type = GEN7_YUV420;
2047     int frame_width_in_blks;
2048     int frame_height_in_blks;
2049
2050     assert(decode_state->pic_param && decode_state->pic_param->buffer);
2051     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2052
2053     if (pic_param->num_components == 1)
2054         chroma_type = GEN7_YUV400;
2055     else if (pic_param->num_components == 3) {
2056         int h1 = pic_param->components[0].h_sampling_factor;
2057         int h2 = pic_param->components[1].h_sampling_factor;
2058         int h3 = pic_param->components[2].h_sampling_factor;
2059         int v1 = pic_param->components[0].v_sampling_factor;
2060         int v2 = pic_param->components[1].v_sampling_factor;
2061         int v3 = pic_param->components[2].v_sampling_factor;
2062
2063         if (h1 == 2 && h2 == 1 && h3 == 1 &&
2064             v1 == 2 && v2 == 1 && v3 == 1)
2065             chroma_type = GEN7_YUV420;
2066         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2067                  v1 == 1 && v2 == 1 && v3 == 1)
2068             chroma_type = GEN7_YUV422H_2Y;
2069         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2070                  v1 == 1 && v2 == 1 && v3 == 1)
2071             chroma_type = GEN7_YUV444;
2072         else if (h1 == 4 && h2 == 1 && h3 == 1 &&
2073                  v1 == 1 && v2 == 1 && v3 == 1)
2074             chroma_type = GEN7_YUV411;
2075         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2076                  v1 == 2 && v2 == 1 && v3 == 1)
2077             chroma_type = GEN7_YUV422V_2Y;
2078         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2079                  v1 == 2 && v2 == 2 && v3 == 2)
2080             chroma_type = GEN7_YUV422H_4Y;
2081         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
2082                  v1 == 2 && v2 == 1 && v3 == 1)
2083             chroma_type = GEN7_YUV422V_4Y;
2084         else
2085             assert(0);
2086     }
2087
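    /*
     * MFX_JPEG_PIC_STATE takes the frame size in 8x8 blocks; round the
     * picture dimensions up to whole MCUs of the selected chroma format.
     */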
2088     if (chroma_type == GEN7_YUV400 ||
2089         chroma_type == GEN7_YUV444 ||
2090         chroma_type == GEN7_YUV422V_2Y) {
2091         frame_width_in_blks = ((pic_param->picture_width + 7) / 8);
2092         frame_height_in_blks = ((pic_param->picture_height + 7) / 8);
2093     } else if (chroma_type == GEN7_YUV411) {
2094         frame_width_in_blks = ((pic_param->picture_width + 31) / 32) * 4;
2095         frame_height_in_blks = ((pic_param->picture_height + 31) / 32) * 4;
2096     } else {
2097         frame_width_in_blks = ((pic_param->picture_width + 15) / 16) * 2;
2098         frame_height_in_blks = ((pic_param->picture_height + 15) / 16) * 2;
2099     }
2100
2101     BEGIN_BCS_BATCH(batch, 3);
2102     OUT_BCS_BATCH(batch, MFX_JPEG_PIC_STATE | (3 - 2));
2103     OUT_BCS_BATCH(batch,
2104                   (va_to_gen7_jpeg_rotation[0] << 4) |    /* without rotation */
2105                   (chroma_type << 0));
2106     OUT_BCS_BATCH(batch,
2107                   ((frame_height_in_blks - 1) << 16) |   /* FrameHeightInBlks */
2108                   ((frame_width_in_blks - 1) << 0));    /* FrameWidthInBlks */
2109     ADVANCE_BCS_BATCH(batch);
2110 }
2111
2112 static const int va_to_gen7_jpeg_hufftable[2] = {
2113     MFX_HUFFTABLE_ID_Y,
2114     MFX_HUFFTABLE_ID_UV
2115 };
2116
2117 static void
2118 gen7_mfd_jpeg_huff_table_state(VADriverContextP ctx,
2119                                struct decode_state *decode_state,
2120                                struct gen7_mfd_context *gen7_mfd_context,
2121                                int num_tables)
2122 {
2123     VAHuffmanTableBufferJPEGBaseline *huffman_table;
2124     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2125     int index;
2126
2127     if (!decode_state->huffman_table || !decode_state->huffman_table->buffer)
2128         return;
2129
2130     huffman_table = (VAHuffmanTableBufferJPEGBaseline *)decode_state->huffman_table->buffer;
2131
2132     for (index = 0; index < num_tables; index++) {
2133         int id = va_to_gen7_jpeg_hufftable[index];
2134         if (!huffman_table->load_huffman_table[index])
2135             continue;
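        /*
         * MFX_JPEG_HUFF_TABLE_STATE is 53 DWs: the header and table id,
         * then 12 bytes of DC code counts, 12 DC values, 16 AC code counts
         * and the AC values padded to a DW boundary (164 bytes).
         */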
2136         BEGIN_BCS_BATCH(batch, 53);
2137         OUT_BCS_BATCH(batch, MFX_JPEG_HUFF_TABLE_STATE | (53 - 2));
2138         OUT_BCS_BATCH(batch, id);
2139         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_dc_codes, 12);
2140         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].dc_values, 12);
2141         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_ac_codes, 16);
2142         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].ac_values, 164);
2143         ADVANCE_BCS_BATCH(batch);
2144     }
2145 }
2146
2147 static const int va_to_gen7_jpeg_qm[5] = {
2148     -1,
2149     MFX_QM_JPEG_LUMA_Y_QUANTIZER_MATRIX,
2150     MFX_QM_JPEG_CHROMA_CB_QUANTIZER_MATRIX,
2151     MFX_QM_JPEG_CHROMA_CR_QUANTIZER_MATRIX,
2152     MFX_QM_JPEG_ALPHA_QUANTIZER_MATRIX
2153 };
2154
2155 static void
2156 gen7_mfd_jpeg_qm_state(VADriverContextP ctx,
2157                        struct decode_state *decode_state,
2158                        struct gen7_mfd_context *gen7_mfd_context)
2159 {
2160     VAPictureParameterBufferJPEGBaseline *pic_param;
2161     VAIQMatrixBufferJPEGBaseline *iq_matrix;
2162     int index;
2163
2164     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
2165         return;
2166
2167     iq_matrix = (VAIQMatrixBufferJPEGBaseline *)decode_state->iq_matrix->buffer;
2168     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2169
2170     assert(pic_param->num_components <= 3);
2171
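    /*
     * component_id is taken relative to the first component, so the first
     * three components map onto the Y, Cb and Cr quantiser matrix ids in
     * va_to_gen7_jpeg_qm[].
     */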
2172     for (index = 0; index < pic_param->num_components; index++) {
2173         int qm_type = va_to_gen7_jpeg_qm[pic_param->components[index].component_id - pic_param->components[0].component_id + 1];
2174         unsigned char *qm = iq_matrix->quantiser_table[pic_param->components[index].quantiser_table_selector];
2175         unsigned char raster_qm[64];
2176         int j;
2177
2178         if (!iq_matrix->load_quantiser_table[pic_param->components[index].quantiser_table_selector])
2179             continue;
2180
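        /*
         * VA passes the quantiser table in zig-zag scan order; MFX_QM_STATE
         * expects raster order, so reorder through zigzag_direct[] (e.g. the
         * third zig-zag entry qm[2] lands at raster_qm[8], the first
         * coefficient of the second row).
         */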
2181         for (j = 0; j < 64; j++)
2182             raster_qm[zigzag_direct[j]] = qm[j];
2183
2184         gen7_mfd_qm_state(ctx, qm_type, raster_qm, 64, gen7_mfd_context);
2185     }
2186 }
2187
2188 static void
2189 gen7_mfd_jpeg_bsd_object(VADriverContextP ctx,
2190                          VAPictureParameterBufferJPEGBaseline *pic_param,
2191                          VASliceParameterBufferJPEGBaseline *slice_param,
2192                          VASliceParameterBufferJPEGBaseline *next_slice_param,
2193                          dri_bo *slice_data_bo,
2194                          struct gen7_mfd_context *gen7_mfd_context)
2195 {
2196     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2197     int scan_component_mask = 0;
2198     int i;
2199
2200     assert(slice_param->num_components > 0);
2201     assert(slice_param->num_components < 4);
2202     assert(slice_param->num_components <= pic_param->num_components);
2203
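    /*
     * Build the scan component mask: bits 0..2 select Y, Cb and Cr, with
     * component selectors taken relative to the first frame component.
     */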
2204     for (i = 0; i < slice_param->num_components; i++) {
2205         switch (slice_param->components[i].component_selector - pic_param->components[0].component_id + 1) {
2206         case 1:
2207             scan_component_mask |= (1 << 0);
2208             break;
2209         case 2:
2210             scan_component_mask |= (1 << 1);
2211             break;
2212         case 3:
2213             scan_component_mask |= (1 << 2);
2214             break;
2215         default:
2216             assert(0);
2217             break;
2218         }
2219     }
2220
2221     BEGIN_BCS_BATCH(batch, 6);
2222     OUT_BCS_BATCH(batch, MFD_JPEG_BSD_OBJECT | (6 - 2));
2223     OUT_BCS_BATCH(batch, 
2224                   slice_param->slice_data_size);
2225     OUT_BCS_BATCH(batch, 
2226                   slice_param->slice_data_offset);
2227     OUT_BCS_BATCH(batch,
2228                   slice_param->slice_horizontal_position << 16 |
2229                   slice_param->slice_vertical_position << 0);
2230     OUT_BCS_BATCH(batch,
2231                   ((slice_param->num_components != 1) << 30) |  /* interleaved */
2232                   (scan_component_mask << 27) |                 /* scan components */
2233                   (0 << 26) |   /* disable interrupt allowed */
2234                   (slice_param->num_mcus << 0));                /* MCU count */
2235     OUT_BCS_BATCH(batch,
2236                   (slice_param->restart_interval << 0));    /* RestartInterval */
2237     ADVANCE_BCS_BATCH(batch);
2238 }
2239
2240 /* Workaround for JPEG decoding on Ivybridge */
2241
2242 VAStatus 
2243 i965_DestroySurfaces(VADriverContextP ctx,
2244                      VASurfaceID *surface_list,
2245                      int num_surfaces);
2246 VAStatus 
2247 i965_CreateSurfaces(VADriverContextP ctx,
2248                     int width,
2249                     int height,
2250                     int format,
2251                     int num_surfaces,
2252                     VASurfaceID *surfaces);
2253
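/*
 * A tiny hard-coded AVC intra clip (16x16, a single slice) used by the
 * workaround: the gen7_jpeg_wa_* helpers below program a minimal AVC decode
 * of this clip before the real JPEG picture is decoded.
 */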
2254 static struct {
2255     int width;
2256     int height;
2257     unsigned char data[32];
2258     int data_size;
2259     int data_bit_offset;
2260     int qp;
2261 } gen7_jpeg_wa_clip = {
2262     16,
2263     16,
2264     {
2265         0x65, 0xb8, 0x40, 0x32, 0x13, 0xfd, 0x06, 0x6c,
2266         0xfc, 0x0a, 0x50, 0x71, 0x5c, 0x00
2267     },
2268     14,
2269     40,
2270     28,
2271 };
2272
2273 static void
2274 gen7_jpeg_wa_init(VADriverContextP ctx,
2275                   struct gen7_mfd_context *gen7_mfd_context)
2276 {
2277     struct i965_driver_data *i965 = i965_driver_data(ctx);
2278     VAStatus status;
2279     struct object_surface *obj_surface;
2280
2281     if (gen7_mfd_context->jpeg_wa_surface_id != VA_INVALID_SURFACE)
2282         i965_DestroySurfaces(ctx,
2283                              &gen7_mfd_context->jpeg_wa_surface_id,
2284                              1);
2285
2286     status = i965_CreateSurfaces(ctx,
2287                                  gen7_jpeg_wa_clip.width,
2288                                  gen7_jpeg_wa_clip.height,
2289                                  VA_RT_FORMAT_YUV420,
2290                                  1,
2291                                  &gen7_mfd_context->jpeg_wa_surface_id);
2292     assert(status == VA_STATUS_SUCCESS);
2293
2294     obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2295     assert(obj_surface);
2296     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
2297
2298     if (!gen7_mfd_context->jpeg_wa_slice_data_bo) {
2299         gen7_mfd_context->jpeg_wa_slice_data_bo = dri_bo_alloc(i965->intel.bufmgr,
2300                                                                "JPEG WA data",
2301                                                                0x1000,
2302                                                                0x1000);
2303         dri_bo_subdata(gen7_mfd_context->jpeg_wa_slice_data_bo,
2304                        0,
2305                        gen7_jpeg_wa_clip.data_size,
2306                        gen7_jpeg_wa_clip.data);
2307     }
2308 }
2309
2310 static void
2311 gen7_jpeg_wa_pipe_mode_select(VADriverContextP ctx,
2312                               struct gen7_mfd_context *gen7_mfd_context)
2313 {
2314     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2315
2316     BEGIN_BCS_BATCH(batch, 5);
2317     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
2318     OUT_BCS_BATCH(batch,
2319                   (MFX_LONG_MODE << 17) | /* Currently only support long format */
2320                   (MFD_MODE_VLD << 15) | /* VLD mode */
2321                   (0 << 10) | /* disable Stream-Out */
2322                   (0 << 9)  | /* Post Deblocking Output */
2323                   (1 << 8)  | /* Pre Deblocking Output */
2324                   (0 << 5)  | /* not in stitch mode */
2325                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
2326                   (MFX_FORMAT_AVC << 0));
2327     OUT_BCS_BATCH(batch,
2328                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
2329                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
2330                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
2331                   (0 << 1)  |
2332                   (0 << 0));
2333     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */ 
2334     OUT_BCS_BATCH(batch, 0); /* reserved */
2335     ADVANCE_BCS_BATCH(batch);
2336 }
2337
2338 static void
2339 gen7_jpeg_wa_surface_state(VADriverContextP ctx,
2340                            struct gen7_mfd_context *gen7_mfd_context)
2341 {
2342     struct i965_driver_data *i965 = i965_driver_data(ctx);
2343     struct object_surface *obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2344     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2345
2346     BEGIN_BCS_BATCH(batch, 6);
2347     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
2348     OUT_BCS_BATCH(batch, 0);
2349     OUT_BCS_BATCH(batch,
2350                   ((obj_surface->orig_width - 1) << 18) |
2351                   ((obj_surface->orig_height - 1) << 4));
2352     OUT_BCS_BATCH(batch,
2353                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
2354                   (1 << 27) | /* interleave chroma, set to 0 for JPEG */
2355                   (0 << 22) | /* surface object control state, ignored */
2356                   ((obj_surface->width - 1) << 3) | /* pitch */
2357                   (0 << 2)  | /* must be 0 */
2358                   (1 << 1)  | /* must be tiled */
2359                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
2360     OUT_BCS_BATCH(batch,
2361                   (0 << 16) | /* X offset for U(Cb), must be 0 */
2362                   (obj_surface->y_cb_offset << 0)); /* Y offset for U(Cb) */
2363     OUT_BCS_BATCH(batch,
2364                   (0 << 16) | /* X offset for V(Cr), must be 0 */
2365                   (0 << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
2366     ADVANCE_BCS_BATCH(batch);
2367 }
2368
2369 static void
2370 gen7_jpeg_wa_pipe_buf_addr_state(VADriverContextP ctx,
2371                                  struct gen7_mfd_context *gen7_mfd_context)
2372 {
2373     struct i965_driver_data *i965 = i965_driver_data(ctx);
2374     struct object_surface *obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2375     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2376     dri_bo *intra_bo;
2377     int i;
2378
2379     intra_bo = dri_bo_alloc(i965->intel.bufmgr,
2380                             "intra row store",
2381                             128 * 64,
2382                             0x1000);
2383
2384     BEGIN_BCS_BATCH(batch, 24);
2385     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
2386     OUT_BCS_RELOC(batch,
2387                   obj_surface->bo,
2388                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2389                   0);
2390     
2391     OUT_BCS_BATCH(batch, 0); /* post deblocking */
2392
2393     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2394     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2395
2396     OUT_BCS_RELOC(batch,
2397                   intra_bo,
2398                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2399                   0);
2400
2401     OUT_BCS_BATCH(batch, 0);
2402
2403     /* DW 7..22 */
2404     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2405         OUT_BCS_BATCH(batch, 0);
2406     }
2407
2408     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
2409     ADVANCE_BCS_BATCH(batch);
2410
2411     dri_bo_unreference(intra_bo);
2412 }
2413
2414 static void
2415 gen7_jpeg_wa_bsp_buf_base_addr_state(VADriverContextP ctx,
2416                                      struct gen7_mfd_context *gen7_mfd_context)
2417 {
2418     struct i965_driver_data *i965 = i965_driver_data(ctx);
2419     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2420     dri_bo *bsd_mpc_bo, *mpr_bo;
2421
2422     bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
2423                               "bsd mpc row store",
2424                               11520, /* 1.5 * 120 * 64 */
2425                               0x1000);
2426
2427     mpr_bo = dri_bo_alloc(i965->intel.bufmgr,
2428                           "mpr row store",
2429                           7680, /* 1.0 * 120 * 64 */
2430                           0x1000);
2431
2432     BEGIN_BCS_BATCH(batch, 4);
2433     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
2434
2435     OUT_BCS_RELOC(batch,
2436                   bsd_mpc_bo,
2437                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2438                   0);
2439
2440     OUT_BCS_RELOC(batch,
2441                   mpr_bo,
2442                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2443                   0);
2444     OUT_BCS_BATCH(batch, 0);
2445
2446     ADVANCE_BCS_BATCH(batch);
2447
2448     dri_bo_unreference(bsd_mpc_bo);
2449     dri_bo_unreference(mpr_bo);
2450 }
2451
2452 static void
2453 gen7_jpeg_wa_avc_qm_state(VADriverContextP ctx,
2454                           struct gen7_mfd_context *gen7_mfd_context)
2455 {
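    /* Intentionally left empty: no MFX_QM_STATE is emitted for the workaround slice. */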
2456
2457 }
2458
2459 static void
2460 gen7_jpeg_wa_avc_img_state(VADriverContextP ctx,
2461                            struct gen7_mfd_context *gen7_mfd_context)
2462 {
2463     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2464     int img_struct = 0;
2465     int mbaff_frame_flag = 0;
2466     unsigned int width_in_mbs = 1, height_in_mbs = 1;
2467
2468     BEGIN_BCS_BATCH(batch, 16);
2469     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
2470     OUT_BCS_BATCH(batch, 
2471                   width_in_mbs * height_in_mbs);
2472     OUT_BCS_BATCH(batch, 
2473                   ((height_in_mbs - 1) << 16) | 
2474                   ((width_in_mbs - 1) << 0));
2475     OUT_BCS_BATCH(batch, 
2476                   (0 << 24) |
2477                   (0 << 16) |
2478                   (0 << 14) |
2479                   (0 << 13) |
2480                   (0 << 12) | /* differ from GEN6 */
2481                   (0 << 10) |
2482                   (img_struct << 8));
2483     OUT_BCS_BATCH(batch,
2484                   (1 << 10) | /* 4:2:0 */
2485                   (1 << 7) |  /* CABAC */
2486                   (0 << 6) |
2487                   (0 << 5) |
2488                   (0 << 4) |
2489                   (0 << 3) |
2490                   (1 << 2) |
2491                   (mbaff_frame_flag << 1) |
2492                   (0 << 0));
2493     OUT_BCS_BATCH(batch, 0);
2494     OUT_BCS_BATCH(batch, 0);
2495     OUT_BCS_BATCH(batch, 0);
2496     OUT_BCS_BATCH(batch, 0);
2497     OUT_BCS_BATCH(batch, 0);
2498     OUT_BCS_BATCH(batch, 0);
2499     OUT_BCS_BATCH(batch, 0);
2500     OUT_BCS_BATCH(batch, 0);
2501     OUT_BCS_BATCH(batch, 0);
2502     OUT_BCS_BATCH(batch, 0);
2503     OUT_BCS_BATCH(batch, 0);
2504     ADVANCE_BCS_BATCH(batch);
2505 }
2506
2507 static void
2508 gen7_jpeg_wa_avc_directmode_state(VADriverContextP ctx,
2509                                   struct gen7_mfd_context *gen7_mfd_context)
2510 {
2511     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2512     int i;
2513
2514     BEGIN_BCS_BATCH(batch, 69);
2515     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
2516
2517     /* reference surfaces 0..15 */
2518     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2519         OUT_BCS_BATCH(batch, 0); /* top */
2520         OUT_BCS_BATCH(batch, 0); /* bottom */
2521     }
2522
2523     /* the current decoding frame/field */
2524     OUT_BCS_BATCH(batch, 0); /* top */
2525     OUT_BCS_BATCH(batch, 0); /* bottom */
2526
2527     /* POC List */
2528     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2529         OUT_BCS_BATCH(batch, 0);
2530         OUT_BCS_BATCH(batch, 0);
2531     }
2532
2533     OUT_BCS_BATCH(batch, 0);
2534     OUT_BCS_BATCH(batch, 0);
2535
2536     ADVANCE_BCS_BATCH(batch);
2537 }
2538
2539 static void
2540 gen7_jpeg_wa_ind_obj_base_addr_state(VADriverContextP ctx,
2541                                      struct gen7_mfd_context *gen7_mfd_context)
2542 {
2543     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2544
2545     BEGIN_BCS_BATCH(batch, 11);
2546     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
2547     OUT_BCS_RELOC(batch,
2548                   gen7_mfd_context->jpeg_wa_slice_data_bo,
2549                   I915_GEM_DOMAIN_INSTRUCTION, 0,
2550                   0);
2551     OUT_BCS_BATCH(batch, 0x80000000); /* must be set; upper bound, up to 2GB */
2552     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2553     OUT_BCS_BATCH(batch, 0);
2554     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2555     OUT_BCS_BATCH(batch, 0);
2556     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2557     OUT_BCS_BATCH(batch, 0);
2558     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2559     OUT_BCS_BATCH(batch, 0);
2560     ADVANCE_BCS_BATCH(batch);
2561 }
2562
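/*
 * Kick off decoding of the canned AVC slice: data size and bit offset come
 * from gen7_jpeg_wa_clip, and the slice is flagged as the last one.
 */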
2563 static void
2564 gen7_jpeg_wa_avc_bsd_object(VADriverContextP ctx,
2565                             struct gen7_mfd_context *gen7_mfd_context)
2566 {
2567     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2568
2569     /* the input bitstream format on GEN7 differs from GEN6 */
2570     BEGIN_BCS_BATCH(batch, 6);
2571     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
2572     OUT_BCS_BATCH(batch, gen7_jpeg_wa_clip.data_size);
2573     OUT_BCS_BATCH(batch, 0);
2574     OUT_BCS_BATCH(batch,
2575                   (0 << 31) |
2576                   (0 << 14) |
2577                   (0 << 12) |
2578                   (0 << 10) |
2579                   (0 << 8));
2580     OUT_BCS_BATCH(batch,
2581                   ((gen7_jpeg_wa_clip.data_bit_offset >> 3) << 16) |
2582                   (0 << 5)  |
2583                   (0 << 4)  |
2584                   (1 << 3) | /* LastSlice Flag */
2585                   (gen7_jpeg_wa_clip.data_bit_offset & 0x7));
2586     OUT_BCS_BATCH(batch, 0);
2587     ADVANCE_BCS_BATCH(batch);
2588 }
2589
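/*
 * MFX_AVC_SLICE_STATE for a single I slice covering the one-macroblock
 * workaround frame: no reference lists, deblocking disabled, QP taken from
 * gen7_jpeg_wa_clip.
 */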
2590 static void
2591 gen7_jpeg_wa_avc_slice_state(VADriverContextP ctx,
2592                              struct gen7_mfd_context *gen7_mfd_context)
2593 {
2594     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2595     int slice_hor_pos = 0, slice_ver_pos = 0, next_slice_hor_pos = 0, next_slice_ver_pos = 1;
2596     int num_ref_idx_l0 = 0, num_ref_idx_l1 = 0;
2597     int first_mb_in_slice = 0;
2598     int slice_type = SLICE_TYPE_I;
2599
2600     BEGIN_BCS_BATCH(batch, 11);
2601     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
2602     OUT_BCS_BATCH(batch, slice_type);
2603     OUT_BCS_BATCH(batch, 
2604                   (num_ref_idx_l1 << 24) |
2605                   (num_ref_idx_l0 << 16) |
2606                   (0 << 8) |
2607                   (0 << 0));
2608     OUT_BCS_BATCH(batch, 
2609                   (0 << 29) |
2610                   (1 << 27) |   /* disable Deblocking */
2611                   (0 << 24) |
2612                   (gen7_jpeg_wa_clip.qp << 16) |
2613                   (0 << 8) |
2614                   (0 << 0));
2615     OUT_BCS_BATCH(batch, 
2616                   (slice_ver_pos << 24) |
2617                   (slice_hor_pos << 16) | 
2618                   (first_mb_in_slice << 0));
2619     OUT_BCS_BATCH(batch,
2620                   (next_slice_ver_pos << 16) |
2621                   (next_slice_hor_pos << 0));
2622     OUT_BCS_BATCH(batch, (1 << 19)); /* last slice flag */
2623     OUT_BCS_BATCH(batch, 0);
2624     OUT_BCS_BATCH(batch, 0);
2625     OUT_BCS_BATCH(batch, 0);
2626     OUT_BCS_BATCH(batch, 0);
2627     ADVANCE_BCS_BATCH(batch);
2628 }
2629
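/*
 * JPEG hardware workaround: run a minimal dummy AVC decode through the MFX
 * pipeline right before the real JPEG decode, presumably to bring the
 * engine into a known state on this generation.
 */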
2630 static void
2631 gen7_mfd_jpeg_wa(VADriverContextP ctx,
2632                  struct gen7_mfd_context *gen7_mfd_context)
2633 {
2634     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2635     gen7_jpeg_wa_init(ctx, gen7_mfd_context);
2636     intel_batchbuffer_emit_mi_flush(batch);
2637     gen7_jpeg_wa_pipe_mode_select(ctx, gen7_mfd_context);
2638     gen7_jpeg_wa_surface_state(ctx, gen7_mfd_context);
2639     gen7_jpeg_wa_pipe_buf_addr_state(ctx, gen7_mfd_context);
2640     gen7_jpeg_wa_bsp_buf_base_addr_state(ctx, gen7_mfd_context);
2641     gen7_jpeg_wa_avc_qm_state(ctx, gen7_mfd_context);
2642     gen7_jpeg_wa_avc_img_state(ctx, gen7_mfd_context);
2643     gen7_jpeg_wa_ind_obj_base_addr_state(ctx, gen7_mfd_context);
2644
2645     gen7_jpeg_wa_avc_directmode_state(ctx, gen7_mfd_context);
2646     gen7_jpeg_wa_avc_slice_state(ctx, gen7_mfd_context);
2647     gen7_jpeg_wa_avc_bsd_object(ctx, gen7_mfd_context);
2648 }
2649
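/*
 * Top-level JPEG (Baseline DCT) decode: emit the AVC workaround, program
 * the JPEG pipeline state, then walk the scan headers twice, first to size
 * the Huffman tables and then to issue the BSD objects.
 */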
2650 void
2651 gen7_mfd_jpeg_decode_picture(VADriverContextP ctx,
2652                              struct decode_state *decode_state,
2653                              struct gen7_mfd_context *gen7_mfd_context)
2654 {
2655     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2656     VAPictureParameterBufferJPEGBaseline *pic_param;
2657     VASliceParameterBufferJPEGBaseline *slice_param, *next_slice_param, *next_slice_group_param;
2658     dri_bo *slice_data_bo;
2659     int i, j, max_selector = 0;
2660
2661     assert(decode_state->pic_param && decode_state->pic_param->buffer);
2662     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2663
2664     /* Currently only Baseline DCT is supported */
2665     gen7_mfd_jpeg_decode_init(ctx, decode_state, gen7_mfd_context);
2666     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
2667     gen7_mfd_jpeg_wa(ctx, gen7_mfd_context);
2668     intel_batchbuffer_emit_mi_flush(batch);
2669     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2670     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2671     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2672     gen7_mfd_jpeg_pic_state(ctx, decode_state, gen7_mfd_context);
2673     gen7_mfd_jpeg_qm_state(ctx, decode_state, gen7_mfd_context);
2674
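    /*
     * First pass over all scans: find the highest DC/AC Huffman table
     * selector in use so that the right number of Huffman tables is loaded
     * before any BSD object is issued.
     */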
2675     for (j = 0; j < decode_state->num_slice_params; j++) {
2676         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
2677         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
2678         slice_data_bo = decode_state->slice_datas[j]->bo;
2679         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
2680
2681         if (j == decode_state->num_slice_params - 1)
2682             next_slice_group_param = NULL;
2683         else
2684             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
2685
2686         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
2687             int component;
2688
2689             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
2690
2691             if (i < decode_state->slice_params[j]->num_elements - 1)
2692                 next_slice_param = slice_param + 1;
2693             else
2694                 next_slice_param = next_slice_group_param;
2695
2696             for (component = 0; component < slice_param->num_components; component++) {
2697                 if (max_selector < slice_param->components[component].dc_table_selector)
2698                     max_selector = slice_param->components[component].dc_table_selector;
2699
2700                 if (max_selector < slice_param->components[component].ac_table_selector)
2701                     max_selector = slice_param->components[component].ac_table_selector;
2702             }
2703
2704             slice_param++;
2705         }
2706     }
2707
2708     assert(max_selector < 2);
2709     gen7_mfd_jpeg_huff_table_state(ctx, decode_state, gen7_mfd_context, max_selector + 1);
2710
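    /* Second pass: emit one JPEG BSD object per scan. */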
2711     for (j = 0; j < decode_state->num_slice_params; j++) {
2712         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
2713         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
2714         slice_data_bo = decode_state->slice_datas[j]->bo;
2715         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
2716
2717         if (j == decode_state->num_slice_params - 1)
2718             next_slice_group_param = NULL;
2719         else
2720             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
2721
2722         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
2723             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
2724
2725             if (i < decode_state->slice_params[j]->num_elements - 1)
2726                 next_slice_param = slice_param + 1;
2727             else
2728                 next_slice_param = next_slice_group_param;
2729
2730             gen7_mfd_jpeg_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
2731             slice_param++;
2732         }
2733     }
2734
2735     intel_batchbuffer_end_atomic(batch);
2736     intel_batchbuffer_flush(batch);
2737 }
2738
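/*
 * hw_context::run entry point: dispatch to the per-codec decode routine
 * selected by the VA profile.
 */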
2739 static void 
2740 gen7_mfd_decode_picture(VADriverContextP ctx, 
2741                         VAProfile profile, 
2742                         union codec_state *codec_state,
2743                         struct hw_context *hw_context)
2745 {
2746     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
2747     struct decode_state *decode_state = &codec_state->decode;
2748
2749     assert(gen7_mfd_context);
2750
2751     gen7_mfd_context->wa_mpeg2_slice_vertical_position = -1;
2752
2753     switch (profile) {
2754     case VAProfileMPEG2Simple:
2755     case VAProfileMPEG2Main:
2756         gen7_mfd_mpeg2_decode_picture(ctx, decode_state, gen7_mfd_context);
2757         break;
2758         
2759     case VAProfileH264Baseline:
2760     case VAProfileH264Main:
2761     case VAProfileH264High:
2762         gen7_mfd_avc_decode_picture(ctx, decode_state, gen7_mfd_context);
2763         break;
2764
2765     case VAProfileVC1Simple:
2766     case VAProfileVC1Main:
2767     case VAProfileVC1Advanced:
2768         gen7_mfd_vc1_decode_picture(ctx, decode_state, gen7_mfd_context);
2769         break;
2770
2771     case VAProfileJPEGBaseline:
2772         gen7_mfd_jpeg_decode_picture(ctx, decode_state, gen7_mfd_context);
2773         break;
2774
2775     default:
2776         assert(0);
2777         break;
2778     }
2779 }
2780
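/*
 * hw_context::destroy entry point: release every scratch/output buffer
 * object held by the decoder context, then free the batchbuffer and the
 * context itself.
 */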
2781 static void
2782 gen7_mfd_context_destroy(void *hw_context)
2783 {
2784     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
2785
2786     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
2787     gen7_mfd_context->post_deblocking_output.bo = NULL;
2788
2789     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
2790     gen7_mfd_context->pre_deblocking_output.bo = NULL;
2791
2792     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
2793     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
2794
2795     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
2796     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
2797
2798     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
2799     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
2800
2801     dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
2802     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
2803
2804     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
2805     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
2806
2807     dri_bo_unreference(gen7_mfd_context->jpeg_wa_slice_data_bo);
2808
2809     intel_batchbuffer_free(gen7_mfd_context->base.batch);
2810     free(gen7_mfd_context);
2811 }
2812
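/*
 * Invalidate the cached MPEG-2 quantiser matrix load flags so that a fresh
 * context starts with no matrices marked as loaded.
 */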
2813 static void gen7_mfd_mpeg2_context_init(VADriverContextP ctx,
2814                                         struct gen7_mfd_context *gen7_mfd_context)
2815 {
2816     gen7_mfd_context->iq_matrix.mpeg2.load_intra_quantiser_matrix = -1;
2817     gen7_mfd_context->iq_matrix.mpeg2.load_non_intra_quantiser_matrix = -1;
2818     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_intra_quantiser_matrix = -1;
2819     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_non_intra_quantiser_matrix = -1;
2820 }
2821
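/*
 * Allocate and initialize the gen7 decoder context: batchbuffer, invalid
 * reference frame store entries, and per-codec state for the requested
 * profile.
 */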
2822 struct hw_context *
2823 gen7_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
2824 {
2825     struct intel_driver_data *intel = intel_driver_data(ctx);
2826     struct gen7_mfd_context *gen7_mfd_context = calloc(1, sizeof(struct gen7_mfd_context));
2827     int i;

    if (!gen7_mfd_context)
        return NULL;
2828
2829     gen7_mfd_context->base.destroy = gen7_mfd_context_destroy;
2830     gen7_mfd_context->base.run = gen7_mfd_decode_picture;
2831     gen7_mfd_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
2832
2833     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
2834         gen7_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
2835         gen7_mfd_context->reference_surface[i].frame_store_id = -1;
2836     }
2837
2838     gen7_mfd_context->jpeg_wa_surface_id = VA_INVALID_SURFACE;
2839
2840     switch (obj_config->profile) {
2841     case VAProfileMPEG2Simple:
2842     case VAProfileMPEG2Main:
2843         gen7_mfd_mpeg2_context_init(ctx, gen7_mfd_context);
2844         break;
2845
2846     case VAProfileH264Baseline:
2847     case VAProfileH264Main:
2848     case VAProfileH264High:
2849         gen7_mfd_avc_context_init(ctx, gen7_mfd_context);
2850         break;
2851     default:
2852         break;
2853     }
2854     return (struct hw_context *)gen7_mfd_context;
2855 }