genX_mfd: fix bitplane encoding for skipped pictures
android-x86/hardware-intel-common-vaapi.git: src/gen7_mfd.c
/*
 * Copyright © 2011 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Xiang Haihao <haihao.xiang@intel.com>
 *
 */

#include "sysdeps.h"

#include <va/va_dec_jpeg.h>

#include "intel_batchbuffer.h"
#include "intel_driver.h"
#include "i965_defines.h"
#include "i965_drv_video.h"
#include "i965_decoder_utils.h"

#include "gen7_mfd.h"
#include "intel_media.h"

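/*
 * Zig-zag scan order table used to reorder the MPEG-2 quantiser matrices
 * before they are uploaded with MFX_QM_STATE (see gen7_mfd_mpeg2_qm_state()
 * below).
 */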
static const uint32_t zigzag_direct[64] = {
    0,   1,  8, 16,  9,  2,  3, 10,
    17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};

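/*
 * Lazily attach the per-surface AVC private data and allocate the
 * direct-mode motion vector buffers: one for the top field/frame and,
 * when dmv_bottom_flag is set (field picture without direct 8x8
 * inference), a second one for the bottom field.
 */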
static void
gen7_mfd_init_avc_surface(VADriverContextP ctx,
                          VAPictureParameterBufferH264 *pic_param,
                          struct object_surface *obj_surface)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    GenAvcSurface *gen7_avc_surface = obj_surface->private_data;
    int width_in_mbs, height_in_mbs;

    obj_surface->free_private_data = gen_free_avc_surface;
    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (!gen7_avc_surface) {
        gen7_avc_surface = calloc(sizeof(GenAvcSurface), 1);
        assert(gen7_avc_surface);
        gen7_avc_surface->base.frame_store_id = -1;
        assert((obj_surface->size & 0x3f) == 0);
        obj_surface->private_data = gen7_avc_surface;
    }

    gen7_avc_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
                                         !pic_param->seq_fields.bits.direct_8x8_inference_flag);

    if (gen7_avc_surface->dmv_top == NULL) {
        gen7_avc_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
                                                 "direct mv w/r buffer",
                                                 width_in_mbs * (height_in_mbs + 1) * 64,
                                                 0x1000);
        assert(gen7_avc_surface->dmv_top);
    }

    if (gen7_avc_surface->dmv_bottom_flag &&
        gen7_avc_surface->dmv_bottom == NULL) {
        gen7_avc_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
                                                    "direct mv w/r buffer",
                                                    width_in_mbs * (height_in_mbs + 1) * 64,
                                                    0x1000);
        assert(gen7_avc_surface->dmv_bottom);
    }
}

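/*
 * MFX_PIPE_MODE_SELECT: put the MFX engine into VLD decode mode for the
 * selected standard and route the output to the pre- or post-deblocking
 * surface, depending on which one was marked valid during decode init.
 */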
static void
gen7_mfd_pipe_mode_select(VADriverContextP ctx,
                          struct decode_state *decode_state,
                          int standard_select,
                          struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    assert(standard_select == MFX_FORMAT_MPEG2 ||
           standard_select == MFX_FORMAT_AVC ||
           standard_select == MFX_FORMAT_VC1 ||
           standard_select == MFX_FORMAT_JPEG);

    BEGIN_BCS_BATCH(batch, 5);
    OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
    OUT_BCS_BATCH(batch,
                  (MFX_LONG_MODE << 17) | /* currently only the long format is supported */
                  (MFD_MODE_VLD << 15) | /* VLD mode */
                  (0 << 10) | /* disable Stream-Out */
                  (gen7_mfd_context->post_deblocking_output.valid << 9)  | /* Post Deblocking Output */
                  (gen7_mfd_context->pre_deblocking_output.valid << 8)  | /* Pre Deblocking Output */
                  (0 << 5)  | /* not in stitch mode */
                  (MFX_CODEC_DECODE << 4)  | /* decoding mode */
                  (standard_select << 0));
    OUT_BCS_BATCH(batch,
                  (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
                  (0 << 3)  | /* terminate if AVC mbdata error occurs */
                  (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
                  (0 << 1)  |
                  (0 << 0));
    OUT_BCS_BATCH(batch, 0); /* pic status/error report id */
    OUT_BCS_BATCH(batch, 0); /* reserved */
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_surface_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       int standard_select,
                       struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    struct object_surface *obj_surface = decode_state->render_object;
    unsigned int y_cb_offset;
    unsigned int y_cr_offset;
    unsigned int surface_format;

    assert(obj_surface);

    y_cb_offset = obj_surface->y_cb_offset;
    y_cr_offset = obj_surface->y_cr_offset;

    surface_format = obj_surface->fourcc == VA_FOURCC_Y800 ?
                     MFX_SURFACE_MONOCHROME : MFX_SURFACE_PLANAR_420_8;

    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  ((obj_surface->orig_height - 1) << 18) |
                  ((obj_surface->orig_width - 1) << 4));
    OUT_BCS_BATCH(batch,
                  (surface_format << 28) | /* 420 planar YUV surface */
                  ((standard_select != MFX_FORMAT_JPEG) << 27) | /* interleave chroma, set to 0 for JPEG */
                  (0 << 22) | /* surface object control state, ignored */
                  ((obj_surface->width - 1) << 3) | /* pitch */
                  (0 << 2)  | /* must be 0 */
                  (1 << 1)  | /* must be tiled */
                  (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
    OUT_BCS_BATCH(batch,
                  (0 << 16) | /* X offset for U(Cb), must be 0 */
                  (y_cb_offset << 0)); /* Y offset for U(Cb) */
    OUT_BCS_BATCH(batch,
                  (0 << 16) | /* X offset for V(Cr), must be 0 */
                  (y_cr_offset << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
    ADVANCE_BCS_BATCH(batch);
}

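/*
 * MFX_PIPE_BUF_ADDR_STATE: program the pre-/post-deblocking destination,
 * the row-store scratch buffers and the 16 reference picture base
 * addresses (DW 7..22); unused slots are written as 0.
 */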
static void
gen7_mfd_pipe_buf_addr_state(VADriverContextP ctx,
                             struct decode_state *decode_state,
                             int standard_select,
                             struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int i;

    BEGIN_BCS_BATCH(batch, 24);
    OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
    if (gen7_mfd_context->pre_deblocking_output.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->pre_deblocking_output.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->post_deblocking_output.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->post_deblocking_output.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
    OUT_BCS_BATCH(batch, 0); /* ignore for decoding */

    if (gen7_mfd_context->intra_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->intra_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    /* DW 7..22 */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        struct object_surface *obj_surface;

        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
            gen7_mfd_context->reference_surface[i].obj_surface &&
            gen7_mfd_context->reference_surface[i].obj_surface->bo) {
            obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;

            OUT_BCS_RELOC(batch, obj_surface->bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0);
        } else {
            OUT_BCS_BATCH(batch, 0);
        }
    }

    OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_ind_obj_base_addr_state(VADriverContextP ctx,
                                 dri_bo *slice_data_bo,
                                 int standard_select,
                                 struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    BEGIN_BCS_BATCH(batch, 11);
    OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
    OUT_BCS_RELOC(batch, slice_data_bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0); /* MFX Indirect Bitstream Object Base Address */
    OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_bsp_buf_base_addr_state(VADriverContextP ctx,
                                 struct decode_state *decode_state,
                                 int standard_select,
                                 struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    BEGIN_BCS_BATCH(batch, 4);
    OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));

    if (gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->mpr_row_store_scratch_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->mpr_row_store_scratch_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    if (gen7_mfd_context->bitplane_read_buffer.valid)
        OUT_BCS_RELOC(batch, gen7_mfd_context->bitplane_read_buffer.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

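/*
 * MFX_QM_STATE: upload one quantiser matrix. The command payload is a
 * fixed 16 dwords (64 bytes), so qm_length must not exceed that.
 */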
static void
gen7_mfd_qm_state(VADriverContextP ctx,
                  int qm_type,
                  unsigned char *qm,
                  int qm_length,
                  struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    unsigned int qm_buffer[16];

    assert(qm_length <= 16 * 4);
    memcpy(qm_buffer, qm, qm_length);

    BEGIN_BCS_BATCH(batch, 18);
    OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
    OUT_BCS_BATCH(batch, qm_type << 0);
    intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_avc_img_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int img_struct;
    int mbaff_frame_flag;
    unsigned int width_in_mbs, height_in_mbs;
    VAPictureParameterBufferH264 *pic_param;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
        img_struct = 1;
    else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
        img_struct = 3;
    else
        img_struct = 0;

    if ((img_struct & 0x1) == 0x1) {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
    } else {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
    }

    if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
        assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
        assert(pic_param->pic_fields.bits.field_pic_flag == 0);
    } else {
        assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
    }

    mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
                        !pic_param->pic_fields.bits.field_pic_flag);

    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    /* MFX unit doesn't support 4:2:2 and 4:4:4 picture */
    assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
           pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
    assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */

    BEGIN_BCS_BATCH(batch, 16);
    OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
    OUT_BCS_BATCH(batch,
                  (width_in_mbs * height_in_mbs - 1));
    OUT_BCS_BATCH(batch,
                  ((height_in_mbs - 1) << 16) |
                  ((width_in_mbs - 1) << 0));
    OUT_BCS_BATCH(batch,
                  ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
                  ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
                  (0 << 14) | /* Max-bit conformance Intra flag ??? FIXME */
                  (0 << 13) | /* Max Macroblock size conformance Inter flag ??? FIXME */
                  (pic_param->pic_fields.bits.weighted_pred_flag << 12) | /* differ from GEN6 */
                  (pic_param->pic_fields.bits.weighted_bipred_idc << 10) |
                  (img_struct << 8));
    OUT_BCS_BATCH(batch,
                  (pic_param->seq_fields.bits.chroma_format_idc << 10) |
                  (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
                  ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
                  (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
                  (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
                  (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
                  (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
                  (mbaff_frame_flag << 1) |
                  (pic_param->pic_fields.bits.field_pic_flag << 0));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_avc_qm_state(VADriverContextP ctx,
                      struct decode_state *decode_state,
                      struct gen7_mfd_context *gen7_mfd_context)
{
    VAIQMatrixBufferH264 *iq_matrix;
    VAPictureParameterBufferH264 *pic_param;

    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
        iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
    else
        iq_matrix = &gen7_mfd_context->iq_matrix.h264;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, &iq_matrix->ScalingList4x4[0][0], 3 * 16, gen7_mfd_context);
    gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, &iq_matrix->ScalingList4x4[3][0], 3 * 16, gen7_mfd_context);

    if (pic_param->pic_fields.bits.transform_8x8_mode_flag) {
        gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, &iq_matrix->ScalingList8x8[0][0], 64, gen7_mfd_context);
        gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, &iq_matrix->ScalingList8x8[1][0], 64, gen7_mfd_context);
    }
}

static void
gen7_mfd_avc_directmode_state(VADriverContextP ctx,
                              struct decode_state *decode_state,
                              VAPictureParameterBufferH264 *pic_param,
                              VASliceParameterBufferH264 *slice_param,
                              struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    struct object_surface *obj_surface;
    GenAvcSurface *gen7_avc_surface;
    VAPictureH264 *va_pic;
    int i;

    BEGIN_BCS_BATCH(batch, 69);
    OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));

    /* reference surfaces 0..15 */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
            gen7_mfd_context->reference_surface[i].obj_surface &&
            gen7_mfd_context->reference_surface[i].obj_surface->private_data) {

            obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
            gen7_avc_surface = obj_surface->private_data;
            OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0);

            if (gen7_avc_surface->dmv_bottom_flag == 1)
                OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0);
            else
                OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0);
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* the current decoding frame/field */
    va_pic = &pic_param->CurrPic;
    obj_surface = decode_state->render_object;
    assert(obj_surface->bo && obj_surface->private_data);
    gen7_avc_surface = obj_surface->private_data;

    OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    if (gen7_avc_surface->dmv_bottom_flag == 1)
        OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);

    /* POC List */
    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;

        if (obj_surface) {
            const VAPictureH264 * const va_pic = avc_find_picture(
                                                     obj_surface->base.id, pic_param->ReferenceFrames,
                                                     ARRAY_ELEMS(pic_param->ReferenceFrames));

            assert(va_pic != NULL);
            OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
            OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    va_pic = &pic_param->CurrPic;
    OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
    OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);

    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_avc_phantom_slice_first(VADriverContextP ctx,
                                 VAPictureParameterBufferH264 *pic_param,
                                 VASliceParameterBufferH264 *next_slice_param,
                                 struct gen7_mfd_context *gen7_mfd_context)
{
    gen6_mfd_avc_phantom_slice(ctx, pic_param, next_slice_param, gen7_mfd_context->base.batch);
}

static void
gen7_mfd_avc_slice_state(VADriverContextP ctx,
                         VAPictureParameterBufferH264 *pic_param,
                         VASliceParameterBufferH264 *slice_param,
                         VASliceParameterBufferH264 *next_slice_param,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
    int slice_hor_pos, slice_ver_pos, next_slice_hor_pos, next_slice_ver_pos;
    int num_ref_idx_l0, num_ref_idx_l1;
    int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                         pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
    int first_mb_in_slice = 0, first_mb_in_next_slice = 0;
    int slice_type;

    if (slice_param->slice_type == SLICE_TYPE_I ||
        slice_param->slice_type == SLICE_TYPE_SI) {
        slice_type = SLICE_TYPE_I;
    } else if (slice_param->slice_type == SLICE_TYPE_P ||
               slice_param->slice_type == SLICE_TYPE_SP) {
        slice_type = SLICE_TYPE_P;
    } else {
        assert(slice_param->slice_type == SLICE_TYPE_B);
        slice_type = SLICE_TYPE_B;
    }

    if (slice_type == SLICE_TYPE_I) {
        assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
        assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
        num_ref_idx_l0 = 0;
        num_ref_idx_l1 = 0;
    } else if (slice_type == SLICE_TYPE_P) {
        assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
        num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
        num_ref_idx_l1 = 0;
    } else {
        num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
        num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
    }

    first_mb_in_slice = slice_param->first_mb_in_slice;
    slice_hor_pos = first_mb_in_slice % width_in_mbs;
    slice_ver_pos = first_mb_in_slice / width_in_mbs;

    if (mbaff_picture)
        slice_ver_pos = slice_ver_pos << 1;

    if (next_slice_param) {
        first_mb_in_next_slice = next_slice_param->first_mb_in_slice;
        next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
        next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;

        if (mbaff_picture)
            next_slice_ver_pos = next_slice_ver_pos << 1;
    } else {
        next_slice_hor_pos = 0;
        next_slice_ver_pos = height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
    }

    BEGIN_BCS_BATCH(batch, 11); /* FIXME: is it 10??? */
    OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, slice_type);
    OUT_BCS_BATCH(batch,
                  (num_ref_idx_l1 << 24) |
                  (num_ref_idx_l0 << 16) |
                  (slice_param->chroma_log2_weight_denom << 8) |
                  (slice_param->luma_log2_weight_denom << 0));
    OUT_BCS_BATCH(batch,
                  (slice_param->direct_spatial_mv_pred_flag << 29) |
                  (slice_param->disable_deblocking_filter_idc << 27) |
                  (slice_param->cabac_init_idc << 24) |
                  ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                  ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                  ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
    OUT_BCS_BATCH(batch,
                  (slice_ver_pos << 24) |
                  (slice_hor_pos << 16) |
                  (first_mb_in_slice << 0));
    OUT_BCS_BATCH(batch,
                  (next_slice_ver_pos << 16) |
                  (next_slice_hor_pos << 0));
    OUT_BCS_BATCH(batch,
                  (next_slice_param == NULL) << 19); /* last slice flag */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static inline void
gen7_mfd_avc_ref_idx_state(VADriverContextP ctx,
                           VAPictureParameterBufferH264 *pic_param,
                           VASliceParameterBufferH264 *slice_param,
                           struct gen7_mfd_context *gen7_mfd_context)
{
    gen6_send_avc_ref_idx_state(
        gen7_mfd_context->base.batch,
        slice_param,
        gen7_mfd_context->reference_surface
    );
}

static void
gen7_mfd_avc_weightoffset_state(VADriverContextP ctx,
                                VAPictureParameterBufferH264 *pic_param,
                                VASliceParameterBufferH264 *slice_param,
                                struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    int i, j, num_weight_offset_table = 0;
    short weightoffsets[32 * 6];

    if ((slice_param->slice_type == SLICE_TYPE_P ||
         slice_param->slice_type == SLICE_TYPE_SP) &&
        (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
        num_weight_offset_table = 1;
    }

    if ((slice_param->slice_type == SLICE_TYPE_B) &&
        (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
        num_weight_offset_table = 2;
    }

    for (i = 0; i < num_weight_offset_table; i++) {
        BEGIN_BCS_BATCH(batch, 98);
        OUT_BCS_BATCH(batch, MFX_AVC_WEIGHTOFFSET_STATE | (98 - 2));
        OUT_BCS_BATCH(batch, i);

        if (i == 0) {
            for (j = 0; j < 32; j++) {
                weightoffsets[j * 6 + 0] = slice_param->luma_weight_l0[j];
                weightoffsets[j * 6 + 1] = slice_param->luma_offset_l0[j];
                weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l0[j][0];
                weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l0[j][0];
                weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l0[j][1];
                weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l0[j][1];
            }
        } else {
            for (j = 0; j < 32; j++) {
                weightoffsets[j * 6 + 0] = slice_param->luma_weight_l1[j];
                weightoffsets[j * 6 + 1] = slice_param->luma_offset_l1[j];
                weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l1[j][0];
                weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l1[j][0];
                weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l1[j][1];
                weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l1[j][1];
            }
        }

        intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
        ADVANCE_BCS_BATCH(batch);
    }
}

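/*
 * MFD_AVC_BSD_OBJECT: dispatch one slice to the bitstream decoder. The
 * bit offset of the first macroblock is recomputed from the slice data
 * by avc_get_first_mb_bit_offset() and split into its byte and bit
 * components below.
 */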
static void
gen7_mfd_avc_bsd_object(VADriverContextP ctx,
                        VAPictureParameterBufferH264 *pic_param,
                        VASliceParameterBufferH264 *slice_param,
                        dri_bo *slice_data_bo,
                        VASliceParameterBufferH264 *next_slice_param,
                        struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    unsigned int slice_data_bit_offset;

    slice_data_bit_offset = avc_get_first_mb_bit_offset(
                                slice_data_bo,
                                slice_param,
                                pic_param->pic_fields.bits.entropy_coding_mode_flag
                            );

    /* the input bitstream format on GEN7 differs from GEN6 */
    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
    OUT_BCS_BATCH(batch,
                  (slice_param->slice_data_size - slice_param->slice_data_offset));
    OUT_BCS_BATCH(batch, slice_param->slice_data_offset);
    OUT_BCS_BATCH(batch,
                  (0 << 31) |
                  (0 << 14) |
                  (0 << 12) |
                  (0 << 10) |
                  (0 << 8));
    OUT_BCS_BATCH(batch,
                  ((slice_data_bit_offset >> 3) << 16) |
                  (1 << 7)  |
                  (0 << 5)  |
                  (0 << 4)  |
                  ((next_slice_param == NULL) << 3) | /* LastSlice Flag */
                  (slice_data_bit_offset & 0x7));
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static inline void
gen7_mfd_avc_context_init(
    VADriverContextP         ctx,
    struct gen7_mfd_context *gen7_mfd_context
)
{
    /* Initialize flat scaling lists */
    avc_gen_default_iq_matrix(&gen7_mfd_context->iq_matrix.h264);
}

static void
gen7_mfd_avc_decode_init(VADriverContextP ctx,
                         struct decode_state *decode_state,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    VAPictureParameterBufferH264 *pic_param;
    VASliceParameterBufferH264 *slice_param;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface;
    dri_bo *bo;
    int i, j, enable_avc_ildb = 0;
    unsigned int width_in_mbs, height_in_mbs;

    for (j = 0; j < decode_state->num_slice_params && enable_avc_ildb == 0; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
                   (slice_param->slice_type == SLICE_TYPE_P) ||
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
                   (slice_param->slice_type == SLICE_TYPE_B));

            if (slice_param->disable_deblocking_filter_idc != 1) {
                enable_avc_ildb = 1;
                break;
            }

            slice_param++;
        }
    }

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    intel_update_avc_frame_store_index(ctx, decode_state, pic_param,
                                       gen7_mfd_context->reference_surface, &gen7_mfd_context->fs_ctx);
    width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
    assert(width_in_mbs > 0 && width_in_mbs <= 256); /* 4K */
    assert(height_in_mbs > 0 && height_in_mbs <= 256);

    /* Current decoded picture */
    obj_surface = decode_state->render_object;
    if (pic_param->pic_fields.bits.reference_pic_flag)
        obj_surface->flags |= SURFACE_REFERENCED;
    else
        obj_surface->flags &= ~SURFACE_REFERENCED;

    avc_ensure_surface_bo(ctx, decode_state, obj_surface, pic_param);
    gen7_mfd_init_avc_surface(ctx, pic_param, obj_surface);

    dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
    gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
    gen7_mfd_context->post_deblocking_output.valid = enable_avc_ildb;

    dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.valid = !enable_avc_ildb;

    dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "intra row store",
                      width_in_mbs * 64,
                      0x1000);
    assert(bo);
    gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "deblocking filter row store",
                      width_in_mbs * 64 * 4,
                      0x1000);
    assert(bo);
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "bsd mpc row store",
                      width_in_mbs * 64 * 2,
                      0x1000);
    assert(bo);
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;

    dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "mpr row store",
                      width_in_mbs * 64 * 2,
                      0x1000);
    assert(bo);
    gen7_mfd_context->mpr_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 1;

    gen7_mfd_context->bitplane_read_buffer.valid = 0;
}

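/*
 * Top-level AVC decode: emit the per-picture MFX state once, then walk
 * every slice parameter buffer and emit per-slice state followed by a
 * BSD object, all within a single atomic BCS batch.
 */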
static void
gen7_mfd_avc_decode_picture(VADriverContextP ctx,
                            struct decode_state *decode_state,
                            struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferH264 *pic_param;
    VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
    dri_bo *slice_data_bo;
    int i, j;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    gen7_mfd_avc_decode_init(ctx, decode_state, gen7_mfd_context);

    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
    intel_batchbuffer_emit_mi_flush(batch);
    gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
    gen7_mfd_avc_qm_state(ctx, decode_state, gen7_mfd_context);
    gen7_mfd_avc_img_state(ctx, decode_state, gen7_mfd_context);

    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
        slice_data_bo = decode_state->slice_datas[j]->bo;
        gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC, gen7_mfd_context);

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;

        if (j == 0 && slice_param->first_mb_in_slice)
            gen7_mfd_avc_phantom_slice_first(ctx, pic_param, slice_param, gen7_mfd_context);

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
                   (slice_param->slice_type == SLICE_TYPE_P) ||
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
                   (slice_param->slice_type == SLICE_TYPE_B));

            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            gen7_mfd_avc_directmode_state(ctx, decode_state, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_ref_idx_state(ctx, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_weightoffset_state(ctx, pic_param, slice_param, gen7_mfd_context);
            gen7_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
            gen7_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo, next_slice_param, gen7_mfd_context);
            slice_param++;
        }
    }

    intel_batchbuffer_end_atomic(batch);
    intel_batchbuffer_flush(batch);
}

static void
gen7_mfd_mpeg2_decode_init(VADriverContextP ctx,
                           struct decode_state *decode_state,
                           struct gen7_mfd_context *gen7_mfd_context)
{
    VAPictureParameterBufferMPEG2 *pic_param;
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *obj_surface;
    dri_bo *bo;
    unsigned int width_in_mbs;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
    width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;

    mpeg2_set_reference_surfaces(
        ctx,
        gen7_mfd_context->reference_surface,
        decode_state,
        pic_param
    );

    /* Current decoded picture */
    obj_surface = decode_state->render_object;
    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);

    dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.valid = 1;

    dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      "bsd mpc row store",
                      width_in_mbs * 96,
                      0x1000);
    assert(bo);
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;

    gen7_mfd_context->post_deblocking_output.valid = 0;
    gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
    gen7_mfd_context->bitplane_read_buffer.valid = 0;
}

static void
gen7_mfd_mpeg2_pic_state(VADriverContextP ctx,
                         struct decode_state *decode_state,
                         struct gen7_mfd_context *gen7_mfd_context)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferMPEG2 *pic_param;
    unsigned int slice_concealment_disable_bit = 0;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;

    if (IS_HASWELL(i965->intel.device_info)) {
        /* XXX: disable concealment for now */
        slice_concealment_disable_bit = 1;
    }

    BEGIN_BCS_BATCH(batch, 13);
    OUT_BCS_BATCH(batch, MFX_MPEG2_PIC_STATE | (13 - 2));
    OUT_BCS_BATCH(batch,
                  (pic_param->f_code & 0xf) << 28 | /* f_code[1][1] */
                  ((pic_param->f_code >> 4) & 0xf) << 24 | /* f_code[1][0] */
                  ((pic_param->f_code >> 8) & 0xf) << 20 | /* f_code[0][1] */
                  ((pic_param->f_code >> 12) & 0xf) << 16 | /* f_code[0][0] */
                  pic_param->picture_coding_extension.bits.intra_dc_precision << 14 |
                  pic_param->picture_coding_extension.bits.picture_structure << 12 |
                  pic_param->picture_coding_extension.bits.top_field_first << 11 |
                  pic_param->picture_coding_extension.bits.frame_pred_frame_dct << 10 |
                  pic_param->picture_coding_extension.bits.concealment_motion_vectors << 9 |
                  pic_param->picture_coding_extension.bits.q_scale_type << 8 |
                  pic_param->picture_coding_extension.bits.intra_vlc_format << 7 |
                  pic_param->picture_coding_extension.bits.alternate_scan << 6);
    OUT_BCS_BATCH(batch,
                  pic_param->picture_coding_type << 9);
    OUT_BCS_BATCH(batch,
                  (slice_concealment_disable_bit << 31) |
                  ((ALIGN(pic_param->vertical_size, 16) / 16) - 1) << 16 |
                  ((ALIGN(pic_param->horizontal_size, 16) / 16) - 1));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_mpeg2_qm_state(VADriverContextP ctx,
                        struct decode_state *decode_state,
                        struct gen7_mfd_context *gen7_mfd_context)
{
    VAIQMatrixBufferMPEG2 * const gen_iq_matrix = &gen7_mfd_context->iq_matrix.mpeg2;
    int i, j;

    /* Update internal QM state */
    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer) {
        VAIQMatrixBufferMPEG2 * const iq_matrix =
            (VAIQMatrixBufferMPEG2 *)decode_state->iq_matrix->buffer;

        if (gen_iq_matrix->load_intra_quantiser_matrix == -1 ||
            iq_matrix->load_intra_quantiser_matrix) {
            gen_iq_matrix->load_intra_quantiser_matrix =
                iq_matrix->load_intra_quantiser_matrix;
            if (iq_matrix->load_intra_quantiser_matrix) {
                for (j = 0; j < 64; j++)
                    gen_iq_matrix->intra_quantiser_matrix[zigzag_direct[j]] =
                        iq_matrix->intra_quantiser_matrix[j];
            }
        }

        if (gen_iq_matrix->load_non_intra_quantiser_matrix == -1 ||
            iq_matrix->load_non_intra_quantiser_matrix) {
            gen_iq_matrix->load_non_intra_quantiser_matrix =
                iq_matrix->load_non_intra_quantiser_matrix;
            if (iq_matrix->load_non_intra_quantiser_matrix) {
                for (j = 0; j < 64; j++)
                    gen_iq_matrix->non_intra_quantiser_matrix[zigzag_direct[j]] =
                        iq_matrix->non_intra_quantiser_matrix[j];
            }
        }
    }

    /* Commit QM state to HW */
    for (i = 0; i < 2; i++) {
        unsigned char *qm = NULL;
        int qm_type;

        if (i == 0) {
            if (gen_iq_matrix->load_intra_quantiser_matrix) {
                qm = gen_iq_matrix->intra_quantiser_matrix;
                qm_type = MFX_QM_MPEG_INTRA_QUANTIZER_MATRIX;
            }
        } else {
            if (gen_iq_matrix->load_non_intra_quantiser_matrix) {
                qm = gen_iq_matrix->non_intra_quantiser_matrix;
                qm_type = MFX_QM_MPEG_NON_INTRA_QUANTIZER_MATRIX;
            }
        }

        if (!qm)
            continue;

        gen7_mfd_qm_state(ctx, qm_type, qm, 64, gen7_mfd_context);
    }
}

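/*
 * Scan the slice data for the next start-code prefix (00 00 01) so the
 * BSD object can be programmed with the exact number of bytes belonging
 * to this slice rather than the full slice_data_size reported in the
 * slice parameters.
 */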
uint32_t mpeg2_get_slice_data_length(dri_bo *slice_data_bo, VASliceParameterBufferMPEG2 *slice_param)
{
    uint8_t *buf;
    uint32_t buf_offset = slice_param->slice_data_offset + (slice_param->macroblock_offset >> 3);
    uint32_t buf_size = slice_param->slice_data_size - (slice_param->macroblock_offset >> 3);
    uint32_t i = 0;

    dri_bo_map(slice_data_bo, 0);
    buf = (uint8_t *)slice_data_bo->virtual + buf_offset;

    if (buf_size < 4) {
        /* too short to contain a start code: unmap before the early return */
        dri_bo_unmap(slice_data_bo);
        return buf_size;
    }

    while (i <= (buf_size - 4)) {
        if (buf[i + 2] > 1) {
            i += 3;
        } else if (buf[i + 1]) {
            i += 2;
        } else if (buf[i] || buf[i + 2] != 1) {
            i++;
        } else {
            break;
        }
    }

    if (i <= (buf_size - 4))
        buf_size = i;

    dri_bo_unmap(slice_data_bo);
    return buf_size;
}

static void
gen7_mfd_mpeg2_bsd_object(VADriverContextP ctx,
                          VAPictureParameterBufferMPEG2 *pic_param,
                          VASliceParameterBufferMPEG2 *slice_param,
                          dri_bo *slice_data_bo,
                          VASliceParameterBufferMPEG2 *next_slice_param,
                          struct gen7_mfd_context *gen7_mfd_context)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
    int mb_count, vpos0, hpos0, vpos1, hpos1, is_field_pic_wa, is_field_pic = 0;

    if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_TOP_FIELD ||
        pic_param->picture_coding_extension.bits.picture_structure == MPEG_BOTTOM_FIELD)
        is_field_pic = 1;
    is_field_pic_wa = is_field_pic &&
                      gen7_mfd_context->wa_mpeg2_slice_vertical_position > 0;

    vpos0 = slice_param->slice_vertical_position / (1 + is_field_pic_wa);
    hpos0 = slice_param->slice_horizontal_position;

    if (next_slice_param == NULL) {
        vpos1 = ALIGN(pic_param->vertical_size, 16) / 16 / (1 + is_field_pic);
        hpos1 = 0;
    } else {
        vpos1 = next_slice_param->slice_vertical_position / (1 + is_field_pic_wa);
        hpos1 = next_slice_param->slice_horizontal_position;
    }

    mb_count = (vpos1 * width_in_mbs + hpos1) - (vpos0 * width_in_mbs + hpos0);

    BEGIN_BCS_BATCH(batch, 5);
    OUT_BCS_BATCH(batch, MFD_MPEG2_BSD_OBJECT | (5 - 2));
    OUT_BCS_BATCH(batch,
                  mpeg2_get_slice_data_length(slice_data_bo, slice_param));
    OUT_BCS_BATCH(batch,
                  slice_param->slice_data_offset + (slice_param->macroblock_offset >> 3));
    OUT_BCS_BATCH(batch,
                  hpos0 << 24 |
                  vpos0 << 16 |
                  mb_count << 8 |
                  (next_slice_param == NULL) << 5 |
                  (next_slice_param == NULL) << 3 |
                  (slice_param->macroblock_offset & 0x7));
    OUT_BCS_BATCH(batch,
                  (slice_param->quantiser_scale_code << 24) |
                  (IS_HASWELL(i965->intel.device_info) ? (vpos1 << 8 | hpos1) : 0));
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfd_mpeg2_decode_picture(VADriverContextP ctx,
                              struct decode_state *decode_state,
                              struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferMPEG2 *pic_param;
    VASliceParameterBufferMPEG2 *slice_param, *next_slice_param, *next_slice_group_param;
    dri_bo *slice_data_bo;
    int i, j;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;

    gen7_mfd_mpeg2_decode_init(ctx, decode_state, gen7_mfd_context);
    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
    intel_batchbuffer_emit_mi_flush(batch);
    gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
    gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
    gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
    gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
    gen7_mfd_mpeg2_pic_state(ctx, decode_state, gen7_mfd_context);
    gen7_mfd_mpeg2_qm_state(ctx, decode_state, gen7_mfd_context);

    if (gen7_mfd_context->wa_mpeg2_slice_vertical_position < 0)
        gen7_mfd_context->wa_mpeg2_slice_vertical_position =
            mpeg2_wa_slice_vertical_position(decode_state, pic_param);

    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer;
        slice_data_bo = decode_state->slice_datas[j]->bo;
        gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2, gen7_mfd_context);

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j + 1]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);

            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            gen7_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, slice_data_bo, next_slice_param, gen7_mfd_context);
            slice_param++;
        }
    }

    intel_batchbuffer_end_atomic(batch);
    intel_batchbuffer_flush(batch);
}

static const int va_to_gen7_vc1_pic_type[5] = {
    GEN7_VC1_I_PICTURE,
    GEN7_VC1_P_PICTURE,
    GEN7_VC1_B_PICTURE,
    GEN7_VC1_BI_PICTURE,
    GEN7_VC1_P_PICTURE,
};

static const int va_to_gen7_vc1_mv[4] = {
    1, /* 1-MV */
    2, /* 1-MV half-pel */
    3, /* 1-MV half-pel bilinear */
1199     0, /* Mixed MV */
1200 };
1201
1202 static const int b_picture_scale_factor[21] = {
1203     128, 85,  170, 64,  192,
1204     51,  102, 153, 204, 43,
1205     215, 37,  74,  111, 148,
1206     185, 222, 32,  96,  160,
1207     224,
1208 };
1209
1210 static const int va_to_gen7_vc1_condover[3] = {
1211     0,
1212     2,
1213     3
1214 };
1215
1216 static const int va_to_gen7_vc1_profile[4] = {
1217     GEN7_VC1_SIMPLE_PROFILE,
1218     GEN7_VC1_MAIN_PROFILE,
1219     GEN7_VC1_RESERVED_PROFILE,
1220     GEN7_VC1_ADVANCED_PROFILE
1221 };
1222
1223 static void
1224 gen7_mfd_free_vc1_surface(void **data)
1225 {
1226     struct gen7_vc1_surface *gen7_vc1_surface = *data;
1227
1228     if (!gen7_vc1_surface)
1229         return;
1230
1231     dri_bo_unreference(gen7_vc1_surface->dmv);
1232     free(gen7_vc1_surface);
1233     *data = NULL;
1234 }
1235
1236 static void
1237 gen7_mfd_init_vc1_surface(VADriverContextP ctx,
1238                           VAPictureParameterBufferVC1 *pic_param,
1239                           struct object_surface *obj_surface)
1240 {
1241     struct i965_driver_data *i965 = i965_driver_data(ctx);
1242     struct gen7_vc1_surface *gen7_vc1_surface = obj_surface->private_data;
1243     int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1244     int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1245
1246     obj_surface->free_private_data = gen7_mfd_free_vc1_surface;
1247
1248     if (!gen7_vc1_surface) {
1249         gen7_vc1_surface = calloc(sizeof(struct gen7_vc1_surface), 1);
1250         assert(gen7_vc1_surface);
1251         assert((obj_surface->size & 0x3f) == 0);
1252         obj_surface->private_data = gen7_vc1_surface;
1253     }
1254
1255     gen7_vc1_surface->picture_type = pic_param->picture_fields.bits.picture_type;
1256     gen7_vc1_surface->intensity_compensation = 0;
1257     gen7_vc1_surface->luma_scale = 0;
1258     gen7_vc1_surface->luma_shift = 0;
1259
1260     if (gen7_vc1_surface->dmv == NULL) {
1261         gen7_vc1_surface->dmv = dri_bo_alloc(i965->intel.bufmgr,
1262                                              "direct mv w/r buffer",
1263                                              width_in_mbs * height_in_mbs * 64,
1264                                              0x1000);
1265     }
1266 }
1267
1268 static void
1269 gen7_mfd_vc1_decode_init(VADriverContextP ctx,
1270                          struct decode_state *decode_state,
1271                          struct gen7_mfd_context *gen7_mfd_context)
1272 {
1273     VAPictureParameterBufferVC1 *pic_param;
1274     struct i965_driver_data *i965 = i965_driver_data(ctx);
1275     struct object_surface *obj_surface;
1276     dri_bo *bo;
1277     int width_in_mbs;
1278     int picture_type;
1279     int intensity_compensation;
1280
1281     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1282     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1283     width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1284     picture_type = pic_param->picture_fields.bits.picture_type;
1285     intensity_compensation = (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation);
1286
1287     intel_update_vc1_frame_store_index(ctx,
1288                                        decode_state,
1289                                        pic_param,
1290                                        gen7_mfd_context->reference_surface);
1291
1292     /* Forward reference picture */
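    /* When the current P picture enables intensity compensation, stash
     * LUMSCALE/LUMSHIFT on the forward reference surface so that
     * gen7_mfd_vc1_pred_pipe_state() can program them later. */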
1293     obj_surface = decode_state->reference_objects[0];
1294     if (pic_param->forward_reference_picture != VA_INVALID_ID &&
1295         obj_surface &&
1296         obj_surface->private_data) {
1297         if (picture_type == 1 && intensity_compensation) { /* P picture */
1298             struct gen7_vc1_surface *gen7_vc1_surface = obj_surface->private_data;
1299
1300             gen7_vc1_surface->intensity_compensation = intensity_compensation;
1301             gen7_vc1_surface->luma_scale = pic_param->luma_scale;
1302             gen7_vc1_surface->luma_shift = pic_param->luma_shift;
1303         }
1304     }
1305
1306     /* Current decoded picture */
1307     obj_surface = decode_state->render_object;
1308     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
1309     gen7_mfd_init_vc1_surface(ctx, pic_param, obj_surface);
1310
1311     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
1312     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
1313     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
1314     gen7_mfd_context->post_deblocking_output.valid = pic_param->entrypoint_fields.bits.loopfilter;
1315
1316     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1317     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1318     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1319     gen7_mfd_context->pre_deblocking_output.valid = !pic_param->entrypoint_fields.bits.loopfilter;
1320
1321     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
1322     bo = dri_bo_alloc(i965->intel.bufmgr,
1323                       "intra row store",
1324                       width_in_mbs * 64,
1325                       0x1000);
1326     assert(bo);
1327     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
1328     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
1329
1330     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
1331     bo = dri_bo_alloc(i965->intel.bufmgr,
1332                       "deblocking filter row store",
1333                       width_in_mbs * 7 * 64,
1334                       0x1000);
1335     assert(bo);
1336     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
1337     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
1338
1339     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1340     bo = dri_bo_alloc(i965->intel.bufmgr,
1341                       "bsd mpc row store",
1342                       width_in_mbs * 96,
1343                       0x1000);
1344     assert(bo);
1345     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1346     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1347
1348     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1349
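    /*
     * Skipped pictures carry no bitplane data from the application, so a
     * bitplane is synthesized below with every macroblock nibble set to 0x2.
     * Two macroblocks are packed per byte: the even-numbered macroblock ends
     * up in the low nibble, the odd-numbered one in the high nibble.
     */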
1350     if (picture_type == GEN7_VC1_SKIPPED_PICTURE)
1351         gen7_mfd_context->bitplane_read_buffer.valid = 1;
1352     else
1353         gen7_mfd_context->bitplane_read_buffer.valid = !!pic_param->bitplane_present.value;
1354     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
1355
1356     if (gen7_mfd_context->bitplane_read_buffer.valid) {
1357         int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1358         int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1359         int bitplane_width = ALIGN(width_in_mbs, 2) / 2;
1360         int src_w, src_h;
1361         uint8_t *src = NULL, *dst = NULL;
1362
1363         bo = dri_bo_alloc(i965->intel.bufmgr,
1364                           "VC-1 Bitplane",
1365                           bitplane_width * height_in_mbs,
1366                           0x1000);
1367         assert(bo);
1368         gen7_mfd_context->bitplane_read_buffer.bo = bo;
1369
1370         dri_bo_map(bo, True);
1371         assert(bo->virtual);
1372         dst = bo->virtual;
1373
1374         if (picture_type == GEN7_VC1_SKIPPED_PICTURE) {
1375             for (src_h = 0; src_h < height_in_mbs; src_h++) {
1376                 for (src_w = 0; src_w < width_in_mbs; src_w++) {
1377                     int dst_index;
1378                     uint8_t src_value = 0x2;
1379
1380                     dst_index = src_w / 2;
1381                     dst[dst_index] = ((dst[dst_index] >> 4) | (src_value << 4));
1382                 }
1383
1384                 if (src_w & 1)
1385                     dst[src_w / 2] >>= 4;
1386
1387                 dst += bitplane_width;
1388             }
1389         } else {
1390             assert(decode_state->bit_plane->buffer);
1391             src = decode_state->bit_plane->buffer;
1392
1393             for (src_h = 0; src_h < height_in_mbs; src_h++) {
1394                 for (src_w = 0; src_w < width_in_mbs; src_w++) {
1395                     int src_index, dst_index;
1396                     int src_shift;
1397                     uint8_t src_value;
1398
1399                     src_index = (src_h * width_in_mbs + src_w) / 2;
1400                     src_shift = !((src_h * width_in_mbs + src_w) & 1) * 4;
1401                     src_value = ((src[src_index] >> src_shift) & 0xf);
1402
1403                     dst_index = src_w / 2;
1404                     dst[dst_index] = ((dst[dst_index] >> 4) | (src_value << 4));
1405                 }
1406
1407                 if (src_w & 1)
1408                     dst[src_w / 2] >>= 4;
1409
1410                 dst += bitplane_width;
1411             }
1412         }
1413
1414         dri_bo_unmap(bo);
1415     } else
1416         gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1417 }
1418
1419 static void
1420 gen7_mfd_vc1_pic_state(VADriverContextP ctx,
1421                        struct decode_state *decode_state,
1422                        struct gen7_mfd_context *gen7_mfd_context)
1423 {
1424     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1425     VAPictureParameterBufferVC1 *pic_param;
1426     struct object_surface *obj_surface;
1427     int alt_pquant_config = 0, alt_pquant_edge_mask = 0, alt_pq;
1428     int dquant, dquantfrm, dqprofile, dqdbedge, dqsbedge, dqbilevel;
1429     int unified_mv_mode;
1430     int ref_field_pic_polarity = 0;
1431     int scale_factor = 0;
1432     int trans_ac_y = 0;
1433     int dmv_surface_valid = 0;
1434     int brfd = 0;
1435     int fcm = 0;
1436     int picture_type;
1437     int profile;
1438     int overlap;
1439     int interpolation_mode = 0;
1440
1441     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1442     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1443
1444     profile = va_to_gen7_vc1_profile[pic_param->sequence_fields.bits.profile];
1445     dquant = pic_param->pic_quantizer_fields.bits.dquant;
1446     dquantfrm = pic_param->pic_quantizer_fields.bits.dq_frame;
1447     dqprofile = pic_param->pic_quantizer_fields.bits.dq_profile;
1448     dqdbedge = pic_param->pic_quantizer_fields.bits.dq_db_edge;
1449     dqsbedge = pic_param->pic_quantizer_fields.bits.dq_sb_edge;
1450     dqbilevel = pic_param->pic_quantizer_fields.bits.dq_binary_level;
1451     alt_pq = pic_param->pic_quantizer_fields.bits.alt_pic_quantizer;
1452
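    /*
     * Derive the alternative picture quantizer configuration and edge mask
     * from the DQUANT / DQUANTFRM / DQPROFILE / DQDBEDGE / DQSBEDGE /
     * DQBILEVEL syntax elements.
     */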
1453     if (dquant == 0) {
1454         alt_pquant_config = 0;
1455         alt_pquant_edge_mask = 0;
1456     } else if (dquant == 2) {
1457         alt_pquant_config = 1;
1458         alt_pquant_edge_mask = 0xf;
1459     } else {
1460         assert(dquant == 1);
1461         if (dquantfrm == 0) {
1462             alt_pquant_config = 0;
1463             alt_pquant_edge_mask = 0;
1464             alt_pq = 0;
1465         } else {
1466             assert(dquantfrm == 1);
1467             alt_pquant_config = 1;
1468
1469             switch (dqprofile) {
1470             case 3:
1471                 if (dqbilevel == 0) {
1472                     alt_pquant_config = 2;
1473                     alt_pquant_edge_mask = 0;
1474                 } else {
1475                     assert(dqbilevel == 1);
1476                     alt_pquant_config = 3;
1477                     alt_pquant_edge_mask = 0;
1478                 }
1479                 break;
1480
1481             case 0:
1482                 alt_pquant_edge_mask = 0xf;
1483                 break;
1484
1485             case 1:
1486                 if (dqdbedge == 3)
1487                     alt_pquant_edge_mask = 0x9;
1488                 else
1489                     alt_pquant_edge_mask = (0x3 << dqdbedge);
1490
1491                 break;
1492
1493             case 2:
1494                 alt_pquant_edge_mask = (0x1 << dqsbedge);
1495                 break;
1496
1497             default:
1498                 assert(0);
1499             }
1500         }
1501     }
1502
1503     if (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation) {
1504         assert(pic_param->mv_fields.bits.mv_mode2 < 4);
1505         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode2];
1506     } else {
1507         assert(pic_param->mv_fields.bits.mv_mode < 4);
1508         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode];
1509     }
1510
1511     if (pic_param->sequence_fields.bits.interlace == 1 &&
1512         pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1513         /* FIXME: calculate reference field picture polarity */
1514         assert(0);
1515         ref_field_pic_polarity = 0;
1516     }
1517
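    /* Map BFRACTION to the B-picture scale factor that is programmed into
     * MFD_VC1_LONG_PIC_STATE and used below to derive BRFD. */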
1518     if (pic_param->b_picture_fraction < 21)
1519         scale_factor = b_picture_scale_factor[pic_param->b_picture_fraction];
1520
1521     picture_type = va_to_gen7_vc1_pic_type[pic_param->picture_fields.bits.picture_type];
1522
1523     if (profile == GEN7_VC1_ADVANCED_PROFILE &&
1524         picture_type == GEN7_VC1_I_PICTURE)
1525         picture_type = GEN7_VC1_BI_PICTURE;
1526
1527     if (picture_type == GEN7_VC1_I_PICTURE || picture_type == GEN7_VC1_BI_PICTURE) /* I/BI picture */
1528         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx2;
1529     else {
1530         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx1;
1531         /*
1532          * 8.3.6.2.1 Transform Type Selection
1533          * If variable-sized transform coding is not enabled,
1534          * then the 8x8 transform shall be used for all blocks.
1535          * This is also a requirement of MFX_VC1_PIC_STATE.
1536          */
1537         if (pic_param->transform_fields.bits.variable_sized_transform_flag == 0) {
1538             pic_param->transform_fields.bits.mb_level_transform_type_flag   = 1;
1539             pic_param->transform_fields.bits.frame_level_transform_type     = 0;
1540         }
1541     }
1542
1543
1544     if (picture_type == GEN7_VC1_B_PICTURE) {
1545         struct gen7_vc1_surface *gen7_vc1_surface = NULL;
1546
1547         obj_surface = decode_state->reference_objects[1];
1548
1549         if (obj_surface)
1550             gen7_vc1_surface = obj_surface->private_data;
1551
1552         if (!gen7_vc1_surface ||
1553             (va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_I_PICTURE ||
1554              va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_BI_PICTURE))
1555             dmv_surface_valid = 0;
1556         else
1557             dmv_surface_valid = 1;
1558     }
1559
1560     assert(pic_param->picture_fields.bits.frame_coding_mode < 3);
1561
1562     if (pic_param->picture_fields.bits.frame_coding_mode < 2)
1563         fcm = pic_param->picture_fields.bits.frame_coding_mode;
1564     else {
1565         if (pic_param->picture_fields.bits.top_field_first)
1566             fcm = 2;
1567         else
1568             fcm = 3;
1569     }
1570
1571     if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_B_PICTURE) { /* B picture */
1572         brfd = pic_param->reference_fields.bits.reference_distance;
1573         brfd = (scale_factor * brfd) >> 8;
1574         brfd = pic_param->reference_fields.bits.reference_distance - brfd - 1;
1575
1576         if (brfd < 0)
1577             brfd = 0;
1578     }
1579
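    /*
     * Overlap smoothing is only enabled when the VC-1 rules allow it:
     * Simple/Main profile requires PQUANT >= 9 and a non-B picture;
     * Advanced profile requires a P picture with PQUANT >= 9, or an I/BI
     * picture with PQUANT >= 9 or a non-zero conditional overlap flag.
     */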
1580     overlap = pic_param->sequence_fields.bits.overlap;
1581
1582     if (overlap) {
1583         overlap = 0;
1584         if (profile != GEN7_VC1_ADVANCED_PROFILE) {
1585             if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9 &&
1586                 pic_param->picture_fields.bits.picture_type != GEN7_VC1_B_PICTURE) {
1587                 overlap = 1;
1588             }
1589         } else {
1590             if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_P_PICTURE &&
1591                 pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1592                 overlap = 1;
1593             }
1594             if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_I_PICTURE ||
1595                 pic_param->picture_fields.bits.picture_type == GEN7_VC1_BI_PICTURE) {
1596                 if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1597                     overlap = 1;
1598                 } else if (va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 2 ||
1599                            va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 3) {
1600                     overlap = 1;
1601                 }
1602             }
1603         }
1604     }
1605
1606     assert(pic_param->conditional_overlap_flag < 3);
1607     assert(pic_param->mv_fields.bits.mv_table < 4); /* FIXME: interlace mode */
1608
1609     if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPelBilinear ||
1610         (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1611          pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPelBilinear))
1612         interpolation_mode = 9; /* Half-pel bilinear */
1613     else if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPel ||
1614              (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1615               pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPel))
1616         interpolation_mode = 1; /* Half-pel bicubic */
1617     else
1618         interpolation_mode = 0; /* Quarter-pel bicubic */
1619
1620     BEGIN_BCS_BATCH(batch, 6);
1621     OUT_BCS_BATCH(batch, MFD_VC1_LONG_PIC_STATE | (6 - 2));
1622     OUT_BCS_BATCH(batch,
1623                   (((ALIGN(pic_param->coded_height, 16) / 16) - 1) << 16) |
1624                   ((ALIGN(pic_param->coded_width, 16) / 16) - 1));
1625     OUT_BCS_BATCH(batch,
1626                   ((ALIGN(pic_param->coded_width, 16) / 16 + 1) / 2 - 1) << 24 |
1627                   dmv_surface_valid << 15 |
1628                   (pic_param->pic_quantizer_fields.bits.quantizer == 0) << 14 | /* implicit quantizer */
1629                   pic_param->rounding_control << 13 |
1630                   pic_param->sequence_fields.bits.syncmarker << 12 |
1631                   interpolation_mode << 8 |
1632                   0 << 7 | /* FIXME: scale up or down ??? */
1633                   pic_param->range_reduction_frame << 6 |
1634                   pic_param->entrypoint_fields.bits.loopfilter << 5 |
1635                   overlap << 4 |
1636                   !pic_param->picture_fields.bits.is_first_field << 3 |
1637                   (pic_param->sequence_fields.bits.profile == 3) << 0);
1638     OUT_BCS_BATCH(batch,
1639                   va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] << 29 |
1640                   picture_type << 26 |
1641                   fcm << 24 |
1642                   alt_pq << 16 |
1643                   pic_param->pic_quantizer_fields.bits.pic_quantizer_scale << 8 |
1644                   scale_factor << 0);
1645     OUT_BCS_BATCH(batch,
1646                   unified_mv_mode << 28 |
1647                   pic_param->mv_fields.bits.four_mv_switch << 27 |
1648                   pic_param->fast_uvmc_flag << 26 |
1649                   ref_field_pic_polarity << 25 |
1650                   pic_param->reference_fields.bits.num_reference_pictures << 24 |
1651                   pic_param->reference_fields.bits.reference_distance << 20 |
1652                   pic_param->reference_fields.bits.reference_distance << 16 | /* FIXME: ??? */
1653                   pic_param->mv_fields.bits.extended_dmv_range << 10 |
1654                   pic_param->mv_fields.bits.extended_mv_range << 8 |
1655                   alt_pquant_edge_mask << 4 |
1656                   alt_pquant_config << 2 |
1657                   pic_param->pic_quantizer_fields.bits.half_qp << 1 |
1658                   pic_param->pic_quantizer_fields.bits.pic_quantizer_type << 0);
1659     OUT_BCS_BATCH(batch,
1660                   !!pic_param->bitplane_present.value << 31 |
1661                   !pic_param->bitplane_present.flags.bp_forward_mb << 30 |
1662                   !pic_param->bitplane_present.flags.bp_mv_type_mb << 29 |
1663                   !pic_param->bitplane_present.flags.bp_skip_mb << 28 |
1664                   !pic_param->bitplane_present.flags.bp_direct_mb << 27 |
1665                   !pic_param->bitplane_present.flags.bp_overflags << 26 |
1666                   !pic_param->bitplane_present.flags.bp_ac_pred << 25 |
1667                   !pic_param->bitplane_present.flags.bp_field_tx << 24 |
1668                   pic_param->mv_fields.bits.mv_table << 20 |
1669                   pic_param->mv_fields.bits.four_mv_block_pattern_table << 18 |
1670                   pic_param->mv_fields.bits.two_mv_block_pattern_table << 16 |
1671                   pic_param->transform_fields.bits.frame_level_transform_type << 12 |
1672                   pic_param->transform_fields.bits.mb_level_transform_type_flag << 11 |
1673                   pic_param->mb_mode_table << 8 |
1674                   trans_ac_y << 6 |
1675                   pic_param->transform_fields.bits.transform_ac_codingset_idx1 << 4 |
1676                   pic_param->transform_fields.bits.intra_transform_dc_table << 3 |
1677                   pic_param->cbp_table << 0);
1678     ADVANCE_BCS_BATCH(batch);
1679 }
1680
1681 static void
1682 gen7_mfd_vc1_pred_pipe_state(VADriverContextP ctx,
1683                              struct decode_state *decode_state,
1684                              struct gen7_mfd_context *gen7_mfd_context)
1685 {
1686     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1687     VAPictureParameterBufferVC1 *pic_param;
1688     int picture_type;
1689     int intensitycomp_single_fwd = 0;
1690     int luma_scale1 = 0;
1691     int luma_shift1 = 0;
1692
1693     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1694     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1695     picture_type = pic_param->picture_fields.bits.picture_type;
1696
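    /* Pick up the intensity compensation parameters that were stashed on the
     * forward reference surface by gen7_mfd_vc1_decode_init(). */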
1697     if (gen7_mfd_context->reference_surface[0].surface_id != VA_INVALID_ID) {
1698         if (picture_type == 1 || picture_type == 2) { /* P/B picture */
1699             struct gen7_vc1_surface *gen7_vc1_surface = gen7_mfd_context->reference_surface[0].obj_surface->private_data;
1700             if (gen7_vc1_surface) {
1701                 intensitycomp_single_fwd = gen7_vc1_surface->intensity_compensation;
1702                 luma_scale1 = gen7_vc1_surface->luma_scale;
1703                 luma_shift1 = gen7_vc1_surface->luma_shift;
1704             }
1705         }
1706     }
1707
1708     BEGIN_BCS_BATCH(batch, 6);
1709     OUT_BCS_BATCH(batch, MFX_VC1_PRED_PIPE_STATE | (6 - 2));
1710     OUT_BCS_BATCH(batch,
1711                   0 << 14 | /* FIXME: double ??? */
1712                   0 << 12 |
1713                   intensitycomp_single_fwd << 10 |
1714                   0 << 8 |
1715                   0 << 4 | /* FIXME: interlace mode */
1716                   0);
1717     OUT_BCS_BATCH(batch,
1718                   luma_shift1 << 16 |
1719                   luma_scale1 << 0);
1720     OUT_BCS_BATCH(batch, 0);
1721     OUT_BCS_BATCH(batch, 0);
1722     OUT_BCS_BATCH(batch, 0);
1723     ADVANCE_BCS_BATCH(batch);
1724 }
1725
1726
1727 static void
1728 gen7_mfd_vc1_directmode_state(VADriverContextP ctx,
1729                               struct decode_state *decode_state,
1730                               struct gen7_mfd_context *gen7_mfd_context)
1731 {
1732     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1733     struct object_surface *obj_surface;
1734     dri_bo *dmv_read_buffer = NULL, *dmv_write_buffer = NULL;
1735
1736     obj_surface = decode_state->render_object;
1737
1738     if (obj_surface && obj_surface->private_data) {
1739         dmv_write_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1740     }
1741
1742     obj_surface = decode_state->reference_objects[1];
1743
1744     if (obj_surface && obj_surface->private_data) {
1745         dmv_read_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1746     }
1747
1748     BEGIN_BCS_BATCH(batch, 3);
1749     OUT_BCS_BATCH(batch, MFX_VC1_DIRECTMODE_STATE | (3 - 2));
1750
1751     if (dmv_write_buffer)
1752         OUT_BCS_RELOC(batch, dmv_write_buffer,
1753                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
1754                       0);
1755     else
1756         OUT_BCS_BATCH(batch, 0);
1757
1758     if (dmv_read_buffer)
1759         OUT_BCS_RELOC(batch, dmv_read_buffer,
1760                       I915_GEM_DOMAIN_INSTRUCTION, 0,
1761                       0);
1762     else
1763         OUT_BCS_BATCH(batch, 0);
1764
1765     ADVANCE_BCS_BATCH(batch);
1766 }
1767
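/*
 * For the VC-1 Advanced profile (profile == 3), adjust the macroblock data
 * bit offset to account for start-code emulation prevention bytes
 * (0x00 0x00 0x03 followed by a byte < 4) found in the slice header; other
 * profiles use the offset unchanged.
 */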
1768 static int
1769 gen7_mfd_vc1_get_macroblock_bit_offset(uint8_t *buf, int in_slice_data_bit_offset, int profile)
1770 {
1771     int out_slice_data_bit_offset;
1772     int slice_header_size = in_slice_data_bit_offset / 8;
1773     int i, j;
1774
1775     if (profile != 3)
1776         out_slice_data_bit_offset = in_slice_data_bit_offset;
1777     else {
1778         for (i = 0, j = 0; i < slice_header_size; i++, j++) {
1779             if (!buf[j] && !buf[j + 1] && buf[j + 2] == 3 && buf[j + 3] < 4) {
1780                 if (i < slice_header_size - 1)
1781                     i++, j += 2;
1782                 else {
1783                     buf[j + 2] = buf[j + 1];
1784                     j++;
1785                 }
1786             }
1787         }
1788
1789         out_slice_data_bit_offset = 8 * j + in_slice_data_bit_offset % 8;
1790     }
1791
1792     return out_slice_data_bit_offset;
1793 }
1794
1795 static void
1796 gen7_mfd_vc1_bsd_object(VADriverContextP ctx,
1797                         VAPictureParameterBufferVC1 *pic_param,
1798                         VASliceParameterBufferVC1 *slice_param,
1799                         VASliceParameterBufferVC1 *next_slice_param,
1800                         dri_bo *slice_data_bo,
1801                         struct gen7_mfd_context *gen7_mfd_context)
1802 {
1803     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1804     int next_slice_start_vert_pos;
1805     int macroblock_offset;
1806     uint8_t *slice_data = NULL;
1807
1808     dri_bo_map(slice_data_bo, True);
1809     slice_data = (uint8_t *)(slice_data_bo->virtual + slice_param->slice_data_offset);
1810     macroblock_offset = gen7_mfd_vc1_get_macroblock_bit_offset(slice_data,
1811                                                                slice_param->macroblock_offset,
1812                                                                pic_param->sequence_fields.bits.profile);
1813     dri_bo_unmap(slice_data_bo);
1814
1815     if (next_slice_param)
1816         next_slice_start_vert_pos = next_slice_param->slice_vertical_position;
1817     else
1818         next_slice_start_vert_pos = ALIGN(pic_param->coded_height, 16) / 16;
1819
1820     BEGIN_BCS_BATCH(batch, 5);
1821     OUT_BCS_BATCH(batch, MFD_VC1_BSD_OBJECT | (5 - 2));
1822     OUT_BCS_BATCH(batch,
1823                   slice_param->slice_data_size - (macroblock_offset >> 3));
1824     OUT_BCS_BATCH(batch,
1825                   slice_param->slice_data_offset + (macroblock_offset >> 3));
1826     OUT_BCS_BATCH(batch,
1827                   slice_param->slice_vertical_position << 16 |
1828                   next_slice_start_vert_pos << 0);
1829     OUT_BCS_BATCH(batch,
1830                   (macroblock_offset & 0x7));
1831     ADVANCE_BCS_BATCH(batch);
1832 }
1833
1834 static void
1835 gen7_mfd_vc1_decode_picture(VADriverContextP ctx,
1836                             struct decode_state *decode_state,
1837                             struct gen7_mfd_context *gen7_mfd_context)
1838 {
1839     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1840     VAPictureParameterBufferVC1 *pic_param;
1841     VASliceParameterBufferVC1 *slice_param, *next_slice_param, *next_slice_group_param;
1842     dri_bo *slice_data_bo;
1843     int i, j;
1844
1845     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1846     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1847
1848     gen7_mfd_vc1_decode_init(ctx, decode_state, gen7_mfd_context);
1849     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1850     intel_batchbuffer_emit_mi_flush(batch);
1851     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1852     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1853     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1854     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1855     gen7_mfd_vc1_pic_state(ctx, decode_state, gen7_mfd_context);
1856     gen7_mfd_vc1_pred_pipe_state(ctx, decode_state, gen7_mfd_context);
1857     gen7_mfd_vc1_directmode_state(ctx, decode_state, gen7_mfd_context);
1858
1859     for (j = 0; j < decode_state->num_slice_params; j++) {
1860         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1861         slice_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j]->buffer;
1862         slice_data_bo = decode_state->slice_datas[j]->bo;
1863         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1, gen7_mfd_context);
1864
1865         if (j == decode_state->num_slice_params - 1)
1866             next_slice_group_param = NULL;
1867         else
1868             next_slice_group_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j + 1]->buffer;
1869
1870         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1871             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1872
1873             if (i < decode_state->slice_params[j]->num_elements - 1)
1874                 next_slice_param = slice_param + 1;
1875             else
1876                 next_slice_param = next_slice_group_param;
1877
1878             gen7_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
1879             slice_param++;
1880         }
1881     }
1882
1883     intel_batchbuffer_end_atomic(batch);
1884     intel_batchbuffer_flush(batch);
1885 }
1886
1887 static void
1888 gen7_mfd_jpeg_decode_init(VADriverContextP ctx,
1889                           struct decode_state *decode_state,
1890                           struct gen7_mfd_context *gen7_mfd_context)
1891 {
1892     struct object_surface *obj_surface;
1893     VAPictureParameterBufferJPEGBaseline *pic_param;
1894     int subsampling = SUBSAMPLE_YUV420;
1895     int fourcc = VA_FOURCC_IMC3;
1896
1897     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
1898
1899     if (pic_param->num_components == 1) {
1900         subsampling = SUBSAMPLE_YUV400;
1901         fourcc = VA_FOURCC_Y800;
1902     } else if (pic_param->num_components == 3) {
1903         int h1 = pic_param->components[0].h_sampling_factor;
1904         int h2 = pic_param->components[1].h_sampling_factor;
1905         int h3 = pic_param->components[2].h_sampling_factor;
1906         int v1 = pic_param->components[0].v_sampling_factor;
1907         int v2 = pic_param->components[1].v_sampling_factor;
1908         int v3 = pic_param->components[2].v_sampling_factor;
1909
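        /* Map the per-component JPEG sampling factors onto one of the planar
         * surface formats the driver can allocate for the decoded picture. */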
1910         if (h1 == 2 * h2 && h2 == h3 &&
1911             v1 == 2 * v2 && v2 == v3) {
1912             subsampling = SUBSAMPLE_YUV420;
1913             fourcc = VA_FOURCC_IMC3;
1914         } else if (h1 == 2 * h2  && h2 == h3 &&
1915                    v1 == v2 && v2 == v3) {
1916             subsampling = SUBSAMPLE_YUV422H;
1917             fourcc = VA_FOURCC_422H;
1918         } else if (h1 == h2 && h2 == h3 &&
1919                    v1 == v2  && v2 == v3) {
1920             subsampling = SUBSAMPLE_YUV444;
1921             fourcc = VA_FOURCC_444P;
1922         } else if (h1 == 4 * h2 && h2 ==  h3 &&
1923                    v1 == v2 && v2 == v3) {
1924             subsampling = SUBSAMPLE_YUV411;
1925             fourcc = VA_FOURCC_411P;
1926         } else if (h1 == h2 && h2 == h3 &&
1927                    v1 == 2 * v2 && v2 == v3) {
1928             subsampling = SUBSAMPLE_YUV422V;
1929             fourcc = VA_FOURCC_422V;
1930         } else
1931             assert(0);
1932     } else {
1933         assert(0);
1934     }
1935
1936     /* Current decoded picture */
1937     obj_surface = decode_state->render_object;
1938     i965_check_alloc_surface_bo(ctx, obj_surface, 1, fourcc, subsampling);
1939
1940     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1941     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1942     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1943     gen7_mfd_context->pre_deblocking_output.valid = 1;
1944
1945     gen7_mfd_context->post_deblocking_output.bo = NULL;
1946     gen7_mfd_context->post_deblocking_output.valid = 0;
1947
1948     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
1949     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
1950
1951     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
1952     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
1953
1954     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
1955     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 0;
1956
1957     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
1958     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1959
1960     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1961     gen7_mfd_context->bitplane_read_buffer.valid = 0;
1962 }
1963
1964 static const int va_to_gen7_jpeg_rotation[4] = {
1965     GEN7_JPEG_ROTATION_0,
1966     GEN7_JPEG_ROTATION_90,
1967     GEN7_JPEG_ROTATION_180,
1968     GEN7_JPEG_ROTATION_270
1969 };
1970
1971 static void
1972 gen7_mfd_jpeg_pic_state(VADriverContextP ctx,
1973                         struct decode_state *decode_state,
1974                         struct gen7_mfd_context *gen7_mfd_context)
1975 {
1976     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1977     VAPictureParameterBufferJPEGBaseline *pic_param;
1978     int chroma_type = GEN7_YUV420;
1979     int frame_width_in_blks;
1980     int frame_height_in_blks;
1981
1982     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1983     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
1984
1985     if (pic_param->num_components == 1)
1986         chroma_type = GEN7_YUV400;
1987     else if (pic_param->num_components == 3) {
1988         int h1 = pic_param->components[0].h_sampling_factor;
1989         int h2 = pic_param->components[1].h_sampling_factor;
1990         int h3 = pic_param->components[2].h_sampling_factor;
1991         int v1 = pic_param->components[0].v_sampling_factor;
1992         int v2 = pic_param->components[1].v_sampling_factor;
1993         int v3 = pic_param->components[2].v_sampling_factor;
1994
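        /* Derive the MFX chroma type from the per-component sampling
         * factors; the _2Y/_4Y variants differ in the number of luma
         * blocks per MCU. */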
1995         if (h1 == 2 * h2 && h2 == h3 &&
1996             v1 == 2 * v2 && v2 == v3)
1997             chroma_type = GEN7_YUV420;
1998         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1999                  v1 == 1 && v2 == 1 && v3 == 1)
2000             chroma_type = GEN7_YUV422H_2Y;
2001         else if (h1 == h2 && h2 == h3 &&
2002                  v1 == v2 && v2 == v3)
2003             chroma_type = GEN7_YUV444;
2004         else if (h1 == 4 * h2 && h2 == h3 &&
2005                  v1 == v2 && v2 == v3)
2006             chroma_type = GEN7_YUV411;
2007         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2008                  v1 == 2 && v2 == 1 && v3 == 1)
2009             chroma_type = GEN7_YUV422V_2Y;
2010         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2011                  v1 == 2 && v2 == 2 && v3 == 2)
2012             chroma_type = GEN7_YUV422H_4Y;
2013         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
2014                  v1 == 2 && v2 == 1 && v3 == 1)
2015             chroma_type = GEN7_YUV422V_4Y;
2016         else
2017             assert(0);
2018     }
2019
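    /* MFX_JPEG_PIC_STATE takes the frame dimensions in 8x8 blocks; the
     * rounding below depends on the chroma type. */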
2020     if (chroma_type == GEN7_YUV400 ||
2021         chroma_type == GEN7_YUV444 ||
2022         chroma_type == GEN7_YUV422V_2Y) {
2023         frame_width_in_blks = ((pic_param->picture_width + 7) / 8);
2024         frame_height_in_blks = ((pic_param->picture_height + 7) / 8);
2025     } else if (chroma_type == GEN7_YUV411) {
2026         frame_width_in_blks = ((pic_param->picture_width + 31) / 32) * 4;
2027         frame_height_in_blks = ((pic_param->picture_height + 31) / 32) * 4;
2028     } else {
2029         frame_width_in_blks = ((pic_param->picture_width + 15) / 16) * 2;
2030         frame_height_in_blks = ((pic_param->picture_height + 15) / 16) * 2;
2031     }
2032
2033     BEGIN_BCS_BATCH(batch, 3);
2034     OUT_BCS_BATCH(batch, MFX_JPEG_PIC_STATE | (3 - 2));
2035     OUT_BCS_BATCH(batch,
2036                   (va_to_gen7_jpeg_rotation[0] << 4) |    /* without rotation */
2037                   (chroma_type << 0));
2038     OUT_BCS_BATCH(batch,
2039                   ((frame_height_in_blks - 1) << 16) |   /* FrameHeightInBlks */
2040                   ((frame_width_in_blks - 1) << 0));    /* FrameWidthInBlks */
2041     ADVANCE_BCS_BATCH(batch);
2042 }
2043
2044 static const int va_to_gen7_jpeg_hufftable[2] = {
2045     MFX_HUFFTABLE_ID_Y,
2046     MFX_HUFFTABLE_ID_UV
2047 };
2048
2049 static void
2050 gen7_mfd_jpeg_huff_table_state(VADriverContextP ctx,
2051                                struct decode_state *decode_state,
2052                                struct gen7_mfd_context *gen7_mfd_context,
2053                                int num_tables)
2054 {
2055     VAHuffmanTableBufferJPEGBaseline *huffman_table;
2056     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2057     int index;
2058
2059     if (!decode_state->huffman_table || !decode_state->huffman_table->buffer)
2060         return;
2061
2062     huffman_table = (VAHuffmanTableBufferJPEGBaseline *)decode_state->huffman_table->buffer;
2063
2064     for (index = 0; index < num_tables; index++) {
2065         int id = va_to_gen7_jpeg_hufftable[index];
2066         if (!huffman_table->load_huffman_table[index])
2067             continue;
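        /* 12 + 12 + 16 + 164 bytes of table data = 51 DWords which, with the
         * 2-DWord header, gives the 53-DWord packet length. */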
2068         BEGIN_BCS_BATCH(batch, 53);
2069         OUT_BCS_BATCH(batch, MFX_JPEG_HUFF_TABLE_STATE | (53 - 2));
2070         OUT_BCS_BATCH(batch, id);
2071         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_dc_codes, 12);
2072         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].dc_values, 12);
2073         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_ac_codes, 16);
2074         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].ac_values, 164);
2075         ADVANCE_BCS_BATCH(batch);
2076     }
2077 }
2078
2079 static const int va_to_gen7_jpeg_qm[5] = {
2080     -1,
2081     MFX_QM_JPEG_LUMA_Y_QUANTIZER_MATRIX,
2082     MFX_QM_JPEG_CHROMA_CB_QUANTIZER_MATRIX,
2083     MFX_QM_JPEG_CHROMA_CR_QUANTIZER_MATRIX,
2084     MFX_QM_JPEG_ALPHA_QUANTIZER_MATRIX
2085 };
2086
2087 static void
2088 gen7_mfd_jpeg_qm_state(VADriverContextP ctx,
2089                        struct decode_state *decode_state,
2090                        struct gen7_mfd_context *gen7_mfd_context)
2091 {
2092     VAPictureParameterBufferJPEGBaseline *pic_param;
2093     VAIQMatrixBufferJPEGBaseline *iq_matrix;
2094     int index;
2095
2096     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
2097         return;
2098
2099     iq_matrix = (VAIQMatrixBufferJPEGBaseline *)decode_state->iq_matrix->buffer;
2100     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2101
2102     assert(pic_param->num_components <= 3);
2103
2104     for (index = 0; index < pic_param->num_components; index++) {
2105         int id = pic_param->components[index].component_id - pic_param->components[0].component_id + 1;
2106         int qm_type;
2107         unsigned char *qm = iq_matrix->quantiser_table[pic_param->components[index].quantiser_table_selector];
2108         unsigned char raster_qm[64];
2109         int j;
2110
2111         if (id > 4 || id < 1)
2112             continue;
2113
2114         if (!iq_matrix->load_quantiser_table[pic_param->components[index].quantiser_table_selector])
2115             continue;
2116
2117         qm_type = va_to_gen7_jpeg_qm[id];
2118
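        /* The quantiser table is supplied in zig-zag scan order; convert it
         * to raster order before loading it into the hardware. */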
2119         for (j = 0; j < 64; j++)
2120             raster_qm[zigzag_direct[j]] = qm[j];
2121
2122         gen7_mfd_qm_state(ctx, qm_type, raster_qm, 64, gen7_mfd_context);
2123     }
2124 }
2125
2126 static void
2127 gen7_mfd_jpeg_bsd_object(VADriverContextP ctx,
2128                          VAPictureParameterBufferJPEGBaseline *pic_param,
2129                          VASliceParameterBufferJPEGBaseline *slice_param,
2130                          VASliceParameterBufferJPEGBaseline *next_slice_param,
2131                          dri_bo *slice_data_bo,
2132                          struct gen7_mfd_context *gen7_mfd_context)
2133 {
2134     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2135     int scan_component_mask = 0;
2136     int i;
2137
2138     assert(slice_param->num_components > 0);
2139     assert(slice_param->num_components < 4);
2140     assert(slice_param->num_components <= pic_param->num_components);
2141
2142     for (i = 0; i < slice_param->num_components; i++) {
2143         switch (slice_param->components[i].component_selector - pic_param->components[0].component_id + 1) {
2144         case 1:
2145             scan_component_mask |= (1 << 0);
2146             break;
2147         case 2:
2148             scan_component_mask |= (1 << 1);
2149             break;
2150         case 3:
2151             scan_component_mask |= (1 << 2);
2152             break;
2153         default:
2154             assert(0);
2155             break;
2156         }
2157     }
2158
2159     BEGIN_BCS_BATCH(batch, 6);
2160     OUT_BCS_BATCH(batch, MFD_JPEG_BSD_OBJECT | (6 - 2));
2161     OUT_BCS_BATCH(batch,
2162                   slice_param->slice_data_size);
2163     OUT_BCS_BATCH(batch,
2164                   slice_param->slice_data_offset);
2165     OUT_BCS_BATCH(batch,
2166                   slice_param->slice_horizontal_position << 16 |
2167                   slice_param->slice_vertical_position << 0);
2168     OUT_BCS_BATCH(batch,
2169                   ((slice_param->num_components != 1) << 30) |  /* interleaved */
2170                   (scan_component_mask << 27) |                 /* scan components */
2171                   (0 << 26) |   /* disable interrupt allowed */
2172                   (slice_param->num_mcus << 0));                /* MCU count */
2173     OUT_BCS_BATCH(batch,
2174                   (slice_param->restart_interval << 0));    /* RestartInterval */
2175     ADVANCE_BCS_BATCH(batch);
2176 }
2177
2178 /* Workaround for JPEG decoding on Ivybridge */
2179
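/*
 * A tiny hard-coded AVC clip (a single 16x16 intra macroblock, 14 bytes of
 * bitstream, slice data bit offset 40, QP 28) that is decoded through the
 * MFX AVC path before each JPEG picture; see gen7_mfd_jpeg_wa() below.
 */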
2180 static struct {
2181     int width;
2182     int height;
2183     unsigned char data[32];
2184     int data_size;
2185     int data_bit_offset;
2186     int qp;
2187 } gen7_jpeg_wa_clip = {
2188     16,
2189     16,
2190     {
2191         0x65, 0xb8, 0x40, 0x32, 0x13, 0xfd, 0x06, 0x6c,
2192         0xfc, 0x0a, 0x50, 0x71, 0x5c, 0x00
2193     },
2194     14,
2195     40,
2196     28,
2197 };
2198
2199 static void
2200 gen7_jpeg_wa_init(VADriverContextP ctx,
2201                   struct gen7_mfd_context *gen7_mfd_context)
2202 {
2203     struct i965_driver_data *i965 = i965_driver_data(ctx);
2204     VAStatus status;
2205     struct object_surface *obj_surface;
2206
2207     if (gen7_mfd_context->jpeg_wa_surface_id != VA_INVALID_SURFACE)
2208         i965_DestroySurfaces(ctx,
2209                              &gen7_mfd_context->jpeg_wa_surface_id,
2210                              1);
2211
2212     status = i965_CreateSurfaces(ctx,
2213                                  gen7_jpeg_wa_clip.width,
2214                                  gen7_jpeg_wa_clip.height,
2215                                  VA_RT_FORMAT_YUV420,
2216                                  1,
2217                                  &gen7_mfd_context->jpeg_wa_surface_id);
2218     assert(status == VA_STATUS_SUCCESS);
2219
2220     obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2221     assert(obj_surface);
2222     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
2223     gen7_mfd_context->jpeg_wa_surface_object = obj_surface;
2224
2225     if (!gen7_mfd_context->jpeg_wa_slice_data_bo) {
2226         gen7_mfd_context->jpeg_wa_slice_data_bo = dri_bo_alloc(i965->intel.bufmgr,
2227                                                                "JPEG WA data",
2228                                                                0x1000,
2229                                                                0x1000);
2230         dri_bo_subdata(gen7_mfd_context->jpeg_wa_slice_data_bo,
2231                        0,
2232                        gen7_jpeg_wa_clip.data_size,
2233                        gen7_jpeg_wa_clip.data);
2234     }
2235 }
2236
2237 static void
2238 gen7_jpeg_wa_pipe_mode_select(VADriverContextP ctx,
2239                               struct gen7_mfd_context *gen7_mfd_context)
2240 {
2241     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2242
2243     BEGIN_BCS_BATCH(batch, 5);
2244     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
2245     OUT_BCS_BATCH(batch,
2246                   (MFX_LONG_MODE << 17) | /* Currently only the long format is supported */
2247                   (MFD_MODE_VLD << 15) | /* VLD mode */
2248                   (0 << 10) | /* disable Stream-Out */
2249                   (0 << 9)  | /* Post Deblocking Output */
2250                   (1 << 8)  | /* Pre Deblocking Output */
2251                   (0 << 5)  | /* not in stitch mode */
2252                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
2253                   (MFX_FORMAT_AVC << 0));
2254     OUT_BCS_BATCH(batch,
2255                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
2256                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
2257                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
2258                   (0 << 1)  |
2259                   (0 << 0));
2260     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */
2261     OUT_BCS_BATCH(batch, 0); /* reserved */
2262     ADVANCE_BCS_BATCH(batch);
2263 }
2264
2265 static void
2266 gen7_jpeg_wa_surface_state(VADriverContextP ctx,
2267                            struct gen7_mfd_context *gen7_mfd_context)
2268 {
2269     struct object_surface *obj_surface = gen7_mfd_context->jpeg_wa_surface_object;
2270     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2271
2272     BEGIN_BCS_BATCH(batch, 6);
2273     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
2274     OUT_BCS_BATCH(batch, 0);
2275     OUT_BCS_BATCH(batch,
2276                   ((obj_surface->orig_width - 1) << 18) |
2277                   ((obj_surface->orig_height - 1) << 4));
2278     OUT_BCS_BATCH(batch,
2279                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
2280                   (1 << 27) | /* interleave chroma: 1 for this NV12 workaround surface (would be 0 for JPEG) */
2281                   (0 << 22) | /* surface object control state, ignored */
2282                   ((obj_surface->width - 1) << 3) | /* pitch */
2283                   (0 << 2)  | /* must be 0 */
2284                   (1 << 1)  | /* must be tiled */
2285                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
2286     OUT_BCS_BATCH(batch,
2287                   (0 << 16) | /* X offset for U(Cb), must be 0 */
2288                   (obj_surface->y_cb_offset << 0)); /* Y offset for U(Cb) */
2289     OUT_BCS_BATCH(batch,
2290                   (0 << 16) | /* X offset for V(Cr), must be 0 */
2291                   (0 << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
2292     ADVANCE_BCS_BATCH(batch);
2293 }
2294
2295 static void
2296 gen7_jpeg_wa_pipe_buf_addr_state(VADriverContextP ctx,
2297                                  struct gen7_mfd_context *gen7_mfd_context)
2298 {
2299     struct i965_driver_data *i965 = i965_driver_data(ctx);
2300     struct object_surface *obj_surface = gen7_mfd_context->jpeg_wa_surface_object;
2301     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2302     dri_bo *intra_bo;
2303     int i;
2304
2305     intra_bo = dri_bo_alloc(i965->intel.bufmgr,
2306                             "intra row store",
2307                             128 * 64,
2308                             0x1000);
2309
2310     BEGIN_BCS_BATCH(batch, 24);
2311     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
2312     OUT_BCS_RELOC(batch,
2313                   obj_surface->bo,
2314                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2315                   0);
2316
2317     OUT_BCS_BATCH(batch, 0); /* post deblocking */
2318
2319     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2320     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2321
2322     OUT_BCS_RELOC(batch,
2323                   intra_bo,
2324                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2325                   0);
2326
2327     OUT_BCS_BATCH(batch, 0);
2328
2329     /* DW 7..22 */
2330     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2331         OUT_BCS_BATCH(batch, 0);
2332     }
2333
2334     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
2335     ADVANCE_BCS_BATCH(batch);
2336
2337     dri_bo_unreference(intra_bo);
2338 }
2339
2340 static void
2341 gen7_jpeg_wa_bsp_buf_base_addr_state(VADriverContextP ctx,
2342                                      struct gen7_mfd_context *gen7_mfd_context)
2343 {
2344     struct i965_driver_data *i965 = i965_driver_data(ctx);
2345     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2346     dri_bo *bsd_mpc_bo, *mpr_bo;
2347
2348     bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
2349                               "bsd mpc row store",
2350                               11520, /* 1.5 * 120 * 64 */
2351                               0x1000);
2352
2353     mpr_bo = dri_bo_alloc(i965->intel.bufmgr,
2354                           "mpr row store",
2355                           7680, /* 1.0 * 120 * 64 */
2356                           0x1000);
2357
2358     BEGIN_BCS_BATCH(batch, 4);
2359     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
2360
2361     OUT_BCS_RELOC(batch,
2362                   bsd_mpc_bo,
2363                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2364                   0);
2365
2366     OUT_BCS_RELOC(batch,
2367                   mpr_bo,
2368                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2369                   0);
2370     OUT_BCS_BATCH(batch, 0);
2371
2372     ADVANCE_BCS_BATCH(batch);
2373
2374     dri_bo_unreference(bsd_mpc_bo);
2375     dri_bo_unreference(mpr_bo);
2376 }
2377
2378 static void
2379 gen7_jpeg_wa_avc_qm_state(VADriverContextP ctx,
2380                           struct gen7_mfd_context *gen7_mfd_context)
2381 {
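    /* Intentionally empty: the JPEG workaround clip does not load any AVC
     * quantization matrices. */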
2382
2383 }
2384
2385 static void
2386 gen7_jpeg_wa_avc_img_state(VADriverContextP ctx,
2387                            struct gen7_mfd_context *gen7_mfd_context)
2388 {
2389     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2390     int img_struct = 0;
2391     int mbaff_frame_flag = 0;
2392     unsigned int width_in_mbs = 1, height_in_mbs = 1;
2393
2394     BEGIN_BCS_BATCH(batch, 16);
2395     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
2396     OUT_BCS_BATCH(batch,
2397                   (width_in_mbs * height_in_mbs - 1));
2398     OUT_BCS_BATCH(batch,
2399                   ((height_in_mbs - 1) << 16) |
2400                   ((width_in_mbs - 1) << 0));
2401     OUT_BCS_BATCH(batch,
2402                   (0 << 24) |
2403                   (0 << 16) |
2404                   (0 << 14) |
2405                   (0 << 13) |
2406                   (0 << 12) | /* differ from GEN6 */
2407                   (0 << 10) |
2408                   (img_struct << 8));
2409     OUT_BCS_BATCH(batch,
2410                   (1 << 10) | /* 4:2:0 */
2411                   (1 << 7) |  /* CABAC */
2412                   (0 << 6) |
2413                   (0 << 5) |
2414                   (0 << 4) |
2415                   (0 << 3) |
2416                   (1 << 2) |
2417                   (mbaff_frame_flag << 1) |
2418                   (0 << 0));
2419     OUT_BCS_BATCH(batch, 0);
2420     OUT_BCS_BATCH(batch, 0);
2421     OUT_BCS_BATCH(batch, 0);
2422     OUT_BCS_BATCH(batch, 0);
2423     OUT_BCS_BATCH(batch, 0);
2424     OUT_BCS_BATCH(batch, 0);
2425     OUT_BCS_BATCH(batch, 0);
2426     OUT_BCS_BATCH(batch, 0);
2427     OUT_BCS_BATCH(batch, 0);
2428     OUT_BCS_BATCH(batch, 0);
2429     OUT_BCS_BATCH(batch, 0);
2430     ADVANCE_BCS_BATCH(batch);
2431 }
2432
2433 static void
2434 gen7_jpeg_wa_avc_directmode_state(VADriverContextP ctx,
2435                                   struct gen7_mfd_context *gen7_mfd_context)
2436 {
2437     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2438     int i;
2439
2440     BEGIN_BCS_BATCH(batch, 69);
2441     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
2442
2443     /* reference surfaces 0..15 */
2444     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2445         OUT_BCS_BATCH(batch, 0); /* top */
2446         OUT_BCS_BATCH(batch, 0); /* bottom */
2447     }
2448
2449     /* the current decoding frame/field */
2450     OUT_BCS_BATCH(batch, 0); /* top */
2451     OUT_BCS_BATCH(batch, 0); /* bottom */
2452
2453     /* POC List */
2454     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2455         OUT_BCS_BATCH(batch, 0);
2456         OUT_BCS_BATCH(batch, 0);
2457     }
2458
2459     OUT_BCS_BATCH(batch, 0);
2460     OUT_BCS_BATCH(batch, 0);
2461
2462     ADVANCE_BCS_BATCH(batch);
2463 }
2464
2465 static void
2466 gen7_jpeg_wa_ind_obj_base_addr_state(VADriverContextP ctx,
2467                                      struct gen7_mfd_context *gen7_mfd_context)
2468 {
2469     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2470
2471     BEGIN_BCS_BATCH(batch, 11);
2472     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
2473     OUT_BCS_RELOC(batch,
2474                   gen7_mfd_context->jpeg_wa_slice_data_bo,
2475                   I915_GEM_DOMAIN_INSTRUCTION, 0,
2476                   0);
2477     OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
2478     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2479     OUT_BCS_BATCH(batch, 0);
2480     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2481     OUT_BCS_BATCH(batch, 0);
2482     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2483     OUT_BCS_BATCH(batch, 0);
2484     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2485     OUT_BCS_BATCH(batch, 0);
2486     ADVANCE_BCS_BATCH(batch);
2487 }
2488
2489 static void
2490 gen7_jpeg_wa_avc_bsd_object(VADriverContextP ctx,
2491                             struct gen7_mfd_context *gen7_mfd_context)
2492 {
2493     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2494
2495     /* the input bitstream format on GEN7 differs from GEN6 */
2496     BEGIN_BCS_BATCH(batch, 6);
2497     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
2498     OUT_BCS_BATCH(batch, gen7_jpeg_wa_clip.data_size);
2499     OUT_BCS_BATCH(batch, 0);
2500     OUT_BCS_BATCH(batch,
2501                   (0 << 31) |
2502                   (0 << 14) |
2503                   (0 << 12) |
2504                   (0 << 10) |
2505                   (0 << 8));
2506     OUT_BCS_BATCH(batch,
2507                   ((gen7_jpeg_wa_clip.data_bit_offset >> 3) << 16) |
2508                   (0 << 5)  |
2509                   (0 << 4)  |
2510                   (1 << 3) | /* LastSlice Flag */
2511                   (gen7_jpeg_wa_clip.data_bit_offset & 0x7));
2512     OUT_BCS_BATCH(batch, 0);
2513     ADVANCE_BCS_BATCH(batch);
2514 }
2515
2516 static void
2517 gen7_jpeg_wa_avc_slice_state(VADriverContextP ctx,
2518                              struct gen7_mfd_context *gen7_mfd_context)
2519 {
2520     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2521     int slice_hor_pos = 0, slice_ver_pos = 0, next_slice_hor_pos = 0, next_slice_ver_pos = 1;
2522     int num_ref_idx_l0 = 0, num_ref_idx_l1 = 0;
2523     int first_mb_in_slice = 0;
2524     int slice_type = SLICE_TYPE_I;
2525
2526     BEGIN_BCS_BATCH(batch, 11);
2527     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
2528     OUT_BCS_BATCH(batch, slice_type);
2529     OUT_BCS_BATCH(batch,
2530                   (num_ref_idx_l1 << 24) |
2531                   (num_ref_idx_l0 << 16) |
2532                   (0 << 8) |
2533                   (0 << 0));
2534     OUT_BCS_BATCH(batch,
2535                   (0 << 29) |
2536                   (1 << 27) |   /* disable Deblocking */
2537                   (0 << 24) |
2538                   (gen7_jpeg_wa_clip.qp << 16) |
2539                   (0 << 8) |
2540                   (0 << 0));
2541     OUT_BCS_BATCH(batch,
2542                   (slice_ver_pos << 24) |
2543                   (slice_hor_pos << 16) |
2544                   (first_mb_in_slice << 0));
2545     OUT_BCS_BATCH(batch,
2546                   (next_slice_ver_pos << 16) |
2547                   (next_slice_hor_pos << 0));
2548     OUT_BCS_BATCH(batch, (1 << 19)); /* last slice flag */
2549     OUT_BCS_BATCH(batch, 0);
2550     OUT_BCS_BATCH(batch, 0);
2551     OUT_BCS_BATCH(batch, 0);
2552     OUT_BCS_BATCH(batch, 0);
2553     ADVANCE_BCS_BATCH(batch);
2554 }
2555
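/*
 * Emit the complete AVC decode sequence for the 16x16 workaround clip above.
 * gen7_mfd_jpeg_decode_picture() runs this on the same batch buffer before
 * programming the real JPEG decode.
 */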
static void
gen7_mfd_jpeg_wa(VADriverContextP ctx,
                 struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;

    gen7_jpeg_wa_init(ctx, gen7_mfd_context);
    intel_batchbuffer_emit_mi_flush(batch);
    gen7_jpeg_wa_pipe_mode_select(ctx, gen7_mfd_context);
    gen7_jpeg_wa_surface_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_pipe_buf_addr_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_bsp_buf_base_addr_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_avc_qm_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_avc_img_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_ind_obj_base_addr_state(ctx, gen7_mfd_context);

    gen7_jpeg_wa_avc_directmode_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_avc_slice_state(ctx, gen7_mfd_context);
    gen7_jpeg_wa_avc_bsd_object(ctx, gen7_mfd_context);
}

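/*
 * Top-level baseline JPEG decode: run the AVC workaround, program the MFX
 * pipeline for JPEG, then walk the slice (scan) parameters twice -- first
 * to find the highest Huffman table selector in use, then to emit one BSD
 * object per scan.
 */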
void
gen7_mfd_jpeg_decode_picture(VADriverContextP ctx,
                             struct decode_state *decode_state,
                             struct gen7_mfd_context *gen7_mfd_context)
{
    struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
    VAPictureParameterBufferJPEGBaseline *pic_param;
    VASliceParameterBufferJPEGBaseline *slice_param, *next_slice_param, *next_slice_group_param;
    dri_bo *slice_data_bo;
    int i, j, max_selector = 0;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;

    /* Currently only Baseline DCT is supported */
    gen7_mfd_jpeg_decode_init(ctx, decode_state, gen7_mfd_context);
    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
    gen7_mfd_jpeg_wa(ctx, gen7_mfd_context);
    intel_batchbuffer_emit_mi_flush(batch);
    gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
    gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
    gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
    gen7_mfd_jpeg_pic_state(ctx, decode_state, gen7_mfd_context);
    gen7_mfd_jpeg_qm_state(ctx, decode_state, gen7_mfd_context);

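    /*
     * First pass over all slice parameter buffers: record the largest
     * DC/AC Huffman table selector referenced by any scan component.
     */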
    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
        slice_data_bo = decode_state->slice_datas[j]->bo;
        gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            int component;

            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);

            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            for (component = 0; component < slice_param->num_components; component++) {
                if (max_selector < slice_param->components[component].dc_table_selector)
                    max_selector = slice_param->components[component].dc_table_selector;

                if (max_selector < slice_param->components[component].ac_table_selector)
                    max_selector = slice_param->components[component].ac_table_selector;
            }

            slice_param++;
        }
    }

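    /* Baseline JPEG restricts table selectors to 0 and 1, so at most two
     * Huffman tables of each class ever need to be loaded. */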
    assert(max_selector < 2);
    gen7_mfd_jpeg_huff_table_state(ctx, decode_state, gen7_mfd_context, max_selector + 1);

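    /*
     * Second pass: emit one BSD object per scan via gen7_mfd_jpeg_bsd_object,
     * handing each scan a pointer to the next slice parameter (or NULL for
     * the last one).
     */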
    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
        slice_data_bo = decode_state->slice_datas[j]->bo;
        gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);

            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            gen7_mfd_jpeg_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
            slice_param++;
        }
    }

    intel_batchbuffer_end_atomic(batch);
    intel_batchbuffer_flush(batch);
}

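/*
 * Common decode entry point registered as hw_context->run: sanity-check the
 * input buffers, then dispatch to the per-codec decode routine based on the
 * VA profile.
 */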
static VAStatus
gen7_mfd_decode_picture(VADriverContextP ctx,
                        VAProfile profile,
                        union codec_state *codec_state,
                        struct hw_context *hw_context)
{
    struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
    struct decode_state *decode_state = &codec_state->decode;
    VAStatus vaStatus;

    assert(gen7_mfd_context);

    vaStatus = intel_decoder_sanity_check_input(ctx, profile, decode_state);

    if (vaStatus != VA_STATUS_SUCCESS)
        goto out;

    gen7_mfd_context->wa_mpeg2_slice_vertical_position = -1;

    switch (profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        gen7_mfd_mpeg2_decode_picture(ctx, decode_state, gen7_mfd_context);
        break;

    case VAProfileH264ConstrainedBaseline:
    case VAProfileH264Main:
    case VAProfileH264High:
    case VAProfileH264StereoHigh:
        gen7_mfd_avc_decode_picture(ctx, decode_state, gen7_mfd_context);
        break;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        gen7_mfd_vc1_decode_picture(ctx, decode_state, gen7_mfd_context);
        break;

    case VAProfileJPEGBaseline:
        gen7_mfd_jpeg_decode_picture(ctx, decode_state, gen7_mfd_context);
        break;

    default:
        assert(0);
        break;
    }

    vaStatus = VA_STATUS_SUCCESS;

out:
    return vaStatus;
}

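/*
 * Release every buffer object owned by the decoder context, destroy the
 * internal JPEG workaround surface if one was created, then free the batch
 * buffer and the context itself.
 */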
static void
gen7_mfd_context_destroy(void *hw_context)
{
    VADriverContextP ctx;
    struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;

    ctx = (VADriverContextP)(gen7_mfd_context->driver_context);

    dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
    gen7_mfd_context->post_deblocking_output.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
    gen7_mfd_context->pre_deblocking_output.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
    gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
    gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
    gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
    gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
    gen7_mfd_context->bitplane_read_buffer.bo = NULL;

    dri_bo_unreference(gen7_mfd_context->jpeg_wa_slice_data_bo);

    if (gen7_mfd_context->jpeg_wa_surface_id != VA_INVALID_SURFACE) {
        i965_DestroySurfaces(ctx,
                             &gen7_mfd_context->jpeg_wa_surface_id,
                             1);
        gen7_mfd_context->jpeg_wa_surface_object = NULL;
    }

    intel_batchbuffer_free(gen7_mfd_context->base.batch);
    free(gen7_mfd_context);
}

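/*
 * MPEG-2 specific context setup: initialize the cached IQ-matrix load flags
 * to -1, i.e. no quantiser matrices have been loaded yet.
 */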
static void gen7_mfd_mpeg2_context_init(VADriverContextP ctx,
                                        struct gen7_mfd_context *gen7_mfd_context)
{
    gen7_mfd_context->iq_matrix.mpeg2.load_intra_quantiser_matrix = -1;
    gen7_mfd_context->iq_matrix.mpeg2.load_non_intra_quantiser_matrix = -1;
    gen7_mfd_context->iq_matrix.mpeg2.load_chroma_intra_quantiser_matrix = -1;
    gen7_mfd_context->iq_matrix.mpeg2.load_chroma_non_intra_quantiser_matrix = -1;
}

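/*
 * Allocate and initialize the Gen7 MFD decoder context: hook up the
 * run/destroy callbacks, create the batch buffer, reset the reference
 * surface table and the JPEG workaround surface, and apply any per-codec
 * initialization for the configured profile.
 */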
struct hw_context *
gen7_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
{
    struct intel_driver_data *intel = intel_driver_data(ctx);
    struct gen7_mfd_context *gen7_mfd_context = calloc(1, sizeof(struct gen7_mfd_context));
    int i;

    assert(gen7_mfd_context);
    gen7_mfd_context->base.destroy = gen7_mfd_context_destroy;
    gen7_mfd_context->base.run = gen7_mfd_decode_picture;
    gen7_mfd_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);

    for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
        gen7_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
        gen7_mfd_context->reference_surface[i].frame_store_id = -1;
        gen7_mfd_context->reference_surface[i].obj_surface = NULL;
    }

    gen7_mfd_context->jpeg_wa_surface_id = VA_INVALID_SURFACE;
    gen7_mfd_context->jpeg_wa_surface_object = NULL;

    switch (obj_config->profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        gen7_mfd_mpeg2_context_init(ctx, gen7_mfd_context);
        break;

    case VAProfileH264ConstrainedBaseline:
    case VAProfileH264Main:
    case VAProfileH264High:
    case VAProfileH264StereoHigh:
        gen7_mfd_avc_context_init(ctx, gen7_mfd_context);
        break;

    default:
        break;
    }

    gen7_mfd_context->driver_context = ctx;
    return (struct hw_context *)gen7_mfd_context;
}