OSDN Git Service

612d0ee6e46b3cdfb3b29d30355b7148b7340d28
[android-x86/hardware-intel-common-vaapi.git] / src / i965_avc_bsd.c
1 /*
2  * Copyright © 2010 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Xiang Haihao <haihao.xiang@intel.com>
26  *
27  */
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <string.h>
31 #include <assert.h>
32
33 #include "intel_batchbuffer.h"
34 #include "intel_driver.h"
35
36 #include "i965_defines.h"
37 #include "i965_drv_video.h"
38 #include "i965_avc_bsd.h"
39 #include "i965_media_h264.h"
40 #include "i965_media.h"
41 #include "i965_decoder_utils.h"
42
43 static void 
44 i965_avc_bsd_free_avc_bsd_surface(void **data)
45 {
46     struct i965_avc_bsd_surface *avc_bsd_surface = *data;
47
48     if (!avc_bsd_surface)
49         return;
50
51     dri_bo_unreference(avc_bsd_surface->dmv_top);
52     avc_bsd_surface->dmv_top = NULL;
53     dri_bo_unreference(avc_bsd_surface->dmv_bottom);
54     avc_bsd_surface->dmv_bottom = NULL;
55
56     free(avc_bsd_surface);
57     *data = NULL;
58 }
59
60 static void
61 i965_avc_bsd_init_avc_bsd_surface(VADriverContextP ctx, 
62                                   struct object_surface *obj_surface,
63                                   VAPictureParameterBufferH264 *pic_param,
64                                   struct i965_h264_context *i965_h264_context)
65 {
66     struct i965_driver_data *i965 = i965_driver_data(ctx);
67     struct i965_avc_bsd_context *i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
68     struct i965_avc_bsd_surface *avc_bsd_surface = obj_surface->private_data;
69
70     obj_surface->free_private_data = i965_avc_bsd_free_avc_bsd_surface;
71
72     if (!avc_bsd_surface) {
73         avc_bsd_surface = calloc(sizeof(struct i965_avc_bsd_surface), 1);
74         assert((obj_surface->size & 0x3f) == 0);
75         obj_surface->private_data = avc_bsd_surface;
76     }
77
78     avc_bsd_surface->ctx = i965_avc_bsd_context;
79     avc_bsd_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
80                                         !pic_param->seq_fields.bits.direct_8x8_inference_flag);
81
82     if (avc_bsd_surface->dmv_top == NULL) {
83         avc_bsd_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
84                                                 "direct mv w/r buffer",
85                                                 DMV_SIZE,
86                                                 0x1000);
87     }
88
89     if (avc_bsd_surface->dmv_bottom_flag &&
90         avc_bsd_surface->dmv_bottom == NULL) {
91         avc_bsd_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
92                                                    "direct mv w/r buffer",
93                                                    DMV_SIZE,
94                                                    0x1000);
95     }
96 }
97
98 static void
99 i965_bsd_ind_obj_base_address(VADriverContextP ctx,
100                               struct decode_state *decode_state,
101                               int slice,
102                               struct i965_h264_context *i965_h264_context)
103                               
104 {
105     struct intel_batchbuffer *batch = i965_h264_context->batch;
106
107     dri_bo *ind_bo = decode_state->slice_datas[slice]->bo;
108
109     BEGIN_BCS_BATCH(batch, 3);
110     OUT_BCS_BATCH(batch, CMD_BSD_IND_OBJ_BASE_ADDR | (3 - 2));
111     OUT_BCS_RELOC(batch, ind_bo,
112                   I915_GEM_DOMAIN_INSTRUCTION, 0,
113                   0);
114     OUT_BCS_BATCH(batch, 0);
115     ADVANCE_BCS_BATCH(batch);
116 }
117
static void
i965_avc_bsd_img_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       struct i965_h264_context *i965_h264_context)
{
    /*
     * Emit CMD_AVC_BSD_IMG_STATE (6 dwords): the per-picture decode
     * configuration for the BSD unit, derived from the VA-API H.264
     * picture parameter buffer. Must precede the QM/SLICE/OBJECT commands.
     */
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int qm_present_flag;
    int img_struct;
    int mbaff_frame_flag;
    unsigned int avc_it_command_header;
    unsigned int width_in_mbs, height_in_mbs;
    VAPictureParameterBufferH264 *pic_param;

    /* Use app-supplied scaling matrices if a VAIQMatrixBuffer was given;
     * otherwise the hardware falls back to its built-in matrices. */
    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
        qm_present_flag = 1;
    else
        qm_present_flag = 0; /* built-in QM matrices */

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));

    /* Hardware image-structure encoding: 0 = frame, 1 = top field,
     * 3 = bottom field (bit 0 set means "field picture"). */
    if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
        img_struct = 1;
    else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
        img_struct = 3;
    else
        img_struct = 0;

    /* Field flags in CurrPic and pic_fields must agree. */
    if ((img_struct & 0x1) == 0x1) {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
    } else {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
    }

    if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
        assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
        assert(pic_param->pic_fields.bits.field_pic_flag == 0);
    } else {
        assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
    }

    /* MBAFF frame: sequence allows adaptive frame/field MBs and this
     * picture is coded as a frame. */
    mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
                        !pic_param->pic_fields.bits.field_pic_flag);

    width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
    height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff); /* frame height */

    assert(!((width_in_mbs * height_in_mbs) & 0x8000)); /* hardware requirement */

    /* BSD unit doesn't support 4:2:2 and 4:4:4 picture */
    assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
           pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
    assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */

    /* Header dword the BSD unit prepends to each MEDIA_OBJECT_EX it
     * generates for the IT (inverse transform) stage. */
    avc_it_command_header = (CMD_MEDIA_OBJECT_EX | (12 - 2));

    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_IMG_STATE | (6 - 2));
    /* DW1: total macroblock count of the picture */
    OUT_BCS_BATCH(batch, 
                  ((width_in_mbs * height_in_mbs) & 0x7fff));
    /* DW2: picture dimensions in macroblocks */
    OUT_BCS_BATCH(batch, 
                  (height_in_mbs << 16) | 
                  (width_in_mbs << 0));
    /* DW3: QP offsets, surface scan orders, QM presence, image structure */
    OUT_BCS_BATCH(batch, 
                  ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
                  ((pic_param->chroma_qp_index_offset & 0x1f) << 16) | 
                  (SCAN_RASTER_ORDER << 15) | /* AVC ILDB Data */
                  (SCAN_SPECIAL_ORDER << 14) | /* AVC IT Command */
                  (SCAN_RASTER_ORDER << 13) | /* AVC IT Data */
                  (1 << 12) | /* always 1, hardware requirement */
                  (qm_present_flag << 10) |
                  (img_struct << 8) |
                  (16 << 0)); /* FIXME: always support 16 reference frames ??? */
    /* DW4: per-picture coding-tool flags mirrored from pic/seq fields */
    OUT_BCS_BATCH(batch,
                  (RESIDUAL_DATA_OFFSET << 24) | /* residual data offset */
                  (0 << 17) | /* don't overwrite SRT */
                  (0 << 16) | /* Un-SRT (Unsynchronized Root Thread) */
                  (0 << 12) | /* FIXME: no 16MV ??? */
                  (pic_param->seq_fields.bits.chroma_format_idc << 10) |
                  (i965_h264_context->enable_avc_ildb << 8)  | /* Enable ILDB writing output */
                  (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
                  ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
                  (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
                  (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
                  (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
                  (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
                  (mbaff_frame_flag << 1) |
                  (pic_param->pic_fields.bits.field_pic_flag << 0));
    /* DW5: IT command header template */
    OUT_BCS_BATCH(batch, avc_it_command_header);
    ADVANCE_BCS_BATCH(batch);
}
211
212 static void
213 i965_avc_bsd_qm_state(VADriverContextP ctx,
214                       struct decode_state *decode_state,
215                       struct i965_h264_context *i965_h264_context)
216 {
217     struct intel_batchbuffer *batch = i965_h264_context->batch;
218     int cmd_len;
219     VAIQMatrixBufferH264 *iq_matrix;
220     VAPictureParameterBufferH264 *pic_param;
221
222     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
223         return;
224
225     iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
226
227     assert(decode_state->pic_param && decode_state->pic_param->buffer);
228     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
229
230     cmd_len = 2 + 6 * 4; /* always load six 4x4 scaling matrices */
231
232     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
233         cmd_len += 2 * 16; /* load two 8x8 scaling matrices */
234
235     BEGIN_BCS_BATCH(batch, cmd_len);
236     OUT_BCS_BATCH(batch, CMD_AVC_BSD_QM_STATE | (cmd_len - 2));
237
238     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
239         OUT_BCS_BATCH(batch, 
240                       (0x0  << 8) | /* don't use default built-in matrices */
241                       (0xff << 0)); /* six 4x4 and two 8x8 scaling matrices */
242     else
243         OUT_BCS_BATCH(batch, 
244                       (0x0  << 8) | /* don't use default built-in matrices */
245                       (0x3f << 0)); /* six 4x4 scaling matrices */
246
247     intel_batchbuffer_data(batch, &iq_matrix->ScalingList4x4[0][0], 6 * 4 * 4);
248
249     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
250         intel_batchbuffer_data(batch, &iq_matrix->ScalingList8x8[0][0], 2 * 16 * 4);
251
252     ADVANCE_BCS_BATCH(batch);
253 }
254
255 static void
256 i965_avc_bsd_slice_state(VADriverContextP ctx, 
257                          VAPictureParameterBufferH264 *pic_param, 
258                          VASliceParameterBufferH264 *slice_param,
259                          struct i965_h264_context *i965_h264_context)
260 {
261     struct intel_batchbuffer *batch = i965_h264_context->batch;
262     int present_flag, cmd_len, list, j;
263     uint8_t ref_idx_state[32];
264     char weightoffsets[32 * 6];
265
266     /* don't issue SLICE_STATE for intra-prediction decoding */
267     if (slice_param->slice_type == SLICE_TYPE_I ||
268         slice_param->slice_type == SLICE_TYPE_SI)
269         return;
270
271     cmd_len = 2;
272
273     if (slice_param->slice_type == SLICE_TYPE_P ||
274         slice_param->slice_type == SLICE_TYPE_SP) {
275         present_flag = PRESENT_REF_LIST0;
276         cmd_len += 8;
277     } else { 
278         present_flag = PRESENT_REF_LIST0 | PRESENT_REF_LIST1;
279         cmd_len += 16;
280     }
281
282     if ((slice_param->slice_type == SLICE_TYPE_P ||
283          slice_param->slice_type == SLICE_TYPE_SP) && 
284         (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
285         present_flag |= PRESENT_WEIGHT_OFFSET_L0;
286         cmd_len += 48;
287     }
288
289     if ((slice_param->slice_type == SLICE_TYPE_B) &&
290         (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
291         present_flag |= PRESENT_WEIGHT_OFFSET_L0 | PRESENT_WEIGHT_OFFSET_L1;
292         cmd_len += 96;
293     }
294
295     BEGIN_BCS_BATCH(batch, cmd_len);
296     OUT_BCS_BATCH(batch, CMD_AVC_BSD_SLICE_STATE | (cmd_len - 2));
297     OUT_BCS_BATCH(batch, present_flag);
298
299     for (list = 0; list < 2; list++) {
300         int flag, num_va_pics;
301         VAPictureH264 *va_pic;
302
303         if (list == 0) {
304             flag        = PRESENT_REF_LIST0;
305             va_pic      = slice_param->RefPicList0;
306             num_va_pics = slice_param->num_ref_idx_l0_active_minus1 + 1;
307         } else {
308             flag        = PRESENT_REF_LIST1;
309             va_pic      = slice_param->RefPicList1;
310             num_va_pics = slice_param->num_ref_idx_l1_active_minus1 + 1;
311         }
312
313         if (!(present_flag & flag))
314             continue;
315
316         gen5_fill_avc_ref_idx_state(
317             ref_idx_state,
318             va_pic, num_va_pics,
319             i965_h264_context->fsid_list
320         );            
321         intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
322     }
323
324     i965_h264_context->weight128_luma_l0 = 0;
325     i965_h264_context->weight128_luma_l1 = 0;
326     i965_h264_context->weight128_chroma_l0 = 0;
327     i965_h264_context->weight128_chroma_l1 = 0;
328
329     i965_h264_context->weight128_offset0_flag = 0;
330     i965_h264_context->weight128_offset0 = 0;
331
332     if (present_flag & PRESENT_WEIGHT_OFFSET_L0) {
333         for (j = 0; j < 32; j++) {
334             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l0[j];
335             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l0[j];
336             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l0[j][0];
337             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l0[j][0];
338             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l0[j][1];
339             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l0[j][1];
340
341             if (pic_param->pic_fields.bits.weighted_pred_flag == 1 ||
342                 pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
343                 if (i965_h264_context->use_hw_w128) {
344                     if (slice_param->luma_weight_l0[j] == 128)
345                         i965_h264_context->weight128_luma_l0 |= (1 << j);
346
347                     if (slice_param->chroma_weight_l0[j][0] == 128 ||
348                         slice_param->chroma_weight_l0[j][1] == 128)
349                         i965_h264_context->weight128_chroma_l0 |= (1 << j);
350                 } else {
351                     /* FIXME: workaround for weight 128 */
352                     if (slice_param->luma_weight_l0[j] == 128 ||
353                         slice_param->chroma_weight_l0[j][0] == 128 ||
354                         slice_param->chroma_weight_l0[j][1] == 128)
355                         i965_h264_context->weight128_offset0_flag = 1;
356                 }
357             }
358         }
359
360         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
361     }
362
363     if (present_flag & PRESENT_WEIGHT_OFFSET_L1) {
364         for (j = 0; j < 32; j++) {
365             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l1[j];
366             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l1[j];
367             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l1[j][0];
368             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l1[j][0];
369             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l1[j][1];
370             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l1[j][1];
371
372             if (pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
373                 if (i965_h264_context->use_hw_w128) {
374                     if (slice_param->luma_weight_l1[j] == 128)
375                         i965_h264_context->weight128_luma_l1 |= (1 << j);
376
377                     if (slice_param->chroma_weight_l1[j][0] == 128 ||
378                         slice_param->chroma_weight_l1[j][1] == 128)
379                         i965_h264_context->weight128_chroma_l1 |= (1 << j);
380                 } else {
381                     if (slice_param->luma_weight_l0[j] == 128 ||
382                         slice_param->chroma_weight_l0[j][0] == 128 ||
383                         slice_param->chroma_weight_l0[j][1] == 128)
384                         i965_h264_context->weight128_offset0_flag = 1;
385                 }
386             }
387         }
388
389         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
390     }
391
392     ADVANCE_BCS_BATCH(batch);
393 }
394
static void
i965_avc_bsd_buf_base_state(VADriverContextP ctx,
                            VAPictureParameterBufferH264 *pic_param, 
                            VASliceParameterBufferH264 *slice_param,
                            struct i965_h264_context *i965_h264_context)
{
    /*
     * Emit CMD_AVC_BSD_BUF_BASE_STATE (74 dwords, fixed): the working
     * buffers for the BSD unit, the direct-MV buffer addresses for every
     * frame-store slot plus the current picture, and the POC list.
     * NOTE(review): the dword count is fixed at 74, so every branch below
     * must emit exactly the same number of dwords per slot.
     */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    struct i965_avc_bsd_context *i965_avc_bsd_context;
    int i, j;
    VAPictureH264 *va_pic;
    struct object_surface *obj_surface;
    struct i965_avc_bsd_surface *avc_bsd_surface;

    i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;

    BEGIN_BCS_BATCH(batch, 74);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_BUF_BASE_STATE | (74 - 2));
    /* BSD row store (raw) buffer */
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->bsd_raw_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    /* MPR row store buffer */
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->mpr_row_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    /* AVC IT command buffer; offset skips the scoreboard commands when
     * the HW scoreboard is in use */
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_command_mb_info.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  i965_h264_context->avc_it_command_mb_info.mbs * i965_h264_context->use_avc_hw_scoreboard * MB_CMD_IN_BYTES);
    /* AVC IT data buffer (write offset is in 64-byte units) */
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_data.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  (i965_h264_context->avc_it_data.write_offset << 6));

    /* ILDB (in-loop deblocking) control data buffer, only when enabled */
    if (i965_h264_context->enable_avc_ildb)
        OUT_BCS_RELOC(batch, i965_h264_context->avc_ildb_data.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    /* Direct-MV buffer pair (top/bottom) for each of the 16 frame-store
     * slots; 2 dwords per slot, zeros for unused slots. */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
            int found = 0;
            /* Find the ReferenceFrames entry backing this frame store. */
            for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
                va_pic = &pic_param->ReferenceFrames[j];
                
                if (va_pic->flags & VA_PICTURE_H264_INVALID)
                    continue;

                if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
                    found = 1;
                    break;
                }
            }

            assert(found == 1);

            if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
                obj_surface = SURFACE(va_pic->picture_id);
                assert(obj_surface);
                avc_bsd_surface = obj_surface->private_data;
            
                if (avc_bsd_surface == NULL) {
                    OUT_BCS_BATCH(batch, 0);
                    OUT_BCS_BATCH(batch, 0);
                } else {
                    OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                                  I915_GEM_DOMAIN_INSTRUCTION, 0,
                                  0);

                    /* Without a separate bottom-field buffer the top
                     * buffer serves both fields. */
                    if (avc_bsd_surface->dmv_bottom_flag == 1)
                        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                                      0);
                    else
                        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                                      0);
                }
            } 
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* Current picture: make sure its render target and BSD private data
     * exist, then emit its direct-MV buffer pair. */
    va_pic = &pic_param->CurrPic;
    assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
    obj_surface = SURFACE(va_pic->picture_id);
    assert(obj_surface);
    obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
    obj_surface->flags |= (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
    i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
    i965_avc_bsd_init_avc_bsd_surface(ctx, obj_surface, pic_param, i965_h264_context);
    avc_bsd_surface = obj_surface->private_data;

    OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    if (avc_bsd_surface->dmv_bottom_flag == 1)
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);

    /* POC List */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
            int found = 0;
            for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
                va_pic = &pic_param->ReferenceFrames[j];
                
                if (va_pic->flags & VA_PICTURE_H264_INVALID)
                    continue;

                if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
                    found = 1;
                    break;
                }
            }

            assert(found == 1);

            /* NOTE(review): if the entry were invalid this would emit
             * fewer dwords than BEGIN_BCS_BATCH(74) reserved; the assert
             * above is the only guard. */
            if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
                OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
                OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
            } 
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* Current picture's POC closes the list. */
    va_pic = &pic_param->CurrPic;
    OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
    OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);

    ADVANCE_BCS_BATCH(batch);
}
536
537 /*
538  * Return the bit offset to the first bit of the slice data
539  *
540  * VASliceParameterBufferH264.slice_data_bit_offset will point into the part
541  * of slice header if there are some escaped bytes in the slice header. The offset 
542  * to slice data is needed for BSD unit so that BSD unit can fetch right slice data
543  * for processing. This fixes conformance case BASQP1_Sony_C.jsv
544  */
545 static int
546 i965_avc_bsd_get_slice_bit_offset(uint8_t *buf, int mode_flag, int in_slice_data_bit_offset)
547 {
548     int out_slice_data_bit_offset;
549     int slice_header_size = in_slice_data_bit_offset / 8;
550     int i, j;
551
552     for (i = 0, j = 0; i < slice_header_size; i++, j++) {
553         if (!buf[j] && !buf[j + 1] && buf[j + 2] == 3) {
554             i++, j += 2;
555         }
556     }
557
558     out_slice_data_bit_offset = 8 * j + in_slice_data_bit_offset % 8;
559
560     if (mode_flag == ENTROPY_CABAC)
561         out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
562
563     return out_slice_data_bit_offset;
564 }
565
static void
g4x_avc_bsd_object(VADriverContextP ctx, 
                   struct decode_state *decode_state,
                   VAPictureParameterBufferH264 *pic_param,
                   VASliceParameterBufferH264 *slice_param,
                   int slice_index,
                   struct i965_h264_context *i965_h264_context)
{
    /*
     * Emit CMD_AVC_BSD_OBJECT for one slice on G4x-class hardware.
     * With slice_param == NULL a "phantom" (null) slice is emitted
     * instead, which flushes the decode of the picture.
     */
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (slice_param) {
        int encrypted, counter_value, cmd_len;
        int slice_hor_pos, slice_ver_pos;
        int num_ref_idx_l0, num_ref_idx_l1;
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
        int slice_data_bit_offset;
        int weighted_pred_idc = 0;
        int first_mb_in_slice = 0;
        int slice_type;
        uint8_t *slice_data = NULL;

        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */

        /* Encrypted slices carry one extra dword (the counter value). */
        if (encrypted) {
            cmd_len = 9;
            counter_value = 0; /* FIXME: ??? */
        } else 
            cmd_len = 8;

        /* Map the slice data to locate the true start of slice data past
         * any emulation-prevention bytes in the slice header. */
        dri_bo_map(decode_state->slice_datas[slice_index]->bo, 0);
        slice_data = (uint8_t *)(decode_state->slice_datas[slice_index]->bo->virtual + slice_param->slice_data_offset);
        slice_data_bit_offset = i965_avc_bsd_get_slice_bit_offset(slice_data,
                                                                  pic_param->pic_fields.bits.entropy_coding_mode_flag,
                                                                  slice_param->slice_data_bit_offset);
        dri_bo_unmap(decode_state->slice_datas[slice_index]->bo);

        /* Fold SI/SP into the I/P types the hardware understands. */
        if (slice_param->slice_type == SLICE_TYPE_I ||
            slice_param->slice_type == SLICE_TYPE_SI)
            slice_type = SLICE_TYPE_I;
        else if (slice_param->slice_type == SLICE_TYPE_P ||
                 slice_param->slice_type == SLICE_TYPE_SP)
            slice_type = SLICE_TYPE_P;
        else {
            assert(slice_param->slice_type == SLICE_TYPE_B);
            slice_type = SLICE_TYPE_B;
        }

        /* Active reference counts per list, derived from the slice type. */
        if (slice_type == SLICE_TYPE_I) {
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = 0;
            num_ref_idx_l1 = 0;
        } else if (slice_type == SLICE_TYPE_P) {
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = 0;
        } else {
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
        }

        if (slice_type == SLICE_TYPE_P)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
        else if (slice_type == SLICE_TYPE_B)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;

        /* In MBAFF frames first_mb_in_slice counts MB pairs, so double it
         * to get the MB address (H.264 spec 7.4.3). */
        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
        slice_hor_pos = first_mb_in_slice % width_in_mbs; 
        slice_ver_pos = first_mb_in_slice / width_in_mbs;

        BEGIN_BCS_BATCH(batch, cmd_len);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (cmd_len - 2));
        /* DW1: indirect data length = bytes remaining from slice data start */
        OUT_BCS_BATCH(batch, 
                      (encrypted << 31) |
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
        /* DW2: byte offset of the slice data within the indirect object */
        OUT_BCS_BATCH(batch, 
                      (slice_param->slice_data_offset +
                       (slice_data_bit_offset >> 3)));
        /* DW3: error-concealment/handling controls and slice type */
        OUT_BCS_BATCH(batch, 
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
                      (0 << 13) | /* FIXME: ??? */
                      (0 << 12) | /* ignore MPR Error handling */
                      (0 << 10) | /* ignore Entropy Error handling */
                      (0 << 8)  | /* ignore MB Header Error handling */
                      (slice_type << 0));
        /* DW4: reference counts and weight-table denominators */
        OUT_BCS_BATCH(batch, 
                      (num_ref_idx_l1 << 24) |
                      (num_ref_idx_l0 << 16) |
                      (slice_param->chroma_log2_weight_denom << 8) |
                      (slice_param->luma_log2_weight_denom << 0));
        /* DW5: prediction/deblocking controls and slice QP */
        OUT_BCS_BATCH(batch, 
                      (weighted_pred_idc << 30) |
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
                      (slice_param->disable_deblocking_filter_idc << 27) |
                      (slice_param->cabac_init_idc << 24) |
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
        /* DW6: starting MB position of the slice */
        OUT_BCS_BATCH(batch, 
                      (slice_ver_pos << 24) |
                      (slice_hor_pos << 16) | 
                      (first_mb_in_slice << 0));
        /* DW7: number of bits to skip within the first byte of slice data */
        OUT_BCS_BATCH(batch, 
                      (0 << 7) | /* FIXME: ??? */
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0));

        if (encrypted) {
            OUT_BCS_BATCH(batch, counter_value);
        }

        ADVANCE_BCS_BATCH(batch); 
    } else {
        /* Phantom slice: zero-length slice object whose MB count covers
         * the whole picture (halved for field pictures), terminating the
         * picture's decode. */
        BEGIN_BCS_BATCH(batch, 8); 
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (8 - 2));
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
        OUT_BCS_BATCH(batch, 0);
        ADVANCE_BCS_BATCH(batch);
    }
}
694
/*
 * Emit an AVC_BSD_OBJECT command (16 dwords on Ironlake) for one slice.
 *
 * When slice_param is non-NULL, a real slice is programmed: indirect slice
 * data location/size, slice type, active reference list sizes, weighted
 * prediction and deblocking parameters are packed into the command dwords.
 * When slice_param is NULL, a "phantom" slice with zero-length indirect
 * data covering the whole picture is emitted to terminate frame decoding.
 */
static void
ironlake_avc_bsd_object(VADriverContextP ctx, 
                        struct decode_state *decode_state,
                        VAPictureParameterBufferH264 *pic_param,
                        VASliceParameterBufferH264 *slice_param,
                        int slice_index,
                        struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (slice_param) {
        int encrypted, counter_value;
        int slice_hor_pos, slice_ver_pos;
        int num_ref_idx_l0, num_ref_idx_l1;
        /* MBAFF frame: first_mb_in_slice counts MB pairs, so it is doubled
         * below to obtain a macroblock address. */
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
        int slice_data_bit_offset;
        int weighted_pred_idc = 0;
        int first_mb_in_slice;
        int slice_type;
        uint8_t *slice_data = NULL;

        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */

        if (encrypted) {
            counter_value = 0; /* FIXME: ??? */
        } else 
            counter_value = 0;

        /* Map the slice data to compute the exact bit offset of the first
         * macroblock (accounts for emulation-prevention bytes depending on
         * the entropy coding mode). */
        dri_bo_map(decode_state->slice_datas[slice_index]->bo, 0);
        slice_data = (uint8_t *)(decode_state->slice_datas[slice_index]->bo->virtual + slice_param->slice_data_offset);
        slice_data_bit_offset = i965_avc_bsd_get_slice_bit_offset(slice_data,
                                                                  pic_param->pic_fields.bits.entropy_coding_mode_flag,
                                                                  slice_param->slice_data_bit_offset);
        dri_bo_unmap(decode_state->slice_datas[slice_index]->bo);

        /* Fold SI/SP into I/P: only I/P/B are programmed into the command. */
        if (slice_param->slice_type == SLICE_TYPE_I ||
            slice_param->slice_type == SLICE_TYPE_SI)
            slice_type = SLICE_TYPE_I;
        else if (slice_param->slice_type == SLICE_TYPE_P ||
                 slice_param->slice_type == SLICE_TYPE_SP)
            slice_type = SLICE_TYPE_P;
        else {
            assert(slice_param->slice_type == SLICE_TYPE_B);
            slice_type = SLICE_TYPE_B;
        }

        /* Active reference counts per list; list 1 is used by B slices only. */
        if (slice_type == SLICE_TYPE_I) {
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = 0;
            num_ref_idx_l1 = 0;
        } else if (slice_type == SLICE_TYPE_P) {
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = 0;
        } else {
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
        }

        /* P slices use the explicit weighted_pred flag; B slices use the
         * 2-bit weighted_bipred idc. I slices keep 0 (no weighting). */
        if (slice_type == SLICE_TYPE_P)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
        else if (slice_type == SLICE_TYPE_B)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;

        /* Convert the first MB address to X/Y macroblock coordinates. */
        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
        slice_hor_pos = first_mb_in_slice % width_in_mbs; 
        slice_ver_pos = first_mb_in_slice / width_in_mbs;

        BEGIN_BCS_BATCH(batch, 16);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
        /* DW1: indirect data length, counted from the byte holding the
         * first MB (header bytes before slice_data_bit_offset excluded). */
        OUT_BCS_BATCH(batch, 
                      (encrypted << 31) |
                      (0 << 30) | /* FIXME: packet based bit stream */
                      (0 << 29) | /* FIXME: packet format */
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
        /* DW2: indirect data start offset within the slice data BO. */
        OUT_BCS_BATCH(batch, 
                      (slice_param->slice_data_offset +
                       (slice_data_bit_offset >> 3)));
        /* DW3: error handling policy and slice type. */
        OUT_BCS_BATCH(batch, 
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
                      (0 << 13) | /* FIXME: ??? */
                      (0 << 12) | /* ignore MPR Error handling */
                      (0 << 10) | /* ignore Entropy Error handling */
                      (0 << 8)  | /* ignore MB Header Error handling */
                      (slice_type << 0));
        /* DW4: reference list sizes and weight denominators. */
        OUT_BCS_BATCH(batch, 
                      (num_ref_idx_l1 << 24) |
                      (num_ref_idx_l0 << 16) |
                      (slice_param->chroma_log2_weight_denom << 8) |
                      (slice_param->luma_log2_weight_denom << 0));
        /* DW5: prediction/deblocking controls and slice QP
         * (pic_init_qp_minus26 + 26 + slice_qp_delta). */
        OUT_BCS_BATCH(batch, 
                      (weighted_pred_idc << 30) |
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
                      (slice_param->disable_deblocking_filter_idc << 27) |
                      (slice_param->cabac_init_idc << 24) |
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
        /* DW6: first-MB position of this slice. */
        OUT_BCS_BATCH(batch, 
                      (slice_ver_pos << 24) |
                      (slice_hor_pos << 16) | 
                      (first_mb_in_slice << 0));
        /* DW7: residual bit offset into the first data byte (bits already
         * consumed by the slice header within that byte). */
        OUT_BCS_BATCH(batch, 
                      (0 << 7) | /* FIXME: ??? */
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0));
        OUT_BCS_BATCH(batch, counter_value);
        
        /* FIXME: dw9-dw11 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        /* DW12-DW15: per-list "all weights are 128" shortcut flags computed
         * by the slice-state setup. */
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l0);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l1);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l0);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l1);

        ADVANCE_BCS_BATCH(batch); 
    } else {
        /* Phantom slice: zero indirect data, MB count spans the whole
         * picture (halved for field pictures). */
        BEGIN_BCS_BATCH(batch, 16);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        ADVANCE_BCS_BATCH(batch);
    }
}
838
839 static void
840 i965_avc_bsd_object(VADriverContextP ctx, 
841                     struct decode_state *decode_state,
842                     VAPictureParameterBufferH264 *pic_param,
843                     VASliceParameterBufferH264 *slice_param,
844                     int slice_index,
845                     struct i965_h264_context *i965_h264_context)
846 {
847     struct i965_driver_data *i965 = i965_driver_data(ctx);
848
849     if (IS_IRONLAKE(i965->intel.device_id))
850         ironlake_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
851     else
852         g4x_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
853 }
854
/*
 * Emit the trailing "phantom" slice that terminates decoding of a frame.
 * Passing a NULL slice_param selects the phantom-slice path (zero-length
 * indirect data) inside i965_avc_bsd_object().
 */
static void
i965_avc_bsd_phantom_slice(VADriverContextP ctx, 
                           struct decode_state *decode_state,
                           VAPictureParameterBufferH264 *pic_param,
                           struct i965_h264_context *i965_h264_context)
{
    i965_avc_bsd_object(ctx, decode_state, pic_param, NULL, 0, i965_h264_context);
}
863
864 static void
865 i965_avc_bsd_frame_store_index(VADriverContextP ctx,
866                                VAPictureParameterBufferH264 *pic_param,
867                                struct i965_h264_context *i965_h264_context)
868 {
869     struct i965_driver_data *i965 = i965_driver_data(ctx);
870     int i, j;
871
872     assert(ARRAY_ELEMS(i965_h264_context->fsid_list) == ARRAY_ELEMS(pic_param->ReferenceFrames));
873
874     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
875         int found = 0;
876
877         if (i965_h264_context->fsid_list[i].surface_id == VA_INVALID_ID)
878             continue;
879
880         for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
881             VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
882             if (ref_pic->flags & VA_PICTURE_H264_INVALID)
883                 continue;
884
885             if (i965_h264_context->fsid_list[i].surface_id == ref_pic->picture_id) {
886                 found = 1;
887                 break;
888             }
889         }
890
891         if (!found) {
892             struct object_surface *obj_surface = SURFACE(i965_h264_context->fsid_list[i].surface_id);
893             obj_surface->flags &= ~SURFACE_REFERENCED;
894
895             if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
896                 dri_bo_unreference(obj_surface->bo);
897                 obj_surface->bo = NULL;
898                 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
899             }
900
901             if (obj_surface->free_private_data)
902                 obj_surface->free_private_data(&obj_surface->private_data);
903
904             i965_h264_context->fsid_list[i].surface_id = VA_INVALID_ID;
905             i965_h264_context->fsid_list[i].frame_store_id = -1;
906         }
907     }
908
909     for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
910         VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
911         int found = 0;
912
913         if (ref_pic->flags & VA_PICTURE_H264_INVALID)
914             continue;
915
916         for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
917             if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
918                 continue;
919             
920             if (i965_h264_context->fsid_list[j].surface_id == ref_pic->picture_id) {
921                 found = 1;
922                 break;
923             }
924         }
925
926         if (!found) {
927             int frame_idx;
928             struct object_surface *obj_surface = SURFACE(ref_pic->picture_id);
929             assert(obj_surface);
930             i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
931             
932             for (frame_idx = 0; frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list); frame_idx++) {
933                 for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
934                     if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
935                         continue;
936
937                     if (i965_h264_context->fsid_list[j].frame_store_id == frame_idx)
938                         break;
939                 }
940
941                 if (j == ARRAY_ELEMS(i965_h264_context->fsid_list))
942                     break;
943             }
944
945             assert(frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list));
946
947             for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
948                 if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID) {
949                     i965_h264_context->fsid_list[j].surface_id = ref_pic->picture_id;
950                     i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
951                     break;
952                 }
953             }
954         }
955     }
956
957     for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list) - 1; i++) {
958         if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID &&
959             i965_h264_context->fsid_list[i].frame_store_id == i)
960             continue;
961
962         for (j = i + 1; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
963             if (i965_h264_context->fsid_list[j].surface_id != VA_INVALID_ID &&
964                 i965_h264_context->fsid_list[j].frame_store_id == i) {
965                 VASurfaceID id = i965_h264_context->fsid_list[i].surface_id;
966                 int frame_idx = i965_h264_context->fsid_list[i].frame_store_id;
967
968                 i965_h264_context->fsid_list[i].surface_id = i965_h264_context->fsid_list[j].surface_id;
969                 i965_h264_context->fsid_list[i].frame_store_id = i965_h264_context->fsid_list[j].frame_store_id;
970                 i965_h264_context->fsid_list[j].surface_id = id;
971                 i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
972                 break;
973             }
974         }
975     }
976 }
977
978 void 
979 i965_avc_bsd_pipeline(VADriverContextP ctx, struct decode_state *decode_state, void *h264_context)
980 {
981     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
982     struct intel_batchbuffer *batch = i965_h264_context->batch;
983     VAPictureParameterBufferH264 *pic_param;
984     VASliceParameterBufferH264 *slice_param;
985     int i, j;
986
987     assert(decode_state->pic_param && decode_state->pic_param->buffer);
988     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
989     i965_avc_bsd_frame_store_index(ctx, pic_param, i965_h264_context);
990
991     i965_h264_context->enable_avc_ildb = 0;
992     i965_h264_context->picture.i_flag = 1;
993
994     for (j = 0; j < decode_state->num_slice_params && i965_h264_context->enable_avc_ildb == 0; j++) {
995         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
996         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
997
998         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
999             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1000             assert((slice_param->slice_type == SLICE_TYPE_I) ||
1001                    (slice_param->slice_type == SLICE_TYPE_SI) ||
1002                    (slice_param->slice_type == SLICE_TYPE_P) ||
1003                    (slice_param->slice_type == SLICE_TYPE_SP) ||
1004                    (slice_param->slice_type == SLICE_TYPE_B));
1005
1006             if (slice_param->disable_deblocking_filter_idc != 1) {
1007                 i965_h264_context->enable_avc_ildb = 1;
1008                 break;
1009             }
1010
1011             slice_param++;
1012         }
1013     }
1014
1015     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1016
1017     i965_avc_bsd_img_state(ctx, decode_state, i965_h264_context);
1018     i965_avc_bsd_qm_state(ctx, decode_state, i965_h264_context);
1019
1020     for (j = 0; j < decode_state->num_slice_params; j++) {
1021         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1022         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1023
1024         i965_bsd_ind_obj_base_address(ctx, decode_state, j, i965_h264_context);
1025
1026         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1027             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1028             assert((slice_param->slice_type == SLICE_TYPE_I) ||
1029                    (slice_param->slice_type == SLICE_TYPE_SI) ||
1030                    (slice_param->slice_type == SLICE_TYPE_P) ||
1031                    (slice_param->slice_type == SLICE_TYPE_SP) ||
1032                    (slice_param->slice_type == SLICE_TYPE_B));
1033
1034             if (i965_h264_context->picture.i_flag && 
1035                 (slice_param->slice_type != SLICE_TYPE_I ||
1036                  slice_param->slice_type != SLICE_TYPE_SI))
1037                 i965_h264_context->picture.i_flag = 0;
1038
1039             i965_avc_bsd_slice_state(ctx, pic_param, slice_param, i965_h264_context);
1040             i965_avc_bsd_buf_base_state(ctx, pic_param, slice_param, i965_h264_context);
1041             i965_avc_bsd_object(ctx, decode_state, pic_param, slice_param, j, i965_h264_context);
1042             slice_param++;
1043         }
1044     }
1045
1046     i965_avc_bsd_phantom_slice(ctx, decode_state, pic_param, i965_h264_context);
1047     intel_batchbuffer_emit_mi_flush(batch);
1048     intel_batchbuffer_end_atomic(batch);
1049     intel_batchbuffer_flush(batch);
1050 }
1051
1052 void 
1053 i965_avc_bsd_decode_init(VADriverContextP ctx, void *h264_context)
1054 {
1055     struct i965_driver_data *i965 = i965_driver_data(ctx);
1056     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
1057     struct i965_avc_bsd_context *i965_avc_bsd_context;
1058     dri_bo *bo;
1059
1060     assert(i965_h264_context);
1061     i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
1062
1063     dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
1064     bo = dri_bo_alloc(i965->intel.bufmgr,
1065                       "bsd raw store",
1066                       0x3000, /* at least 11520 bytes to support 120 MBs per row */
1067                       64);
1068     assert(bo);
1069     i965_avc_bsd_context->bsd_raw_store.bo = bo;
1070
1071     dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
1072     bo = dri_bo_alloc(i965->intel.bufmgr,
1073                       "mpr row store",
1074                       0x2000, /* at least 7680 bytes to support 120 MBs per row */
1075                       64);
1076     assert(bo);
1077     i965_avc_bsd_context->mpr_row_store.bo = bo;
1078 }
1079
1080 Bool 
1081 i965_avc_bsd_ternimate(struct i965_avc_bsd_context *i965_avc_bsd_context)
1082 {
1083     dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
1084     dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
1085
1086     return True;
1087 }