2 * Copyright (C) 2006-2012 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
27 #include "intel_batchbuffer.h"
28 #include "intel_media.h"
29 #include "i965_drv_video.h"
30 #include "i965_decoder_utils.h"
31 #include "i965_defines.h"
/* VC-1 FPTYPE -> per-field picture type map: [fptype][0] is the first
 * field, [fptype][1] the second field. Indexed below with
 * [picture_type][!is_first_field] for Field-Interlace pictures. */
33 static const int fptype_to_picture_type[8][2] = {
34 {VC1_I_PICTURE, VC1_I_PICTURE},
35 {VC1_I_PICTURE, VC1_P_PICTURE},
36 {VC1_P_PICTURE, VC1_I_PICTURE},
37 {VC1_P_PICTURE, VC1_P_PICTURE},
38 {VC1_B_PICTURE, VC1_B_PICTURE},
39 {VC1_B_PICTURE, VC1_BI_PICTURE},
40 {VC1_BI_PICTURE, VC1_B_PICTURE},
41 {VC1_BI_PICTURE, VC1_BI_PICTURE}
44 /* Set reference surface if backing store exists */
/* NOTE(review): several lines of this function are elided in this extract.
 * Judging by the callers (`n += set_ref_frame(...)`) it returns the number
 * of frame-store entries actually filled in (0 or 1) — confirm upstream. */
47 struct i965_driver_data *i965,
48 GenFrameStore *ref_frame,
49 VASurfaceID va_surface,
50 struct object_surface *obj_surface
/* Skip absent references and references without an allocated buffer object */
53 if (va_surface == VA_INVALID_ID)
56 if (!obj_surface || !obj_surface->bo)
59 ref_frame->surface_id = va_surface;
60 ref_frame->obj_surface = obj_surface;
64 /* Check whether codec layer incorrectly fills in slice_vertical_position */
66 mpeg2_wa_slice_vertical_position(
67 struct decode_state *decode_state,
68 VAPictureParameterBufferMPEG2 *pic_param
71 unsigned int i, j, mb_height, vpos, last_vpos = 0;
73 /* Assume progressive sequence if we got a progressive frame */
74 if (pic_param->picture_coding_extension.bits.progressive_frame)
77 /* Wait for a field coded picture */
78 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
81 assert(decode_state && decode_state->slice_params);
/* Field pictures cover half the frame height: 16-line macroblocks split
 * over two fields => divide the frame height by 32, rounded up. */
83 mb_height = (pic_param->vertical_size + 31) / 32;
85 for (j = 0; j < decode_state->num_slice_params; j++) {
86 struct buffer_store * const buffer_store =
87 decode_state->slice_params[j];
89 for (i = 0; i < buffer_store->num_elements; i++) {
90 VASliceParameterBufferMPEG2 * const slice_param =
91 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
/* A vertical position beyond the field height, or a jump of exactly two
 * MB rows, indicates frame-based positions were filled in for a field
 * picture — warn once and report that the workaround is needed. */
93 vpos = slice_param->slice_vertical_position;
94 if (vpos >= mb_height || vpos == last_vpos + 2) {
95 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
104 /* Build MPEG-2 reference frames array */
106 mpeg2_set_reference_surfaces(
107 VADriverContextP ctx,
108 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
109 struct decode_state *decode_state,
110 VAPictureParameterBufferMPEG2 *pic_param
113 struct i965_driver_data * const i965 = i965_driver_data(ctx);
114 VASurfaceID va_surface;
115 unsigned pic_structure, is_second_field, n = 0;
116 struct object_surface *obj_surface;
118 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
/* Second field of a field pair: the first field of the current frame can
 * itself act as a reference for the second one. */
119 is_second_field = pic_structure != MPEG_FRAME &&
120 !pic_param->picture_coding_extension.bits.is_first_field;
122 ref_frames[0].surface_id = VA_INVALID_ID;
123 ref_frames[0].obj_surface = NULL;
125 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
126 switch (pic_param->picture_coding_type) {
/* P second (bottom) field: current frame's top field precedes the
 * forward reference in the list. */
128 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
129 va_surface = decode_state->current_render_target;
130 obj_surface = decode_state->render_object;
131 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
133 va_surface = pic_param->forward_reference_picture;
134 obj_surface = decode_state->reference_objects[0];
135 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* B pictures reference both the forward and backward frames */
139 va_surface = pic_param->forward_reference_picture;
140 obj_surface = decode_state->reference_objects[0];
141 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
142 va_surface = pic_param->backward_reference_picture;
143 obj_surface = decode_state->reference_objects[1];
144 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Replicate entry 0 into the remaining top-field slots */
149 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
150 ref_frames[n++].surface_id = ref_frames[0].surface_id;
/* Frame-predicted pictures do not use the bottom-field entries */
153 if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
156 ref_frames[2].surface_id = VA_INVALID_ID;
157 ref_frames[2].obj_surface = NULL;
159 /* Bottom field pictures used as reference */
160 switch (pic_param->picture_coding_type) {
/* P second (top) field: current frame's bottom field is a reference */
162 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
163 va_surface = decode_state->current_render_target;
164 obj_surface = decode_state->render_object;
165 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
167 va_surface = pic_param->forward_reference_picture;
168 obj_surface = decode_state->reference_objects[0];
169 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
173 va_surface = pic_param->forward_reference_picture;
174 obj_surface = decode_state->reference_objects[0];
175 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
176 va_surface = pic_param->backward_reference_picture;
177 obj_surface = decode_state->reference_objects[1];
178 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Replicate entry 2 into the remaining bottom-field slots */
183 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
184 ref_frames[n++].surface_id = ref_frames[2].surface_id;
188 /* Ensure the supplied VA surface has valid storage for decoding the
191 avc_ensure_surface_bo(
192 VADriverContextP ctx,
193 struct decode_state *decode_state,
194 struct object_surface *obj_surface,
195 const VAPictureParameterBufferH264 *pic_param
199 uint32_t hw_fourcc, fourcc, subsample, chroma_format;
201 /* Validate chroma format */
202 switch (pic_param->seq_fields.bits.chroma_format_idc) {
/* 4:0:0 (grayscale) */
204 fourcc = VA_FOURCC_Y800;
205 subsample = SUBSAMPLE_YUV400;
206 chroma_format = VA_RT_FORMAT_YUV400;
/* 4:2:0 */
209 fourcc = VA_FOURCC_NV12;
210 subsample = SUBSAMPLE_YUV420;
211 chroma_format = VA_RT_FORMAT_YUV420;
214 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
217 /* Determine the HW surface format, bound to VA config needs */
218 if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
223 case VA_FOURCC_Y800: // Implement with an NV12 surface
224 if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
225 hw_fourcc = VA_FOURCC_NV12;
226 subsample = SUBSAMPLE_YUV420;
232 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
234 /* (Re-)allocate the underlying surface buffer store, if necessary */
235 if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
236 struct i965_driver_data * const i965 = i965_driver_data(ctx);
238 i965_destroy_surface_storage(obj_surface);
239 va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
240 i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
241 if (va_status != VA_STATUS_SUCCESS)
245 /* Fake chroma components if grayscale is implemented on top of NV12 */
/* 0x80 is the neutral (mid-range) chroma sample value for 8-bit video;
 * the interleaved UV plane of NV12 is width*height/2 bytes. */
246 if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
247 const uint32_t uv_offset = obj_surface->width * obj_surface->height;
248 const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;
250 drm_intel_gem_bo_map_gtt(obj_surface->bo);
251 memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
252 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
254 return VA_STATUS_SUCCESS;
257 /* Generate flat scaling matrices for H.264 decoding */
259 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
262 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
265 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
268 /* Returns the POC of the supplied VA picture */
270 avc_get_picture_poc(const VAPictureH264 *va_pic)
272 int structure, field_poc[2];
274 structure = va_pic->flags &
275 (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD);
/* A field that is not part of this picture contributes INT_MAX, so it
 * never wins the MIN() below; frame pictures use both field POCs. */
276 field_poc[0] = structure != VA_PICTURE_H264_BOTTOM_FIELD ?
277 va_pic->TopFieldOrderCnt : INT_MAX;
278 field_poc[1] = structure != VA_PICTURE_H264_TOP_FIELD ?
279 va_pic->BottomFieldOrderCnt : INT_MAX;
280 return MIN(field_poc[0], field_poc[1]);
283 /* Returns a unique picture ID that represents the supplied VA surface object */
285 avc_get_picture_id(struct object_surface *obj_surface)
289 /* This highly depends on how the internal VA objects are organized.
291 Theory of operations:
292 The VA objects are maintained in heaps so that any released VA
293 surface will become free again for future allocation. This means
294 that holes in there are filled in for subsequent allocations.
295 So, this ultimately means that we could just use the Heap ID of
296 the VA surface as the resulting picture ID (16 bits) */
/* +1 keeps 0 out of the valid range; IDs that do not fit in 16 bits
 * cannot be represented and yield -1. */
297 pic_id = 1 + (obj_surface->base.id & OBJECT_HEAP_ID_MASK);
298 return (pic_id <= 0xffff) ? pic_id : -1;
301 /* Finds the VA/H264 picture associated with the specified VA surface id */
303 avc_find_picture(VASurfaceID id, VAPictureH264 *pic_list, int pic_list_count)
/* Linear scan; entries flagged VA_PICTURE_H264_INVALID never match */
307 if (id != VA_INVALID_ID) {
308 for (i = 0; i < pic_list_count; i++) {
309 VAPictureH264 * const va_pic = &pic_list[i];
310 if (va_pic->picture_id == id &&
311 !(va_pic->flags & VA_PICTURE_H264_INVALID))
318 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
319 /* XXX: slice_data_bit_offset does not account for EPB */
321 avc_get_first_mb_bit_offset(
322 dri_bo *slice_data_bo,
323 VASliceParameterBufferH264 *slice_param,
324 unsigned int mode_flag
327 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
/* CABAC slice data always starts byte-aligned (cabac_alignment_one_bit) */
329 if (mode_flag == ENTROPY_CABAC)
330 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
331 return slice_data_bit_offset;
334 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
335 /* XXX: slice_data_bit_offset does not account for EPB */
337 avc_get_first_mb_bit_offset_with_epb(
338 dri_bo *slice_data_bo,
339 VASliceParameterBufferH264 *slice_param,
340 unsigned int mode_flag
343 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
344 unsigned int out_slice_data_bit_offset;
345 unsigned int i, j, n = 0, buf_size, data_size, header_size;
/* Read just enough of the slice data to cover the header; worst case is
 * one emulation-prevention byte every two bytes (x1.5 expansion). */
349 header_size = slice_param->slice_data_bit_offset / 8;
350 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
351 buf_size = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
353 if (buf_size > data_size)
354 buf_size = data_size;
356 buf = malloc(buf_size);
361 ret = dri_bo_get_subdata(
362 slice_data_bo, slice_param->slice_data_offset,
/* Count emulation prevention bytes (00 00 03) inside the slice header:
 * i walks the raw bytes, j the bytes with EPBs removed, n counts EPBs. */
367 for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
368 if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
/* Each EPB shifts the first-MB position by one byte (8 bits) */
375 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
377 if (mode_flag == ENTROPY_CABAC)
378 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
379 return out_slice_data_bit_offset;
382 static inline uint8_t
383 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
385 /* The H.264 standard, and the VA-API specification, allows for at
386 least 3 states for a picture: "used for short-term reference",
387 "used for long-term reference", or considered as not used for
390 The latter is used in the MVC inter prediction and inter-view
391 prediction process (H.8.4). This has an incidence on the
392 colZeroFlag variable, as defined in 8.4.1.2.
394 Since it is not possible to directly program that flag, let's
395 make the hardware derive this value by assimilating "considered
396 as not used for reference" to a "not used for short-term
397 reference", and subsequently making it "used for long-term
398 reference" to fit the definition of Bit6 here */
399 const unsigned int ref_flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE |
400 VA_PICTURE_H264_LONG_TERM_REFERENCE;
401 const unsigned int is_long_term =
402 ((va_pic->flags & ref_flags) != VA_PICTURE_H264_SHORT_TERM_REFERENCE);
403 const unsigned int is_top_field =
404 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
405 const unsigned int is_bottom_field =
406 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
408 return ((is_long_term << 6) |
409 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
410 (frame_store_id << 1) |
411 ((is_top_field ^ 1) & is_bottom_field));
414 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
416 gen5_fill_avc_ref_idx_state(
418 const VAPictureH264 ref_list[32],
419 unsigned int ref_list_count,
420 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
425 for (i = 0; i < ref_list_count; i++) {
426 const VAPictureH264 * const va_pic = &ref_list[i];
/* Invalid or absent entry — end of the active portion of the list
 * (presumably; tail handling is elided in this view). */
428 if ((va_pic->flags & VA_PICTURE_H264_INVALID) ||
429 va_pic->picture_id == VA_INVALID_ID) {
/* Map the VA picture onto its DPB frame store slot */
434 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
435 if (frame_store[j].surface_id == va_pic->picture_id)
439 if (j != MAX_GEN_REFERENCE_FRAMES) { // Found picture in the Frame Store
440 const GenFrameStore * const fs = &frame_store[j];
441 assert(fs->frame_store_id == j); // Current architecture/assumption
442 state[i] = get_ref_idx_state_1(va_pic, fs->frame_store_id);
/* Not in DPB: flag the entry as invalid (0x80) but keep decoding */
444 WARN_ONCE("Invalid RefPicListX[] entry!!! It is not included in DPB\n");
445 state[i] = get_ref_idx_state_1(va_pic, 0) | 0x80;
453 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
455 gen6_send_avc_ref_idx_state_1(
456 struct intel_batchbuffer *batch,
458 const VAPictureH264 *ref_list,
459 unsigned int ref_list_count,
460 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
463 uint8_t ref_idx_state[32];
/* MFX_AVC_REF_IDX_STATE: 10 dwords — header, list id, then the 32-byte
 * ref_idx_state payload streamed via intel_batchbuffer_data(). */
465 BEGIN_BCS_BATCH(batch, 10);
466 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
467 OUT_BCS_BATCH(batch, list);
468 gen5_fill_avc_ref_idx_state(
470 ref_list, ref_list_count,
473 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
474 ADVANCE_BCS_BATCH(batch);
/* Emit the reference index state for every list the slice type needs */
478 gen6_send_avc_ref_idx_state(
479 struct intel_batchbuffer *batch,
480 const VASliceParameterBufferH264 *slice_param,
481 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
/* I/SI slices carry no reference lists at all */
484 if (slice_param->slice_type == SLICE_TYPE_I ||
485 slice_param->slice_type == SLICE_TYPE_SI)
/* List 0 (P and B slices) */
489 gen6_send_avc_ref_idx_state_1(
491 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
/* List 1 (B slices only) */
495 if (slice_param->slice_type != SLICE_TYPE_B)
499 gen6_send_avc_ref_idx_state_1(
501 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
/* Emit MFX_AVC_SLICE_STATE for a "phantom" (zero-work) slice — presumably
 * used to cover macroblocks the bitstream's real slices leave uncovered;
 * confirm against the gen6 MFD programming model. */
507 gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx,
508 VAPictureParameterBufferH264 *pic_param,
509 VASliceParameterBufferH264 *next_slice_param,
510 struct intel_batchbuffer *batch)
512 int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
513 int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
514 int slice_hor_pos, slice_ver_pos, slice_start_mb_num, next_slice_hor_pos, next_slice_ver_pos;
/* In MBAFF frames first_mb_in_slice addresses macroblock *pairs*, so it
 * is scaled by 2 (<< mbaff_picture) below. */
515 int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
516 pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
518 if (next_slice_param) {
519 int first_mb_in_next_slice;
523 slice_start_mb_num = 0;
524 first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
525 next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
526 next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
/* No next slice: the phantom slice terminates at the picture end;
 * field pictures contain half the frame's macroblocks. */
529 slice_ver_pos = height_in_mbs;
530 slice_start_mb_num = width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
531 next_slice_hor_pos = 0;
532 next_slice_ver_pos = 0;
535 BEGIN_BCS_BATCH(batch, 11);
536 OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
537 OUT_BCS_BATCH(batch, 0);
538 OUT_BCS_BATCH(batch, 0);
539 OUT_BCS_BATCH(batch, 0);
541 slice_ver_pos << 24 |
542 slice_hor_pos << 16 |
543 slice_start_mb_num << 0);
545 next_slice_ver_pos << 16 |
546 next_slice_hor_pos << 0);
547 OUT_BCS_BATCH(batch, 0);
548 OUT_BCS_BATCH(batch, 0);
549 OUT_BCS_BATCH(batch, 0);
550 OUT_BCS_BATCH(batch, 0);
551 OUT_BCS_BATCH(batch, 0);
552 ADVANCE_BCS_BATCH(batch);
/* Emit an empty MFD_AVC_BSD_OBJECT (all-zero payload) to accompany the
 * phantom slice state emitted by gen6_mfd_avc_phantom_slice_state(). */
556 gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx,
557 VAPictureParameterBufferH264 *pic_param,
558 struct intel_batchbuffer *batch)
561 BEGIN_BCS_BATCH(batch, 6);
562 OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
563 OUT_BCS_BATCH(batch, 0);
564 OUT_BCS_BATCH(batch, 0);
565 OUT_BCS_BATCH(batch, 0);
566 OUT_BCS_BATCH(batch, 0);
567 OUT_BCS_BATCH(batch, 0);
568 ADVANCE_BCS_BATCH(batch);
/* Emit a full phantom slice: slice state followed by an empty BSD object */
572 gen6_mfd_avc_phantom_slice(VADriverContextP ctx,
573 VAPictureParameterBufferH264 *pic_param,
574 VASliceParameterBufferH264 *next_slice_param,
575 struct intel_batchbuffer *batch)
577 gen6_mfd_avc_phantom_slice_state(ctx, pic_param, next_slice_param, batch);
578 gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param, batch);
581 /* Comparison function for sorting out the array of free frame store entries */
583 compare_avc_ref_store_func(const void *p1, const void *p2)
585 const GenFrameStore * const fs1 = *((GenFrameStore **)p1);
586 const GenFrameStore * const fs2 = *((GenFrameStore **)p2);
588 return fs1->ref_age - fs2->ref_age;
/* Update the Frame Store (DPB slot) array for the current access unit:
 * keep slots whose surfaces are still referenced, recycle the
 * least-recently used free slots for new references. */
592 intel_update_codec_frame_store_index(
593 VADriverContextP ctx,
594 struct decode_state *decode_state,
596 GenFrameStore frame_store[],
598 GenFrameStoreContext *fs_ctx
601 GenFrameStore **free_refs = calloc(num_elements, sizeof(GenFrameStore *));
602 uint32_t used_refs = 0, add_refs = 0;
604 int i, n, num_free_refs;
609 /* Detect changes of access unit */
610 if (fs_ctx->age == 0 || fs_ctx->prev_poc != poc)
612 fs_ctx->prev_poc = poc;
615 /* Tag entries that are still available in our Frame Store */
616 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
617 struct object_surface * const obj_surface =
618 decode_state->reference_objects[i];
622 GenCodecSurface * const codec_surface = obj_surface->private_data;
625 if (codec_surface->frame_store_id >= 0) {
626 GenFrameStore * const fs =
627 &frame_store[codec_surface->frame_store_id];
/* The slot still holds this exact surface: keep it in place and
 * mark the slot as in use via the used_refs bitmask. */
628 if (fs->surface_id == obj_surface->base.id) {
629 fs->obj_surface = obj_surface;
631 used_refs |= 1 << fs->frame_store_id;
638 /* Build and sort out the list of retired candidates. The resulting
639 list is ordered by increasing age when they were last used */
640 for (i = 0, n = 0; i < num_elements; i++) {
641 if (!(used_refs & (1 << i))) {
642 GenFrameStore * const fs = &frame_store[i];
643 fs->obj_surface = NULL;
648 qsort(&free_refs[0], n, sizeof(free_refs[0]), compare_avc_ref_store_func);
650 /* Append the new reference frames */
651 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
652 struct object_surface * const obj_surface =
653 decode_state->reference_objects[i];
654 if (!obj_surface || !(add_refs & (1 << i)))
657 GenCodecSurface * const codec_surface = obj_surface->private_data;
/* Hand the oldest free slot to this new reference and record the
 * slot index back into the surface's codec-private data. */
660 if (n < num_free_refs) {
661 GenFrameStore * const fs = free_refs[n++];
662 fs->surface_id = obj_surface->base.id;
663 fs->obj_surface = obj_surface;
664 fs->frame_store_id = fs - frame_store;
666 codec_surface->frame_store_id = fs->frame_store_id;
669 WARN_ONCE("No free slot found for DPB reference list!!!\n");
/* AVC wrapper: key the generic frame store updater on the current
 * picture's POC, with the AVC-sized frame store array. */
676 intel_update_avc_frame_store_index(
677 VADriverContextP ctx,
678 struct decode_state *decode_state,
679 VAPictureParameterBufferH264 *pic_param,
680 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES],
681 GenFrameStoreContext *fs_ctx
684 intel_update_codec_frame_store_index(ctx,
686 avc_get_picture_poc(&pic_param->CurrPic),
688 MAX_GEN_REFERENCE_FRAMES,
/* Build the HEVC Frame Store in compact form: valid reference surfaces
 * first, then remaining entries explicitly marked invalid. */
693 intel_update_hevc_frame_store_index(
694 VADriverContextP ctx,
695 struct decode_state *decode_state,
696 VAPictureParameterBufferHEVC *pic_param,
697 GenFrameStore frame_store[MAX_GEN_HCP_REFERENCE_FRAMES],
698 GenFrameStoreContext *fs_ctx
703 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
704 struct object_surface * const obj_surface = decode_state->reference_objects[i];
709 GenFrameStore * const fs = &frame_store[n];
710 fs->surface_id = obj_surface->base.id;
711 fs->obj_surface = obj_surface;
712 fs->frame_store_id = n++;
/* The hardware exposes a fixed number of HCP reference slots */
714 if (n == MAX_GEN_HCP_REFERENCE_FRAMES)
/* Invalidate any remaining, unused slots */
718 for (; n < MAX_GEN_HCP_REFERENCE_FRAMES; n++) {
719 GenFrameStore * const fs = &frame_store[n];
721 fs->surface_id = VA_INVALID_ID;
722 fs->obj_surface = NULL;
723 fs->frame_store_id = -1;
/* Gen7.5 variant: rebuild the Frame Store from scratch in compact form —
 * no LRU slot recycling, unlike intel_update_codec_frame_store_index(). */
728 gen75_update_avc_frame_store_index(
729 VADriverContextP ctx,
730 struct decode_state *decode_state,
731 VAPictureParameterBufferH264 *pic_param,
732 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
737 /* Construct the Frame Store array, in compact form. i.e. empty or
738 invalid entries are discarded. */
739 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
740 struct object_surface * const obj_surface =
741 decode_state->reference_objects[i];
745 GenFrameStore * const fs = &frame_store[n];
746 fs->surface_id = obj_surface->base.id;
747 fs->obj_surface = obj_surface;
748 fs->frame_store_id = n++;
751 /* Any remaining entry is marked as invalid */
752 for (; n < MAX_GEN_REFERENCE_FRAMES; n++) {
753 GenFrameStore * const fs = &frame_store[n];
754 fs->surface_id = VA_INVALID_ID;
755 fs->obj_surface = NULL;
756 fs->frame_store_id = -1;
/* Fill the 16-entry picture ID remap list from the Frame Store.
 * NOTE(review): judging by the caller in gen75_send_avc_picid_state(),
 * the (elided) return value indicates whether remapping is needed. */
761 gen75_fill_avc_picid_list(
762 uint16_t pic_ids[16],
763 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
768 /* Fill in with known picture IDs. The Frame Store array is in
769 compact form, i.e. empty entries are only to be found at the
770 end of the array: there are no holes in the set of active
772 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
773 GenFrameStore * const fs = &frame_store[i];
774 if (!fs->obj_surface)
776 pic_id = avc_get_picture_id(fs->obj_surface);
782 /* When an element of the list is not relevant the value of the
783 picture ID shall be set to 0 */
784 for (; i < MAX_GEN_REFERENCE_FRAMES; i++)
/* Emit MFD_AVC_PICID_STATE (10 dwords) with the remapped picture IDs;
 * skipped entirely when gen75_fill_avc_picid_list() reports no remap. */
790 gen75_send_avc_picid_state(
791 struct intel_batchbuffer *batch,
792 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
795 uint16_t pic_ids[16];
797 if (!gen75_fill_avc_picid_list(pic_ids, frame_store))
800 BEGIN_BCS_BATCH(batch, 10);
801 OUT_BCS_BATCH(batch, MFD_AVC_PICID_STATE | (10 - 2));
802 OUT_BCS_BATCH(batch, 0); // enable Picture ID Remapping
803 intel_batchbuffer_data(batch, pic_ids, sizeof(pic_ids));
804 ADVANCE_BCS_BATCH(batch);
/* Build the VC-1 Frame Store: slots 0/2 hold the forward reference,
 * slots 1/3 the backward reference (falling back to the forward one),
 * remaining slots replicate the first four. */
809 intel_update_vc1_frame_store_index(VADriverContextP ctx,
810 struct decode_state *decode_state,
811 VAPictureParameterBufferVC1 *pic_param,
812 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
814 struct object_surface *obj_surface;
817 obj_surface = decode_state->reference_objects[0];
819 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
822 frame_store[0].surface_id = VA_INVALID_ID;
823 frame_store[0].obj_surface = NULL;
824 frame_store[2].surface_id = VA_INVALID_ID;
825 frame_store[2].obj_surface = NULL;
827 frame_store[0].surface_id = pic_param->forward_reference_picture;
828 frame_store[0].obj_surface = obj_surface;
829 frame_store[2].surface_id = pic_param->forward_reference_picture;
830 frame_store[2].obj_surface = obj_surface;
/* Second field of a Field-Interlace pair: the current frame (holding the
 * already-decoded first field) serves as a reference too. */
833 if (pic_param->sequence_fields.bits.interlace &&
834 pic_param->picture_fields.bits.frame_coding_mode == 2 && /* Field-Interlace */
835 !pic_param->picture_fields.bits.is_first_field) {
836 if (pic_param->picture_fields.bits.top_field_first) {
837 frame_store[0].surface_id = decode_state->current_render_target;
838 frame_store[0].obj_surface = decode_state->render_object;
840 frame_store[2].surface_id = decode_state->current_render_target;
841 frame_store[2].obj_surface = decode_state->render_object;
845 obj_surface= decode_state->reference_objects[1];
/* Missing backward reference: reuse the forward reference entries */
847 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
850 frame_store[1].surface_id = frame_store[0].surface_id;
851 frame_store[1].obj_surface = frame_store[0].obj_surface;
852 frame_store[3].surface_id = frame_store[2].surface_id;
853 frame_store[3].obj_surface = frame_store[2].obj_surface;
855 frame_store[1].surface_id = pic_param->backward_reference_picture;
856 frame_store[1].obj_surface = obj_surface;
857 frame_store[3].surface_id = pic_param->backward_reference_picture;
858 frame_store[3].obj_surface = obj_surface;
/* Replicate the first four entries across the rest of the array */
861 for (i = 4; i < MAX_GEN_REFERENCE_FRAMES; i++) {
862 frame_store[i].surface_id = frame_store[i % 4].surface_id;
863 frame_store[i].obj_surface = frame_store[i % 4].obj_surface;
/* Build the VP8 Frame Store: slot 0 = last, slot 1 = golden,
 * slot 2 = altref; invalid golden/altref fall back to slot 0. */
868 intel_update_vp8_frame_store_index(VADriverContextP ctx,
869 struct decode_state *decode_state,
870 VAPictureParameterBufferVP8 *pic_param,
871 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
873 struct object_surface *obj_surface;
876 obj_surface = decode_state->reference_objects[0];
878 if (pic_param->last_ref_frame == VA_INVALID_ID ||
881 frame_store[0].surface_id = VA_INVALID_ID;
882 frame_store[0].obj_surface = NULL;
884 frame_store[0].surface_id = pic_param->last_ref_frame;
885 frame_store[0].obj_surface = obj_surface;
888 obj_surface = decode_state->reference_objects[1];
890 if (pic_param->golden_ref_frame == VA_INVALID_ID ||
893 frame_store[1].surface_id = frame_store[0].surface_id;
894 frame_store[1].obj_surface = frame_store[0].obj_surface;
896 frame_store[1].surface_id = pic_param->golden_ref_frame;
897 frame_store[1].obj_surface = obj_surface;
900 obj_surface = decode_state->reference_objects[2];
902 if (pic_param->alt_ref_frame == VA_INVALID_ID ||
905 frame_store[2].surface_id = frame_store[0].surface_id;
906 frame_store[2].obj_surface = frame_store[0].obj_surface;
908 frame_store[2].surface_id = pic_param->alt_ref_frame;
909 frame_store[2].obj_surface = obj_surface;
/* NOTE(review): i % 2 only ever replicates last/golden into the tail,
 * never the altref in slot 2 — confirm this is intentional. */
912 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
913 frame_store[i].surface_id = frame_store[i % 2].surface_id;
914 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
919 //Obtain the reference frames from the decode state and store them in frame store.
921 intel_update_vp9_frame_store_index(VADriverContextP ctx,
922 struct decode_state *decode_state,
923 VADecPictureParameterBufferVP9 *pic_param,
924 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
926 struct object_surface *obj_surface;
927 int i = 0, index = 0;
929 //Check for the validity of the last reference frame
930 obj_surface = decode_state->reference_objects[0];
932 index = pic_param->pic_fields.bits.last_ref_frame;
933 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
936 frame_store[0].surface_id = VA_INVALID_ID;
937 frame_store[0].obj_surface = NULL;
939 frame_store[0].surface_id = pic_param->reference_frames[index];
940 frame_store[0].obj_surface = obj_surface;
943 //Check for the validity of the golden reference frame
944 obj_surface = decode_state->reference_objects[1];
946 index = pic_param->pic_fields.bits.golden_ref_frame;
/* Invalid golden reference falls back to the last-reference slot */
947 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
950 frame_store[1].surface_id = frame_store[0].surface_id;
951 frame_store[1].obj_surface = frame_store[0].obj_surface;
953 frame_store[1].surface_id = pic_param->reference_frames[index];
954 frame_store[1].obj_surface = obj_surface;
957 //Check for the validity of the altref reference frame
958 obj_surface = decode_state->reference_objects[2];
960 index = pic_param->pic_fields.bits.alt_ref_frame;
961 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
964 frame_store[2].surface_id = frame_store[0].surface_id;
965 frame_store[2].obj_surface = frame_store[0].obj_surface;
967 frame_store[2].surface_id = pic_param->reference_frames[index];
968 frame_store[2].obj_surface = obj_surface;
971 //Set the remaining framestores to either last/golden/altref
/* NOTE(review): i % 2 only replicates last/golden (slots 0/1) — the
 * altref in slot 2 is never replicated despite the comment; confirm. */
972 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
973 frame_store[i].surface_id = frame_store[i % 2].surface_id;
974 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
/* Validate AVC decode parameters and populate reference_objects[] with a
 * 1:1 mapping from ReferenceFrames[] (holes become NULL). Returns a
 * VA_STATUS_* code. */
980 intel_decoder_check_avc_parameter(VADriverContextP ctx,
981 VAProfile h264_profile,
982 struct decode_state *decode_state)
984 struct i965_driver_data *i965 = i965_driver_data(ctx);
985 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
987 struct object_surface *obj_surface;
989 VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
/* The current picture must be valid and match the render target */
992 ASSERT_RET(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID), VA_STATUS_ERROR_INVALID_PARAMETER);
993 ASSERT_RET((pic_param->CurrPic.picture_id != VA_INVALID_SURFACE), VA_STATUS_ERROR_INVALID_PARAMETER);
994 ASSERT_RET((pic_param->CurrPic.picture_id == decode_state->current_render_target), VA_STATUS_ERROR_INVALID_PARAMETER);
996 if (pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
997 WARN_ONCE("Unsupported the ASO constraints!!!\n");
1001 /* Fill in the reference objects array with the actual VA surface
1002 objects with 1:1 correspondance with any entry in ReferenceFrames[],
1003 i.e. including "holes" for invalid entries, that are expanded
1004 to NULL in the reference_objects[] array */
1005 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
1006 const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];
1009 if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
1010 va_pic->picture_id != VA_INVALID_ID) {
1011 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
1013 return VA_STATUS_ERROR_INVALID_SURFACE;
1016 * Sometimes a dummy frame comes from the upper layer
1017 * library, call i965_check_alloc_surface_bo() to make
1018 * sure the store buffer is allocated for this reference
1021 va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
1023 if (va_status != VA_STATUS_SUCCESS)
1026 decode_state->reference_objects[i] = obj_surface;
/* Validate slice ordering: first_mb_in_slice must strictly increase
 * within and across slice parameter buffers. */
1029 for (j = 0; j < decode_state->num_slice_params; j++) {
1030 ASSERT_RET((decode_state->slice_params && decode_state->slice_params[j]->buffer), VA_STATUS_ERROR_INVALID_PARAMETER);
1031 slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1033 if (j == decode_state->num_slice_params - 1)
1034 next_slice_group_param = NULL;
1036 next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
1038 for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1040 if (i < decode_state->slice_params[j]->num_elements - 1)
1041 next_slice_param = slice_param + 1;
1043 next_slice_param = next_slice_group_param;
1045 if (next_slice_param != NULL) {
1046 /* If the mb position of next_slice is less than or equal to the current slice,
1047 * discard the current frame.
1049 if (next_slice_param->first_mb_in_slice <= slice_param->first_mb_in_slice) {
1050 next_slice_param = NULL;
1051 WARN_ONCE("!!!incorrect slice_param. The first_mb_in_slice of next_slice is less"
1052 " than or equal to that in current slice\n");
1059 return VA_STATUS_SUCCESS;
1062 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Validate MPEG-2 decode parameters and populate reference_objects[]
 * (forward ref first, then backward ref for B pictures; NULL-terminated).
 * Returns a VA_STATUS_* code. */
1066 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
1067 struct decode_state *decode_state)
1069 struct i965_driver_data *i965 = i965_driver_data(ctx);
1070 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1071 struct object_surface *obj_surface;
/* I pictures need no references */
1074 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
1075 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
1076 obj_surface = SURFACE(pic_param->forward_reference_picture);
/* A missing/unbacked reference is recorded as NULL, not rejected */
1078 if (!obj_surface || !obj_surface->bo)
1079 decode_state->reference_objects[i++] = NULL;
1081 decode_state->reference_objects[i++] = obj_surface;
1082 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
1083 obj_surface = SURFACE(pic_param->forward_reference_picture);
1085 if (!obj_surface || !obj_surface->bo)
1086 decode_state->reference_objects[i++] = NULL;
1088 decode_state->reference_objects[i++] = obj_surface;
1090 obj_surface = SURFACE(pic_param->backward_reference_picture);
1092 if (!obj_surface || !obj_surface->bo)
1093 decode_state->reference_objects[i++] = NULL;
1095 decode_state->reference_objects[i++] = obj_surface;
/* NULL-terminate the reference list */
1100 decode_state->reference_objects[i] = NULL;
1102 return VA_STATUS_SUCCESS;
1105 return VA_STATUS_ERROR_INVALID_PARAMETER;
/*
 * Validate the VC-1 picture parameters and collect the reference
 * surfaces into decode_state->reference_objects[] (NULL-terminated).
 * For field-interlace pictures the FPTYPE value is mapped to a
 * per-field picture type via fptype_to_picture_type[][], indexed by
 * whether the current field is the first or second of the frame.
 * NOTE(review): this excerpt omits interleaved source lines ("else",
 * braces, the error label); comments describe the visible code only.
 */
1109 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
1110 struct decode_state *decode_state)
1112 struct i965_driver_data *i965 = i965_driver_data(ctx);
1113 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1114 struct object_surface *obj_surface;
1116 int is_first_field = 1;
1119 if (!pic_param->sequence_fields.bits.interlace ||
1120 (pic_param->picture_fields.bits.frame_coding_mode < 2)) { /* Progressive or Frame-Interlace */
1121 picture_type = pic_param->picture_fields.bits.picture_type;
1122 } else {/* Field-Interlace */
1123 is_first_field = pic_param->picture_fields.bits.is_first_field;
/* FPTYPE encodes both fields' types; select the one for this field. */
1124 picture_type = fptype_to_picture_type[pic_param->picture_fields.bits.picture_type][!is_first_field];
/* I and BI pictures reference no other frame: nothing to collect. */
1127 if (picture_type == VC1_I_PICTURE ||
1128 picture_type == VC1_BI_PICTURE) {
1129 } else if (picture_type == VC1_P_PICTURE ||
1130 picture_type == VC1_SKIPPED_PICTURE) {
/* P/skipped picture: a single forward reference (NULL if no bo). */
1131 obj_surface = SURFACE(pic_param->forward_reference_picture);
1133 if (!obj_surface || !obj_surface->bo)
1134 decode_state->reference_objects[i++] = NULL;
1136 decode_state->reference_objects[i++] = obj_surface;
1137 } else if (picture_type == VC1_B_PICTURE) {
/* B picture: forward reference first, then backward reference. */
1138 obj_surface = SURFACE(pic_param->forward_reference_picture);
1140 if (!obj_surface || !obj_surface->bo)
1141 decode_state->reference_objects[i++] = NULL;
1143 decode_state->reference_objects[i++] = obj_surface;
1145 obj_surface = SURFACE(pic_param->backward_reference_picture);
1147 if (!obj_surface || !obj_surface->bo)
1148 decode_state->reference_objects[i++] = NULL;
1150 decode_state->reference_objects[i++] = obj_surface;
/* Terminate the reference list. */
1155 decode_state->reference_objects[i] = NULL;
1157 return VA_STATUS_SUCCESS;
/* Reached via the (not visible) error path. */
1160 return VA_STATUS_ERROR_INVALID_PARAMETER;
/*
 * Collect the VP8 reference surfaces (last, golden, alt-ref — in
 * that order) into decode_state->reference_objects[], storing NULL
 * for any reference whose surface has no backing bo; the list is
 * NULL-terminated. Always returns VA_STATUS_SUCCESS in the visible
 * code.
 * NOTE(review): this excerpt omits interleaved source lines ("else"
 * branches and closing braces).
 */
1164 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
1165 struct decode_state *decode_state)
1167 struct i965_driver_data *i965 = i965_driver_data(ctx);
1168 VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
1169 struct object_surface *obj_surface;
/* Last reference frame. */
1172 if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
1173 obj_surface = SURFACE(pic_param->last_ref_frame);
1175 if (obj_surface && obj_surface->bo)
1176 decode_state->reference_objects[i++] = obj_surface;
1178 decode_state->reference_objects[i++] = NULL;
/* Golden reference frame. */
1181 if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
1182 obj_surface = SURFACE(pic_param->golden_ref_frame);
1184 if (obj_surface && obj_surface->bo)
1185 decode_state->reference_objects[i++] = obj_surface;
1187 decode_state->reference_objects[i++] = NULL;
/* Alternate reference frame. */
1190 if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
1191 obj_surface = SURFACE(pic_param->alt_ref_frame);
1193 if (obj_surface && obj_surface->bo)
1194 decode_state->reference_objects[i++] = obj_surface;
1196 decode_state->reference_objects[i++] = NULL;
/* Terminate the reference list. */
1200 decode_state->reference_objects[i] = NULL;
1202 return VA_STATUS_SUCCESS;
/*
 * Ensure obj_surface is backed by a buffer in the fourcc required by
 * the HEVC stream bit depth: P010 when luma or chroma depth exceeds
 * 8 bits, NV12 otherwise. When the current fourcc does not match,
 * the existing storage is destroyed and a new one is allocated
 * (tiled if the codec supports it).
 * NOTE(review): the lines setting the (implied) "update" flag and
 * the remaining arguments/tail of i965_check_alloc_surface_bo() are
 * not visible in this excerpt.
 */
1206 hevc_ensure_surface_bo(
1207 VADriverContextP ctx,
1208 struct decode_state *decode_state,
1209 struct object_surface *obj_surface,
1210 const VAPictureParameterBufferHEVC *pic_param
1213 VAStatus va_status = VA_STATUS_SUCCESS;
1215 unsigned int fourcc = VA_FOURCC_NV12;
/* >8-bit content needs a 10-bit-capable P010 surface. */
1217 if ((pic_param->bit_depth_luma_minus8 > 0)
1218 || (pic_param->bit_depth_chroma_minus8 > 0)) {
1219 if (obj_surface->fourcc != VA_FOURCC_P010) {
1221 fourcc = VA_FOURCC_P010;
1223 } else if (obj_surface->fourcc != VA_FOURCC_NV12) {
1225 fourcc = VA_FOURCC_NV12;
1228 /* (Re-)allocate the underlying surface buffer store, if necessary */
1229 if (!obj_surface->bo || update) {
1230 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* Drop the old storage before allocating with the new fourcc. */
1232 i965_destroy_surface_storage(obj_surface);
1234 va_status = i965_check_alloc_surface_bo(ctx,
1236 i965->codec_info->has_tiled_surface,
1244 //Ensure the render surface matches the stream's required format (P010 for profile >= 2, NV12 otherwise); (re)create it if needed.
/*
 * NOTE(review): mirrors hevc_ensure_surface_bo() above, keyed on the
 * VP9 profile instead of bit depth. The lines setting the (implied)
 * "update" flag and the tail of the i965_check_alloc_surface_bo()
 * call are not visible in this excerpt.
 */
1246 vp9_ensure_surface_bo(
1247 VADriverContextP ctx,
1248 struct decode_state *decode_state,
1249 struct object_surface *obj_surface,
1250 const VADecPictureParameterBufferVP9 *pic_param
1253 VAStatus va_status = VA_STATUS_SUCCESS;
1255 unsigned int fourcc = VA_FOURCC_NV12;
/* VP9 profiles 2/3 carry 10/12-bit content and need P010. */
1257 if (pic_param->profile >= 2) {
1258 if (obj_surface->fourcc != VA_FOURCC_P010) {
1260 fourcc = VA_FOURCC_P010;
1262 } else if (obj_surface->fourcc != VA_FOURCC_NV12) {
1264 fourcc = VA_FOURCC_NV12;
1267 /* (Re-)allocate the underlying surface buffer store, if necessary */
1268 if (!obj_surface->bo || update) {
1269 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* Drop the old storage before allocating with the new fourcc. */
1271 i965_destroy_surface_storage(obj_surface);
1273 va_status = i965_check_alloc_surface_bo(ctx,
1275 i965->codec_info->has_tiled_surface,
/*
 * Validate the HEVC picture parameters: the current picture must be
 * valid and match the render target, and the picture dimensions must
 * be multiples of the minimum coding-block size. Then mirror
 * ReferenceFrames[] 1:1 into decode_state->reference_objects[],
 * ensuring each active reference has a correctly-formatted bo.
 * NOTE(review): this excerpt omits interleaved source lines (goto
 * targets, braces, the tail of the function).
 */
1284 intel_decoder_check_hevc_parameter(VADriverContextP ctx,
1285 struct decode_state *decode_state)
1287 struct i965_driver_data *i965 = i965_driver_data(ctx);
1288 VAPictureParameterBufferHEVC *pic_param = (VAPictureParameterBufferHEVC *)decode_state->pic_param->buffer;
1289 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1290 struct object_surface *obj_surface;
/* The current picture itself must be a valid surface... */
1294 if (pic_param->CurrPic.flags & VA_PICTURE_HEVC_INVALID ||
1295 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
/* ...and must be the surface the application is rendering to. */
1298 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
/* Picture dimensions must align to the minimum luma CB size. */
1301 min_cb_size = (1 << (pic_param->log2_min_luma_coding_block_size_minus3 + 3));
1303 if (pic_param->pic_width_in_luma_samples % min_cb_size ||
1304 pic_param->pic_height_in_luma_samples % min_cb_size)
1307 /* Fill in the reference objects array with the actual VA surface
1308 objects with 1:1 correspondence with any entry in ReferenceFrames[],
1309 i.e. including "holes" for invalid entries, that are expanded
1310 to NULL in the reference_objects[] array */
1311 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
1312 const VAPictureHEVC * const va_pic = &pic_param->ReferenceFrames[i];
1317 * Only the index with (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1318 * VA_PICTURE_HEVC_RPS_ST_CURR_AFTER | VA_PICTURE_HEVC_RPS_LT_CURR)
/* Only entries active in the current picture's RPS are resolved. */
1321 if (!(va_pic->flags & VA_PICTURE_HEVC_INVALID) &&
1322 (va_pic->picture_id != VA_INVALID_ID) &&
1323 (va_pic->flags & (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1324 VA_PICTURE_HEVC_RPS_ST_CURR_AFTER |
1325 VA_PICTURE_HEVC_RPS_LT_CURR))) {
1327 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
1330 va_status = VA_STATUS_ERROR_INVALID_SURFACE;
/* Make sure the reference bo exists in the right format (NV12/P010). */
1334 va_status = hevc_ensure_surface_bo(ctx, decode_state, obj_surface,
1337 if (va_status != VA_STATUS_SUCCESS)
1341 decode_state->reference_objects[i] = obj_surface;
1344 va_status = VA_STATUS_SUCCESS;
1350 //Obtains reference frames from the picture parameter and
1351 //then sets the reference frames in the decode_state
/*
 * Validates the requested profile and frame dimensions, then stores
 * the last/golden/alt-ref reference surfaces (looked up by index in
 * pic_param->reference_frames[]) into
 * decode_state->reference_objects[], NULL-terminated.
 * NOTE(review): this excerpt omits interleaved source lines ("else"
 * branches, braces, error returns).
 */
1353 intel_decoder_check_vp9_parameter(VADriverContextP ctx,
1355 struct decode_state *decode_state)
1357 struct i965_driver_data *i965 = i965_driver_data(ctx);
1358 VADecPictureParameterBufferVP9 *pic_param = (VADecPictureParameterBufferVP9 *)decode_state->pic_param->buffer;
1359 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1360 struct object_surface *obj_surface;
1361 int i = 0, index = 0;
/* The configured VAProfile must be at least the stream's profile. */
1363 if ((profile - VAProfileVP9Profile0) < pic_param->profile)
1366 //Max support up to 4k for BXT
/* Frame width/height must be in [1, 4096]. */
1367 if ((pic_param->frame_width - 1 < 0) || (pic_param->frame_width - 1 > 4095))
1370 if ((pic_param->frame_height - 1 < 0) || (pic_param->frame_height - 1 > 4095))
1373 //Set the reference object in decode state for last reference
1374 index = pic_param->pic_fields.bits.last_ref_frame;
1375 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1376 obj_surface = SURFACE(pic_param->reference_frames[index]);
/* Missing backing store is recorded as a NULL reference. */
1378 if (obj_surface && obj_surface->bo)
1379 decode_state->reference_objects[i++] = obj_surface;
1381 decode_state->reference_objects[i++] = NULL;
1384 //Set the reference object in decode state for golden reference
1385 index = pic_param->pic_fields.bits.golden_ref_frame;
1386 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1387 obj_surface = SURFACE(pic_param->reference_frames[index]);
1389 if (obj_surface && obj_surface->bo)
1390 decode_state->reference_objects[i++] = obj_surface;
1392 decode_state->reference_objects[i++] = NULL;
1395 //Set the reference object in decode state for altref reference
1396 index = pic_param->pic_fields.bits.alt_ref_frame;
1397 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1398 obj_surface = SURFACE(pic_param->reference_frames[index]);
1400 if (obj_surface && obj_surface->bo)
1401 decode_state->reference_objects[i++] = obj_surface;
1403 decode_state->reference_objects[i++] = NULL;
/* Terminate the reference list. */
1407 decode_state->reference_objects[i] = NULL;
1409 return VA_STATUS_SUCCESS;
/*
 * Entry point for pre-decode validation: checks that the render
 * target surface is valid, records it as decode_state->render_object,
 * then dispatches to the per-codec parameter checker based on the
 * VA profile. Returns the checker's status, or
 * VA_STATUS_ERROR_INVALID_PARAMETER for unsupported profiles.
 * NOTE(review): this excerpt omits interleaved source lines (goto
 * targets, "break" statements, closing braces).
 */
1413 intel_decoder_sanity_check_input(VADriverContextP ctx,
1415 struct decode_state *decode_state)
1417 struct i965_driver_data *i965 = i965_driver_data(ctx);
1418 struct object_surface *obj_surface;
1419 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* A valid render target surface is mandatory. */
1421 if (decode_state->current_render_target == VA_INVALID_SURFACE)
1424 obj_surface = SURFACE(decode_state->current_render_target);
1429 decode_state->render_object = obj_surface;
/* Dispatch to the codec-specific parameter checker. */
1432 case VAProfileMPEG2Simple:
1433 case VAProfileMPEG2Main:
1434 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
1437 case VAProfileH264ConstrainedBaseline:
1438 case VAProfileH264Main:
1439 case VAProfileH264High:
1440 case VAProfileH264StereoHigh:
1441 case VAProfileH264MultiviewHigh:
1442 vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
1445 case VAProfileVC1Simple:
1446 case VAProfileVC1Main:
1447 case VAProfileVC1Advanced:
1448 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
/* JPEG has no inter-frame references: nothing to validate here. */
1451 case VAProfileJPEGBaseline:
1452 vaStatus = VA_STATUS_SUCCESS;
1455 case VAProfileVP8Version0_3:
1456 vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
1459 case VAProfileHEVCMain:
1460 case VAProfileHEVCMain10:
1461 vaStatus = intel_decoder_check_hevc_parameter(ctx, decode_state);
1464 case VAProfileVP9Profile0:
1465 case VAProfileVP9Profile2:
1466 vaStatus = intel_decoder_check_vp9_parameter(ctx, profile, decode_state);
/* Any other profile is rejected. */
1470 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1479 * Return the next slice parameter
1482 * slice_param: the current slice
1483 * *group_idx & *element_idx the current slice position in slice groups
1485 * Return the next slice parameter
1486 * *group_idx & *element_idx the next slice position in slice groups,
1487 * if the next slice is NULL, *group_idx & *element_idx will be ignored
/*
 * Scan the remaining slice parameters (starting just after
 * *group_idx/*element_idx) and return the first slice whose
 * macroblock start position (row * width_in_mbs + column) is not
 * before the current slice's — i.e. the next slice in decode order,
 * skipping any out-of-order entries.
 * NOTE(review): the updates to *group_idx/*element_idx and the
 * "return NULL" fall-through are not visible in this excerpt.
 */
1489 VASliceParameterBufferMPEG2 *
1490 intel_mpeg2_find_next_slice(struct decode_state *decode_state,
1491 VAPictureParameterBufferMPEG2 *pic_param,
1492 VASliceParameterBufferMPEG2 *slice_param,
1496 VASliceParameterBufferMPEG2 *next_slice_param;
1497 unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
/* Resume the scan at the element after the current slice. */
1498 int j = *group_idx, i = *element_idx + 1;
1500 for (; j < decode_state->num_slice_params; j++) {
1501 for (; i < decode_state->slice_params[j]->num_elements; i++) {
1502 next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;
/* Compare linear MB start positions to keep decode order monotonic. */
1504 if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
1505 (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
1509 return next_slice_param;
1519 /* Ensure the segmentation buffer is large enough for the supplied
1520 number of MBs, or re-allocate it */
/*
 * NOTE(review): the return statements and the allocation size/flags
 * arguments of drm_intel_bo_alloc() are not visible in this excerpt.
 */
1522 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
1523 unsigned int mb_width, unsigned int mb_height)
1525 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1526 /* The segmentation map is a 64-byte aligned linear buffer, with
1527 each cache line holding only 8 bits for 4 continuous MBs */
1528 const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
/* Existing buffer already large enough: keep it. */
1531 if (buf->bo && buf->bo->size >= buf_size)
/* Otherwise drop the old bo and allocate a fresh one. */
1533 drm_intel_bo_unreference(buf->bo);
1537 buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
/* The buffer is only valid if the allocation succeeded. */
1539 buf->valid = buf->bo != NULL;
1544 hevc_gen_default_iq_matrix(VAIQMatrixBufferHEVC *iq_matrix)
1547 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
1550 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
1553 memset(&iq_matrix->ScalingList16x16, 16, sizeof(iq_matrix->ScalingList16x16));
1556 memset(&iq_matrix->ScalingList32x32, 16, sizeof(iq_matrix->ScalingList32x32));
1558 /* Flat_16x16_dc_16 */
1559 memset(&iq_matrix->ScalingListDC16x16, 16, sizeof(iq_matrix->ScalingListDC16x16));
1561 /* Flat_32x32_dc_16 */
1562 memset(&iq_matrix->ScalingListDC32x32, 16, sizeof(iq_matrix->ScalingListDC32x32));