2 * Copyright (C) 2006-2012 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
28 #include "intel_batchbuffer.h"
29 #include "intel_media.h"
30 #include "i965_drv_video.h"
31 #include "i965_decoder_utils.h"
32 #include "i965_defines.h"
34 /* Set reference surface if backing store exists.
   NOTE(review): the return type, braces and return statements appear elided
   in this view of the source; presumably the function returns 1 when an
   entry is stored and 0 otherwise -- confirm against the full file. */
37 struct i965_driver_data *i965,
38 GenFrameStore *ref_frame,
39 VASurfaceID va_surface,
40 struct object_surface *obj_surface
/* Reject invalid surface IDs and surfaces with no allocated buffer object */
43 if (va_surface == VA_INVALID_ID)
46 if (!obj_surface || !obj_surface->bo)
/* Record the VA surface handle and its surface object in the frame store entry */
49 ref_frame->surface_id = va_surface;
50 ref_frame->obj_surface = obj_surface;
54 /* Check whether the codec layer incorrectly fills in slice_vertical_position.
   Scans all slice parameters and warns once when a vertical position is out of
   range or jumps by two, which indicates buggy upstream slice data.
   NOTE(review): return statements and closing braces are elided in this view;
   presumably returns a flag telling the caller to apply the workaround. */
56 mpeg2_wa_slice_vertical_position(
57 struct decode_state *decode_state,
58 VAPictureParameterBufferMPEG2 *pic_param
61 unsigned int i, j, mb_height, vpos, last_vpos = 0;
63 /* Assume progressive sequence if we got a progressive frame */
64 if (pic_param->picture_coding_extension.bits.progressive_frame)
67 /* Wait for a field coded picture */
68 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
71 assert(decode_state && decode_state->slice_params);
/* Field picture: each field is half the frame height, macroblocks are 16
   pixels tall, hence the division by 32 (rounded up) */
73 mb_height = (pic_param->vertical_size + 31) / 32;
75 for (j = 0; j < decode_state->num_slice_params; j++) {
76 struct buffer_store * const buffer_store =
77 decode_state->slice_params[j];
79 for (i = 0; i < buffer_store->num_elements; i++) {
80 VASliceParameterBufferMPEG2 * const slice_param =
81 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
83 vpos = slice_param->slice_vertical_position;
/* Out-of-range position, or a jump of exactly two rows, betrays frame-based
   (instead of field-based) vertical positions from the codec layer */
84 if (vpos >= mb_height || vpos == last_vpos + 2) {
85 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
94 /* Build MPEG-2 reference frames array.
   Fills ref_frames[] with the forward/backward reference pictures for the
   current picture, handling the second-field case where the first field of
   the current surface serves as a reference for the second field.
   NOTE(review): switch case labels, breaks and several braces are elided in
   this view (original numbering skips); code kept verbatim. */
96 mpeg2_set_reference_surfaces(
98 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
99 struct decode_state *decode_state,
100 VAPictureParameterBufferMPEG2 *pic_param
103 struct i965_driver_data * const i965 = i965_driver_data(ctx);
104 VASurfaceID va_surface;
105 unsigned pic_structure, is_second_field, n = 0;
106 struct object_surface *obj_surface;
108 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
/* Second field of a field-coded picture: not a frame and not the first field */
109 is_second_field = pic_structure != MPEG_FRAME &&
110 !pic_param->picture_coding_extension.bits.is_first_field;
112 ref_frames[0].surface_id = VA_INVALID_ID;
113 ref_frames[0].obj_surface = NULL;
115 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
116 switch (pic_param->picture_coding_type) {
/* Decoding the bottom field second: the already-decoded top field of the
   current render target is itself a reference */
118 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
119 va_surface = decode_state->current_render_target;
120 obj_surface = decode_state->render_object;
121 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
123 va_surface = pic_param->forward_reference_picture;
124 obj_surface = decode_state->reference_objects[0];
125 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* B picture: forward then backward reference */
129 va_surface = pic_param->forward_reference_picture;
130 obj_surface = decode_state->reference_objects[0];
131 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
132 va_surface = pic_param->backward_reference_picture;
133 obj_surface = decode_state->reference_objects[1];
134 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Pad remaining top-field entries by replicating entry 0 */
139 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
140 ref_frames[n++].surface_id = ref_frames[0].surface_id;
143 if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
146 ref_frames[2].surface_id = VA_INVALID_ID;
147 ref_frames[2].obj_surface = NULL;
149 /* Bottom field pictures used as reference */
150 switch (pic_param->picture_coding_type) {
/* Decoding the top field second: the already-decoded bottom field of the
   current render target is itself a reference */
152 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
153 va_surface = decode_state->current_render_target;
154 obj_surface = decode_state->render_object;
155 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
157 va_surface = pic_param->forward_reference_picture;
158 obj_surface = decode_state->reference_objects[0];
159 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
163 va_surface = pic_param->forward_reference_picture;
164 obj_surface = decode_state->reference_objects[0];
165 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
166 va_surface = pic_param->backward_reference_picture;
167 obj_surface = decode_state->reference_objects[1];
168 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Pad remaining bottom-field entries by replicating entry 2 */
173 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
174 ref_frames[n++].surface_id = ref_frames[2].surface_id;
178 /* Ensure the supplied VA surface has valid storage for decoding the
   current H.264 picture: derive the fourcc/subsampling from the picture
   parameters' chroma_format_idc, (re-)allocate the backing buffer object if
   its format differs, and fake the chroma plane for grayscale-on-NV12.
   Returns a VA_STATUS_* code.
   NOTE(review): case labels, braces and some returns are elided in this
   view; code kept verbatim. */
181 avc_ensure_surface_bo(
182 VADriverContextP ctx,
183 struct decode_state *decode_state,
184 struct object_surface *obj_surface,
185 const VAPictureParameterBufferH264 *pic_param
189 uint32_t hw_fourcc, fourcc, subsample, chroma_format;
191 /* Validate chroma format */
192 switch (pic_param->seq_fields.bits.chroma_format_idc) {
/* Monochrome (4:0:0) stream */
194 fourcc = VA_FOURCC_Y800;
195 subsample = SUBSAMPLE_YUV400;
196 chroma_format = VA_RT_FORMAT_YUV400;
/* 4:2:0 stream */
199 fourcc = VA_FOURCC_NV12;
200 subsample = SUBSAMPLE_YUV420;
201 chroma_format = VA_RT_FORMAT_YUV420;
204 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
207 /* Determine the HW surface format, bound to VA config needs */
208 if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
213 case VA_FOURCC_Y800: // Implement with an NV12 surface
214 if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
215 hw_fourcc = VA_FOURCC_NV12;
216 subsample = SUBSAMPLE_YUV420;
222 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
224 /* (Re-)allocate the underlying surface buffer store, if necessary */
225 if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
226 struct i965_driver_data * const i965 = i965_driver_data(ctx);
228 i965_destroy_surface_storage(obj_surface);
229 va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
230 i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
231 if (va_status != VA_STATUS_SUCCESS)
235 /* Fake chroma components if grayscale is implemented on top of NV12 */
236 if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
/* NV12 layout: interleaved UV plane starts right after the luma plane and
   is half its size; 0x80 is the neutral chroma value */
237 const uint32_t uv_offset = obj_surface->width * obj_surface->height;
238 const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;
240 drm_intel_gem_bo_map_gtt(obj_surface->bo);
241 memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
242 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
244 return VA_STATUS_SUCCESS;
247 /* Generate flat scaling matrices for H.264 decoding: every entry of both
   the 4x4 and 8x8 scaling lists is set to 16 (the spec's "Flat_4x4_16" /
   "Flat_8x8_16" defaults, i.e. no frequency-dependent scaling). */
249 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
252 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
255 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
258 /* Returns the POC of the supplied VA picture: the top/bottom field order
   count for a field picture, or the minimum of the two for a frame. A field
   not present in `structure` contributes INT_MAX so MIN() ignores it. */
260 avc_get_picture_poc(const VAPictureH264 *va_pic)
262 int structure, field_poc[2];
/* Isolate the field flags: 0 for a frame, or exactly one of TOP/BOTTOM */
264 structure = va_pic->flags &
265 (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD);
266 field_poc[0] = structure != VA_PICTURE_H264_BOTTOM_FIELD ?
267 va_pic->TopFieldOrderCnt : INT_MAX;
268 field_poc[1] = structure != VA_PICTURE_H264_TOP_FIELD ?
269 va_pic->BottomFieldOrderCnt : INT_MAX;
270 return MIN(field_poc[0], field_poc[1]);
273 /* Returns a unique picture ID that represents the supplied VA surface
   object, or -1 if the ID does not fit in 16 bits. */
275 avc_get_picture_id(struct object_surface *obj_surface)
279 /* This highly depends on how the internal VA objects are organized.
281 Theory of operations:
282 The VA objects are maintained in heaps so that any released VA
283 surface will become free again for future allocation. This means
284 that holes in there are filled in for subsequent allocations.
285 So, this ultimately means that we could just use the Heap ID of
286 the VA surface as the resulting picture ID (16 bits) */
/* +1 keeps 0 reserved; heap IDs are expected to stay within 16 bits */
287 pic_id = 1 + (obj_surface->base.id & OBJECT_HEAP_ID_MASK);
288 return (pic_id <= 0xffff) ? pic_id : -1;
291 /* Finds the VA/H264 picture associated with the specified VA surface id.
   Scans pic_list for a valid entry whose picture_id matches `id`.
   NOTE(review): the return statements are elided in this view; presumably
   returns the matching index (or the VAPictureH264 *) and a not-found
   sentinel otherwise -- confirm against the full file. */
293 avc_find_picture(VASurfaceID id, VAPictureH264 *pic_list, int pic_list_count)
297 if (id != VA_INVALID_ID) {
298 for (i = 0; i < pic_list_count; i++) {
299 VAPictureH264 * const va_pic = &pic_list[i];
300 if (va_pic->picture_id == id &&
301 !(va_pic->flags & VA_PICTURE_H264_INVALID))
308 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
309 /* XXX: slice_data_bit_offset does not account for EPB */
311 avc_get_first_mb_bit_offset(
312 dri_bo *slice_data_bo,
313 VASliceParameterBufferH264 *slice_param,
314 unsigned int mode_flag
317 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
/* CABAC slice data starts byte-aligned, so round the offset up to 8 bits */
319 if (mode_flag == ENTROPY_CABAC)
320 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8)
321 return slice_data_bit_offset;
324 /* Get first macroblock bit offset for BSD, with EPB count (AVC).
   Reads the slice header bytes back from the bo and counts the 0x00 0x00
   0x03 emulation prevention bytes so they can be added to the reported
   bit offset. */
325 /* XXX: slice_data_bit_offset does not account for EPB */
327 avc_get_first_mb_bit_offset_with_epb(
328 dri_bo *slice_data_bo,
329 VASliceParameterBufferH264 *slice_param,
330 unsigned int mode_flag
333 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
334 unsigned int out_slice_data_bit_offset;
335 unsigned int i, j, n, buf_size, data_size, header_size;
339 header_size = slice_param->slice_data_bit_offset / 8;
340 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
341 buf_size = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
343 if (buf_size > data_size)
344 buf_size = data_size;
/* NOTE(review): alloca of a size derived from slice parameters -- assumed
   bounded by upstream validation; stack overflow risk if not. Confirm. */
346 buf = alloca(buf_size);
347 ret = dri_bo_get_subdata(
348 slice_data_bo, slice_param->slice_data_offset,
/* Count EPBs: every 0x00 0x00 0x03 sequence within the header consumes one
   extra byte (i) without advancing the logical header position (j) */
353 for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
354 if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
358 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
360 if (mode_flag == ENTROPY_CABAC)
361 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
362 return out_slice_data_bit_offset;
/* Packs one MFX_AVC_REF_IDX_STATE entry byte for a reference picture:
   Bit6 = long-term flag, Bit5 = "not a single field", Bits4:1 = frame
   store ID, Bit0 = bottom-field flag. */
365 static inline uint8_t
366 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
368 /* The H.264 standard, and the VA-API specification, allows for at
369 least 3 states for a picture: "used for short-term reference",
370 "used for long-term reference", or considered as not used for
373 The latter is used in the MVC inter prediction and inter-view
374 prediction process (H.8.4). This has an incidence on the
375 colZeroFlag variable, as defined in 8.4.1.2.
377 Since it is not possible to directly program that flag, let's
378 make the hardware derive this value by assimilating "considered
379 as not used for reference" to a "not used for short-term
380 reference", and subsequently making it "used for long-term
381 reference" to fit the definition of Bit6 here */
382 const unsigned int ref_flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE |
383 VA_PICTURE_H264_LONG_TERM_REFERENCE;
/* Anything that is not explicitly short-term is treated as long-term
   (including "not used for reference" -- see the comment above) */
384 const unsigned int is_long_term =
385 ((va_pic->flags & ref_flags) != VA_PICTURE_H264_SHORT_TERM_REFERENCE);
386 const unsigned int is_top_field =
387 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
388 const unsigned int is_bottom_field =
389 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
391 return ((is_long_term << 6) |
/* Bit5 is set for frames (neither or both field flags), clear for a field */
392 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
393 (frame_store_id << 1) |
394 ((is_top_field ^ 1) & is_bottom_field));
397 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB).
   For each picture in ref_list, looks up its frame store slot and packs the
   per-entry state byte via get_ref_idx_state_1(). Entries not found in the
   DPB are flagged with 0x80.
   NOTE(review): the `state` output parameter declaration and the handling
   of invalid entries / trailing entries are elided in this view. */
399 gen5_fill_avc_ref_idx_state(
401 const VAPictureH264 ref_list[32],
402 unsigned int ref_list_count,
403 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
408 for (i = 0; i < ref_list_count; i++) {
409 const VAPictureH264 * const va_pic = &ref_list[i];
411 if ((va_pic->flags & VA_PICTURE_H264_INVALID) ||
412 va_pic->picture_id == VA_INVALID_ID) {
/* Locate the frame store slot holding this reference surface */
417 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
418 if (frame_store[j].surface_id == va_pic->picture_id)
422 if (j != MAX_GEN_REFERENCE_FRAMES) { // Found picture in the Frame Store
423 const GenFrameStore * const fs = &frame_store[j];
424 assert(fs->frame_store_id == j); // Current architecture/assumption
425 state[i] = get_ref_idx_state_1(va_pic, fs->frame_store_id);
428 WARN_ONCE("Invalid RefPicListX[] entry!!! It is not included in DPB\n");
429 state[i] = get_ref_idx_state_1(va_pic, 0) | 0x80;
437 /* Emit Reference List Entries (Gen6+: SNB, IVB): builds the 32-byte
   ref_idx_state payload and emits a 10-dword MFX_AVC_REF_IDX_STATE command
   into the BCS batch buffer. */
439 gen6_send_avc_ref_idx_state_1(
440 struct intel_batchbuffer *batch,
442 const VAPictureH264 *ref_list,
443 unsigned int ref_list_count,
444 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
447 uint8_t ref_idx_state[32];
449 BEGIN_BCS_BATCH(batch, 10);
/* Dword count encoded as (total - 2), per MFX command layout */
450 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
451 OUT_BCS_BATCH(batch, list);
452 gen5_fill_avc_ref_idx_state(
454 ref_list, ref_list_count,
457 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
458 ADVANCE_BCS_BATCH(batch);
/* Emits ref list L0 (P/B slices) and additionally L1 (B slices) for the
   given slice; intra slices (I/SI) need no reference lists. */
462 gen6_send_avc_ref_idx_state(
463 struct intel_batchbuffer *batch,
464 const VASliceParameterBufferH264 *slice_param,
465 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
468 if (slice_param->slice_type == SLICE_TYPE_I ||
469 slice_param->slice_type == SLICE_TYPE_SI)
/* List 0 is used by both P and B slices */
473 gen6_send_avc_ref_idx_state_1(
475 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
479 if (slice_param->slice_type != SLICE_TYPE_B)
/* List 1 only exists for B slices */
483 gen6_send_avc_ref_idx_state_1(
485 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
/* Emits an MFX_AVC_SLICE_STATE for a "phantom" (filler) slice covering the
   macroblocks between the end of the real slice data and the start of the
   next slice (or the end of the picture when next_slice_param is NULL). */
491 gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx,
492 VAPictureParameterBufferH264 *pic_param,
493 VASliceParameterBufferH264 *next_slice_param,
494 struct intel_batchbuffer *batch)
496 int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
497 int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
498 int slice_hor_pos, slice_ver_pos, slice_start_mb_num, next_slice_hor_pos, next_slice_ver_pos;
/* MBAFF doubles the MB address granularity (frame-coded + mb-adaptive) */
499 int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
500 pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
502 if (next_slice_param) {
503 int first_mb_in_next_slice;
507 slice_start_mb_num = 0;
508 first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
509 next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
510 next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
/* Last slice of the picture: start MB is the total MB count of the picture
   (halved for a field picture) and there is no next slice */
513 slice_ver_pos = height_in_mbs;
514 slice_start_mb_num = width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
515 next_slice_hor_pos = 0;
516 next_slice_ver_pos = 0;
519 BEGIN_BCS_BATCH(batch, 11);
520 OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
521 OUT_BCS_BATCH(batch, 0);
522 OUT_BCS_BATCH(batch, 0);
523 OUT_BCS_BATCH(batch, 0);
525 slice_ver_pos << 24 |
526 slice_hor_pos << 16 |
527 slice_start_mb_num << 0);
529 next_slice_ver_pos << 16 |
530 next_slice_hor_pos << 0);
531 OUT_BCS_BATCH(batch, 0);
532 OUT_BCS_BATCH(batch, 0);
533 OUT_BCS_BATCH(batch, 0);
534 OUT_BCS_BATCH(batch, 0);
535 OUT_BCS_BATCH(batch, 0);
536 ADVANCE_BCS_BATCH(batch);
/* Emits an empty (all-zero payload) MFD_AVC_BSD_OBJECT command, paired with
   the phantom slice state so the BSD unit terminates the picture cleanly. */
540 gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx,
541 VAPictureParameterBufferH264 *pic_param,
542 struct intel_batchbuffer *batch)
545 BEGIN_BCS_BATCH(batch, 6);
546 OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
547 OUT_BCS_BATCH(batch, 0);
548 OUT_BCS_BATCH(batch, 0);
549 OUT_BCS_BATCH(batch, 0);
550 OUT_BCS_BATCH(batch, 0);
551 OUT_BCS_BATCH(batch, 0);
552 ADVANCE_BCS_BATCH(batch);
/* Convenience wrapper: emits the phantom slice state followed by the
   matching empty BSD object. */
556 gen6_mfd_avc_phantom_slice(VADriverContextP ctx,
557 VAPictureParameterBufferH264 *pic_param,
558 VASliceParameterBufferH264 *next_slice_param,
559 struct intel_batchbuffer *batch)
561 gen6_mfd_avc_phantom_slice_state(ctx, pic_param, next_slice_param, batch);
562 gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param, batch);
565 /* Comparison function for sorting out the array of free frame store
   entries: qsort callback ordering GenFrameStore pointers by increasing
   ref_age (least recently used first). */
567 compare_avc_ref_store_func(const void *p1, const void *p2)
569 const GenFrameStore * const fs1 = *((GenFrameStore **)p1);
570 const GenFrameStore * const fs2 = *((GenFrameStore **)p2);
572 return fs1->ref_age - fs2->ref_age;
/* Updates the codec frame store so every current reference surface has a
   stable slot: keeps entries still referenced, recycles the oldest unused
   slots (by ref_age) for new references, and records each surface's slot in
   its GenCodecSurface private data.
   NOTE(review): several statements (age bookkeeping, add_refs marking,
   continue/free paths, final free of free_refs) are elided in this view;
   code kept verbatim. */
576 intel_update_codec_frame_store_index(
577 VADriverContextP ctx,
578 struct decode_state *decode_state,
580 GenFrameStore frame_store[],
582 GenFrameStoreContext *fs_ctx
585 GenFrameStore **free_refs = calloc(num_elements, sizeof(GenFrameStore *));
586 uint32_t used_refs = 0, add_refs = 0;
588 int i, n, num_free_refs;
593 /* Detect changes of access unit */
594 if (fs_ctx->age == 0 || fs_ctx->prev_poc != poc)
596 fs_ctx->prev_poc = poc;
599 /* Tag entries that are still available in our Frame Store */
600 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
601 struct object_surface * const obj_surface =
602 decode_state->reference_objects[i];
606 GenCodecSurface * const codec_surface = obj_surface->private_data;
/* A surface that already owns a slot, and whose slot still holds it, is
   kept in place and marked used */
609 if (codec_surface->frame_store_id >= 0) {
610 GenFrameStore * const fs =
611 &frame_store[codec_surface->frame_store_id];
612 if (fs->surface_id == obj_surface->base.id) {
613 fs->obj_surface = obj_surface;
615 used_refs |= 1 << fs->frame_store_id;
622 /* Build and sort out the list of retired candidates. The resulting
623 list is ordered by increasing age when they were last used */
624 for (i = 0, n = 0; i < num_elements; i++) {
625 if (!(used_refs & (1 << i))) {
626 GenFrameStore * const fs = &frame_store[i];
627 fs->obj_surface = NULL;
632 qsort(&free_refs[0], n, sizeof(free_refs[0]), compare_avc_ref_store_func);
634 /* Append the new reference frames */
635 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
636 struct object_surface * const obj_surface =
637 decode_state->reference_objects[i];
638 if (!obj_surface || !(add_refs & (1 << i)))
641 GenCodecSurface * const codec_surface = obj_surface->private_data;
644 if (n < num_free_refs) {
645 GenFrameStore * const fs = free_refs[n++];
646 fs->surface_id = obj_surface->base.id;
647 fs->obj_surface = obj_surface;
648 fs->frame_store_id = fs - frame_store;
650 codec_surface->frame_store_id = fs->frame_store_id;
653 WARN_ONCE("No free slot found for DPB reference list!!!\n");
/* AVC front-end for intel_update_codec_frame_store_index(): derives the
   POC from CurrPic and uses the AVC reference frame count. */
660 intel_update_avc_frame_store_index(
661 VADriverContextP ctx,
662 struct decode_state *decode_state,
663 VAPictureParameterBufferH264 *pic_param,
664 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES],
665 GenFrameStoreContext *fs_ctx
668 intel_update_codec_frame_store_index(ctx,
670 avc_get_picture_poc(&pic_param->CurrPic),
672 MAX_GEN_REFERENCE_FRAMES,
/* Builds the HEVC frame store in compact form from the valid reference
   objects, then invalidates every remaining slot.
   NOTE(review): the skip/continue for empty reference_objects entries is
   elided in this view; code kept verbatim. */
677 intel_update_hevc_frame_store_index(
678 VADriverContextP ctx,
679 struct decode_state *decode_state,
680 VAPictureParameterBufferHEVC *pic_param,
681 GenFrameStore frame_store[MAX_GEN_HCP_REFERENCE_FRAMES],
682 GenFrameStoreContext *fs_ctx
687 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
688 struct object_surface * const obj_surface = decode_state->reference_objects[i];
693 GenFrameStore * const fs = &frame_store[n];
694 fs->surface_id = obj_surface->base.id;
695 fs->obj_surface = obj_surface;
696 fs->frame_store_id = n++;
698 if (n == MAX_GEN_HCP_REFERENCE_FRAMES)
/* Mark the unused tail entries as invalid */
702 for (; n < MAX_GEN_HCP_REFERENCE_FRAMES; n++) {
703 GenFrameStore * const fs = &frame_store[n];
705 fs->surface_id = VA_INVALID_ID;
706 fs->obj_surface = NULL;
707 fs->frame_store_id = -1;
/* Gen7.5 variant: rebuilds the AVC frame store from scratch every picture,
   in compact form (no persistent slot assignment, unlike the fs_ctx-based
   path used by older gens). */
712 gen75_update_avc_frame_store_index(
713 VADriverContextP ctx,
714 struct decode_state *decode_state,
715 VAPictureParameterBufferH264 *pic_param,
716 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
721 /* Construct the Frame Store array, in compact form. i.e. empty or
722 invalid entries are discarded. */
723 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
724 struct object_surface * const obj_surface =
725 decode_state->reference_objects[i];
729 GenFrameStore * const fs = &frame_store[n];
730 fs->surface_id = obj_surface->base.id;
731 fs->obj_surface = obj_surface;
732 fs->frame_store_id = n++;
735 /* Any remaining entry is marked as invalid */
736 for (; n < MAX_GEN_REFERENCE_FRAMES; n++) {
737 GenFrameStore * const fs = &frame_store[n];
738 fs->surface_id = VA_INVALID_ID;
739 fs->obj_surface = NULL;
740 fs->frame_store_id = -1;
/* Fills the 16-entry picture-ID remap list from the compact frame store.
   NOTE(review): the return value (presumably whether any valid pic_id was
   produced, consumed by gen75_send_avc_picid_state) and the zero-fill
   statement for the tail are elided in this view. */
745 gen75_fill_avc_picid_list(
746 uint16_t pic_ids[16],
747 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
752 /* Fill in with known picture IDs. The Frame Store array is in
753 compact form, i.e. empty entries are only to be found at the
754 end of the array: there are no holes in the set of active
756 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
757 GenFrameStore * const fs = &frame_store[i];
758 if (!fs->obj_surface)
760 pic_id = avc_get_picture_id(fs->obj_surface);
766 /* When an element of the list is not relevant the value of the
767 picture ID shall be set to 0 */
768 for (; i < MAX_GEN_REFERENCE_FRAMES; i++)
/* Emits an MFD_AVC_PICID_STATE command carrying the 16-entry picture-ID
   remap list; skipped when the list could not be filled. */
774 gen75_send_avc_picid_state(
775 struct intel_batchbuffer *batch,
776 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
779 uint16_t pic_ids[16];
781 if (!gen75_fill_avc_picid_list(pic_ids, frame_store))
784 BEGIN_BCS_BATCH(batch, 10);
785 OUT_BCS_BATCH(batch, MFD_AVC_PICID_STATE | (10 - 2));
786 OUT_BCS_BATCH(batch, 0); // enable Picture ID Remapping
787 intel_batchbuffer_data(batch, pic_ids, sizeof(pic_ids));
788 ADVANCE_BCS_BATCH(batch);
/* Fills the VC-1 frame store: slot 0 = forward reference, slot 1 = backward
   reference (falling back to slot 0 when absent), remaining slots replicate
   slots 0/1 alternately. */
793 intel_update_vc1_frame_store_index(VADriverContextP ctx,
794 struct decode_state *decode_state,
795 VAPictureParameterBufferVC1 *pic_param,
796 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
798 struct object_surface *obj_surface;
801 obj_surface = decode_state->reference_objects[0];
803 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
806 frame_store[0].surface_id = VA_INVALID_ID;
807 frame_store[0].obj_surface = NULL;
809 frame_store[0].surface_id = pic_param->forward_reference_picture;
810 frame_store[0].obj_surface = obj_surface;
813 obj_surface = decode_state->reference_objects[1];
/* No backward reference: reuse the forward reference entry */
815 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
818 frame_store[1].surface_id = frame_store[0].surface_id;
819 frame_store[1].obj_surface = frame_store[0].obj_surface;
821 frame_store[1].surface_id = pic_param->backward_reference_picture;
822 frame_store[1].obj_surface = obj_surface;
824 for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
825 frame_store[i].surface_id = frame_store[i % 2].surface_id;
826 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
/* Fills the VP8 frame store: slot 0 = last frame, slot 1 = golden frame,
   slot 2 = altref frame (slots 1 and 2 fall back to slot 0 when absent). */
832 intel_update_vp8_frame_store_index(VADriverContextP ctx,
833 struct decode_state *decode_state,
834 VAPictureParameterBufferVP8 *pic_param,
835 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
837 struct object_surface *obj_surface;
840 obj_surface = decode_state->reference_objects[0];
842 if (pic_param->last_ref_frame == VA_INVALID_ID ||
845 frame_store[0].surface_id = VA_INVALID_ID;
846 frame_store[0].obj_surface = NULL;
848 frame_store[0].surface_id = pic_param->last_ref_frame;
849 frame_store[0].obj_surface = obj_surface;
852 obj_surface = decode_state->reference_objects[1];
854 if (pic_param->golden_ref_frame == VA_INVALID_ID ||
857 frame_store[1].surface_id = frame_store[0].surface_id;
858 frame_store[1].obj_surface = frame_store[0].obj_surface;
860 frame_store[1].surface_id = pic_param->golden_ref_frame;
861 frame_store[1].obj_surface = obj_surface;
864 obj_surface = decode_state->reference_objects[2];
866 if (pic_param->alt_ref_frame == VA_INVALID_ID ||
869 frame_store[2].surface_id = frame_store[0].surface_id;
870 frame_store[2].obj_surface = frame_store[0].obj_surface;
872 frame_store[2].surface_id = pic_param->alt_ref_frame;
873 frame_store[2].obj_surface = obj_surface;
/* NOTE(review): the tail fill cycles only slots 0 and 1 (i % 2); slot 2
   (altref) is never replicated -- confirm this is intended and not i % 3. */
876 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
877 frame_store[i].surface_id = frame_store[i % 2].surface_id;
878 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
883 //Obtain the reference frames from the decode state and store them in frame store.
/* Slot 0 = last, slot 1 = golden, slot 2 = altref; the golden/altref slots
   fall back to slot 0 when their reference is invalid. */
885 intel_update_vp9_frame_store_index(VADriverContextP ctx,
886 struct decode_state *decode_state,
887 VADecPictureParameterBufferVP9 *pic_param,
888 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
890 struct object_surface *obj_surface;
893 //Check for the validity of the last reference frame
894 obj_surface = decode_state->reference_objects[0];
/* VP9 indirection: pic_fields carry indices into reference_frames[] */
896 index = pic_param->pic_fields.bits.last_ref_frame;
897 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
900 frame_store[0].surface_id = VA_INVALID_ID;
901 frame_store[0].obj_surface = NULL;
903 frame_store[0].surface_id = pic_param->reference_frames[index];
904 frame_store[0].obj_surface = obj_surface;
907 //Check for the validity of the golden reference frame
908 obj_surface = decode_state->reference_objects[1];
910 index = pic_param->pic_fields.bits.golden_ref_frame;
911 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
914 frame_store[1].surface_id = frame_store[0].surface_id;
915 frame_store[1].obj_surface = frame_store[0].obj_surface;
917 frame_store[1].surface_id = pic_param->reference_frames[index];
918 frame_store[1].obj_surface = obj_surface;
921 //Check for the validity of the altref reference frame
922 obj_surface = decode_state->reference_objects[2];
924 index = pic_param->pic_fields.bits.alt_ref_frame;
925 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
928 frame_store[2].surface_id = frame_store[0].surface_id;
929 frame_store[2].obj_surface = frame_store[0].obj_surface;
931 frame_store[2].surface_id = pic_param->reference_frames[index];
932 frame_store[2].obj_surface = obj_surface;
935 //Set the remaining framestores to either last/golden/altref
/* NOTE(review): despite the comment above, i % 2 replicates only slots 0
   and 1 -- altref (slot 2) is never reused. Confirm intended vs. i % 3. */
936 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
937 frame_store[i].surface_id = frame_store[i % 2].surface_id;
938 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
/* Validates the H.264 picture/slice parameters for the current decode call:
   checks CurrPic, rejects FMO/ASO for non-Baseline profiles, resolves
   ReferenceFrames[] into decode_state->reference_objects[] (ensuring each
   surface's bo exists), and sanity-checks slice ordering. Returns a
   VA_STATUS_* code.
   NOTE(review): some error-path statements (goto error, loop advances,
   NULL-filling of reference_objects holes) are elided in this view. */
944 intel_decoder_check_avc_parameter(VADriverContextP ctx,
945 VAProfile h264_profile,
946 struct decode_state *decode_state)
948 struct i965_driver_data *i965 = i965_driver_data(ctx);
949 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
951 struct object_surface *obj_surface;
953 VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
/* The current picture must be valid and match the render target */
956 ASSERT_RET(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID), VA_STATUS_ERROR_INVALID_PARAMETER);
957 ASSERT_RET((pic_param->CurrPic.picture_id != VA_INVALID_SURFACE), VA_STATUS_ERROR_INVALID_PARAMETER);
958 ASSERT_RET((pic_param->CurrPic.picture_id == decode_state->current_render_target), VA_STATUS_ERROR_INVALID_PARAMETER);
960 if ((h264_profile != VAProfileH264Baseline)) {
961 if (pic_param->num_slice_groups_minus1 ||
962 pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
963 WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
968 /* Fill in the reference objects array with the actual VA surface
969 objects with 1:1 correspondance with any entry in ReferenceFrames[],
970 i.e. including "holes" for invalid entries, that are expanded
971 to NULL in the reference_objects[] array */
972 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
973 const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];
976 if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
977 va_pic->picture_id != VA_INVALID_ID) {
978 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
980 return VA_STATUS_ERROR_INVALID_SURFACE;
983 * Sometimes a dummy frame comes from the upper layer
984 * library, call i965_check_alloc_surface_bo() to make
985 * sure the store buffer is allocated for this reference
988 va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
990 if (va_status != VA_STATUS_SUCCESS)
993 decode_state->reference_objects[i] = obj_surface;
996 for (j = 0; j < decode_state->num_slice_params; j++) {
997 ASSERT_RET((decode_state->slice_params && decode_state->slice_params[j]->buffer), VA_STATUS_ERROR_INVALID_PARAMETER);
998 slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1000 if (j == decode_state->num_slice_params - 1)
1001 next_slice_group_param = NULL;
1003 next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
1005 for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1007 if (i < decode_state->slice_params[j]->num_elements - 1)
1008 next_slice_param = slice_param + 1;
1010 next_slice_param = next_slice_group_param;
1012 if (next_slice_param != NULL) {
1013 /* If the mb position of next_slice is less than or equal to the current slice,
1014 * discard the current frame.
1016 if (next_slice_param->first_mb_in_slice <= slice_param->first_mb_in_slice) {
1017 next_slice_param = NULL;
1018 WARN_ONCE("!!!incorrect slice_param. The first_mb_in_slice of next_slice is less"
1019 " than or equal to that in current slice\n");
1026 return VA_STATUS_SUCCESS;
/* error: label target presumably elided above this line -- confirm */
1029 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Validates the MPEG-2 picture parameters: resolves the forward (P/B) and
   backward (B only) reference pictures into reference_objects[], NULLing
   missing ones, then clears the remaining entries. Returns VA_STATUS_*.
   NOTE(review): the I-picture branch body and the else/error paths are
   elided in this view; code kept verbatim. */
1033 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
1034 struct decode_state *decode_state)
1036 struct i965_driver_data *i965 = i965_driver_data(ctx);
1037 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1038 struct object_surface *obj_surface;
/* I pictures need no references */
1041 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
1042 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
1043 obj_surface = SURFACE(pic_param->forward_reference_picture);
1045 if (!obj_surface || !obj_surface->bo)
1046 decode_state->reference_objects[i++] = NULL;
1048 decode_state->reference_objects[i++] = obj_surface;
1049 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
1050 obj_surface = SURFACE(pic_param->forward_reference_picture);
1052 if (!obj_surface || !obj_surface->bo)
1053 decode_state->reference_objects[i++] = NULL;
1055 decode_state->reference_objects[i++] = obj_surface;
1057 obj_surface = SURFACE(pic_param->backward_reference_picture);
1059 if (!obj_surface || !obj_surface->bo)
1060 decode_state->reference_objects[i++] = NULL;
1062 decode_state->reference_objects[i++] = obj_surface;
/* Clear the unused tail of the reference objects array */
1066 for ( ; i < 16; i++)
1067 decode_state->reference_objects[i] = NULL;
1069 return VA_STATUS_SUCCESS;
1072 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Validates the VC-1 picture parameters: rejects interlaced coding modes,
   then resolves forward/backward references per picture type (I/BI need
   none; P needs forward; B needs both) into reference_objects[].
   Returns VA_STATUS_*.
   NOTE(review): else/error paths are elided in this view. */
1076 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
1077 struct decode_state *decode_state)
1079 struct i965_driver_data *i965 = i965_driver_data(ctx);
1080 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1081 struct object_surface *obj_surface;
/* Interlaced frame/field coding is not supported by this decoder path */
1084 if (pic_param->sequence_fields.bits.interlace == 1 &&
1085 pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1086 return VA_STATUS_ERROR_DECODING_ERROR;
/* picture_type: 0=I, 3=BI (no refs); 1=P, 4=skipped (forward); 2=B (both) */
1089 if (pic_param->picture_fields.bits.picture_type == 0 ||
1090 pic_param->picture_fields.bits.picture_type == 3) {
1091 } else if (pic_param->picture_fields.bits.picture_type == 1 ||
1092 pic_param->picture_fields.bits.picture_type == 4) {
1093 obj_surface = SURFACE(pic_param->forward_reference_picture);
1095 if (!obj_surface || !obj_surface->bo)
1096 decode_state->reference_objects[i++] = NULL;
1098 decode_state->reference_objects[i++] = obj_surface;
1099 } else if (pic_param->picture_fields.bits.picture_type == 2) {
1100 obj_surface = SURFACE(pic_param->forward_reference_picture);
1102 if (!obj_surface || !obj_surface->bo)
1103 decode_state->reference_objects[i++] = NULL;
1105 decode_state->reference_objects[i++] = obj_surface;
1107 obj_surface = SURFACE(pic_param->backward_reference_picture);
1109 if (!obj_surface || !obj_surface->bo)
1110 decode_state->reference_objects[i++] = NULL;
1112 decode_state->reference_objects[i++] = obj_surface;
/* Clear the unused tail of the reference objects array */
1116 for ( ; i < 16; i++)
1117 decode_state->reference_objects[i] = NULL;
1119 return VA_STATUS_SUCCESS;
1122 return VA_STATUS_ERROR_INVALID_PARAMETER;
/*
 * Collect the VP8 reference surfaces (last, golden, alt-ref frames)
 * into decode_state->reference_objects[], in that fixed order.
 * A reference that is VA_INVALID_SURFACE is skipped entirely; one that
 * resolves to a surface without a backing bo is recorded as NULL.
 * Always returns VA_STATUS_SUCCESS.
 */
1126 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
1127 struct decode_state *decode_state)
1129 struct i965_driver_data *i965 = i965_driver_data(ctx);
1130 VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
1131 struct object_surface *obj_surface;
/* Last reference frame */
1134 if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
1135 obj_surface = SURFACE(pic_param->last_ref_frame);
1137 if (obj_surface && obj_surface->bo)
1138 decode_state->reference_objects[i++] = obj_surface;
1140 decode_state->reference_objects[i++] = NULL;
/* Golden reference frame */
1143 if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
1144 obj_surface = SURFACE(pic_param->golden_ref_frame);
1146 if (obj_surface && obj_surface->bo)
1147 decode_state->reference_objects[i++] = obj_surface;
1149 decode_state->reference_objects[i++] = NULL;
/* Alternate reference frame */
1152 if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
1153 obj_surface = SURFACE(pic_param->alt_ref_frame);
1155 if (obj_surface && obj_surface->bo)
1156 decode_state->reference_objects[i++] = obj_surface;
1158 decode_state->reference_objects[i++] = NULL;
/* Clear the remaining reference slots */
1161 for ( ; i < 16; i++)
1162 decode_state->reference_objects[i] = NULL;
1164 return VA_STATUS_SUCCESS;
/*
 * Make sure obj_surface has a backing buffer object in the fourcc
 * required by the HEVC stream: P010 when either luma or chroma bit
 * depth exceeds 8 bits, NV12 otherwise. If the surface has no bo, or
 * its current fourcc does not match, the storage is destroyed and
 * re-allocated via i965_check_alloc_surface_bo().
 * Returns the VAStatus of the (re-)allocation, or VA_STATUS_SUCCESS
 * when no work was needed.
 */
1168 hevc_ensure_surface_bo(
1169 VADriverContextP ctx,
1170 struct decode_state *decode_state,
1171 struct object_surface *obj_surface,
1172 const VAPictureParameterBufferHEVC *pic_param
1175 VAStatus va_status = VA_STATUS_SUCCESS;
1177 unsigned int fourcc = VA_FOURCC_NV12;
/* 10-bit content (bit depth beyond 8 for luma or chroma) requires P010 */
1179 if((pic_param->bit_depth_luma_minus8 > 0)
1180 || (pic_param->bit_depth_chroma_minus8 > 0))
1182 if(obj_surface->fourcc != VA_FOURCC_P010)
1185 fourcc = VA_FOURCC_P010;
/* 8-bit content requires NV12 */
1188 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1191 fourcc = VA_FOURCC_NV12;
1194 /* (Re-)allocate the underlying surface buffer store, if necessary.
   NOTE(review): 'update' is declared/set on lines elided from this
   excerpt — presumably flagged when the fourcc mismatched above. */
1195 if (!obj_surface->bo || update) {
1196 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1198 i965_destroy_surface_storage(obj_surface);
1200 va_status = i965_check_alloc_surface_bo(ctx,
1202 i965->codec_info->has_tiled_surface,
1210 //Ensure there is a tiled render surface in the required format (NV12, or P010 for 10-bit HEVC/VP9). If not, create one.
/*
 * Make sure obj_surface has a backing buffer object in the fourcc
 * required by the VP9 stream: P010 for profile 2 and above (10-bit),
 * NV12 otherwise. If the surface has no bo, or its current fourcc
 * does not match, the storage is destroyed and re-allocated via
 * i965_check_alloc_surface_bo().
 * Returns the VAStatus of the (re-)allocation, or VA_STATUS_SUCCESS
 * when no work was needed.
 */
1212 vp9_ensure_surface_bo(
1213 VADriverContextP ctx,
1214 struct decode_state *decode_state,
1215 struct object_surface *obj_surface,
1216 const VADecPictureParameterBufferVP9 *pic_param
1219 VAStatus va_status = VA_STATUS_SUCCESS;
1221 unsigned int fourcc = VA_FOURCC_NV12;
/* VP9 profile >= 2 carries 10-bit samples and requires P010 */
1223 if(pic_param->profile >= 2)
1225 if(obj_surface->fourcc != VA_FOURCC_P010)
1228 fourcc = VA_FOURCC_P010;
/* Profiles 0/1 require NV12 */
1231 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1234 fourcc = VA_FOURCC_NV12;
1237 /* (Re-)allocate the underlying surface buffer store, if necessary.
   NOTE(review): 'update' is declared/set on lines elided from this
   excerpt — presumably flagged when the fourcc mismatched above. */
1238 if (!obj_surface->bo || update) {
1239 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1241 i965_destroy_surface_storage(obj_surface);
1243 va_status = i965_check_alloc_surface_bo(ctx,
1245 i965->codec_info->has_tiled_surface,
/*
 * Validate the HEVC picture parameter buffer and populate
 * decode_state->reference_objects[].
 *
 * Checks: CurrPic must be valid and match the current render target;
 * picture width/height must be multiples of the minimum coding block
 * size derived from log2_min_luma_coding_block_size_minus3.
 * Each active reference (ST_CURR_BEFORE / ST_CURR_AFTER / LT_CURR)
 * is resolved to its surface object and its backing bo is ensured via
 * hevc_ensure_surface_bo(); inactive/invalid entries become NULL.
 *
 * Returns VA_STATUS_SUCCESS, or an error status
 * (VA_STATUS_ERROR_INVALID_PARAMETER / VA_STATUS_ERROR_INVALID_SURFACE
 * or a failure propagated from hevc_ensure_surface_bo()).
 */
1254 intel_decoder_check_hevc_parameter(VADriverContextP ctx,
1255 struct decode_state *decode_state)
1257 struct i965_driver_data *i965 = i965_driver_data(ctx);
1258 VAPictureParameterBufferHEVC *pic_param = (VAPictureParameterBufferHEVC *)decode_state->pic_param->buffer;
1259 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1260 struct object_surface *obj_surface;
/* The current picture must be a valid surface... */
1264 if (pic_param->CurrPic.flags & VA_PICTURE_HEVC_INVALID ||
1265 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
/* ...and must be the surface the application is rendering into */
1268 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
/* Picture dimensions must be a multiple of the minimum CB size */
1271 min_cb_size = (1 << (pic_param->log2_min_luma_coding_block_size_minus3 + 3));
1273 if (pic_param->pic_width_in_luma_samples % min_cb_size ||
1274 pic_param->pic_height_in_luma_samples % min_cb_size)
1277 /* Fill in the reference objects array with the actual VA surface
1278 objects with 1:1 correspondence with any entry in ReferenceFrames[],
1279 i.e. including "holes" for invalid entries, that are expanded
1280 to NULL in the reference_objects[] array */
1281 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
1282 const VAPictureHEVC * const va_pic = &pic_param->ReferenceFrames[i];
1287 * Only the index with (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1288 * VA_PICTURE_HEVC_RPS_ST_CURR_AFTER | VA_PICTURE_HEVC_RPS_LT_CURR)
/* Only valid entries referenced by the current picture's RPS are kept */
1291 if (!(va_pic->flags & VA_PICTURE_HEVC_INVALID) &&
1292 (va_pic->picture_id != VA_INVALID_ID) &&
1293 (va_pic->flags & (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1294 VA_PICTURE_HEVC_RPS_ST_CURR_AFTER |
1295 VA_PICTURE_HEVC_RPS_LT_CURR))) {
1297 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
1300 va_status = VA_STATUS_ERROR_INVALID_SURFACE;
/* Make sure the reference surface has a backing store in the right format */
1304 va_status = hevc_ensure_surface_bo(ctx, decode_state, obj_surface,
1307 if (va_status != VA_STATUS_SUCCESS)
1311 decode_state->reference_objects[i] = obj_surface;
1314 va_status = VA_STATUS_SUCCESS;
1320 //Obtains reference frames from the picture parameter and
1321 //then sets the reference frames in the decode_state
/*
 * Validate the VP9 picture parameter buffer and populate
 * decode_state->reference_objects[] with the last/golden/alt-ref
 * surfaces, resolved through the pic_fields reference indices into
 * reference_frames[].
 *
 * Checks: the stream profile must not exceed the negotiated VA
 * profile, and frame dimensions must fit in 12 bits (width/height
 * at most 4096). A reference without a backing bo becomes NULL.
 *
 * Returns VA_STATUS_SUCCESS or VA_STATUS_ERROR_INVALID_PARAMETER.
 */
1323 intel_decoder_check_vp9_parameter(VADriverContextP ctx,
1325 struct decode_state *decode_state)
1327 struct i965_driver_data *i965 = i965_driver_data(ctx);
1328 VADecPictureParameterBufferVP9 *pic_param = (VADecPictureParameterBufferVP9 *)decode_state->pic_param->buffer;
1329 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1330 struct object_surface *obj_surface;
/* Reject streams whose profile exceeds the profile this context was created for */
1333 if((profile - VAProfileVP9Profile0) < pic_param->profile)
1336 //Supported resolutions are limited to 4K (width/height <= 4096) on BXT
1337 if ((pic_param->frame_width-1 < 0) || (pic_param->frame_width-1 > 4095))
1340 if ((pic_param->frame_height-1 < 0) || (pic_param->frame_height-1 > 4095))
1343 //Set the reference object in decode state for last reference
1344 index = pic_param->pic_fields.bits.last_ref_frame;
1345 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1346 obj_surface = SURFACE(pic_param->reference_frames[index]);
1348 if (obj_surface && obj_surface->bo)
1349 decode_state->reference_objects[i++] = obj_surface;
1351 decode_state->reference_objects[i++] = NULL;
1354 //Set the reference object in decode state for golden reference
1355 index = pic_param->pic_fields.bits.golden_ref_frame;
1356 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1357 obj_surface = SURFACE(pic_param->reference_frames[index]);
1359 if (obj_surface && obj_surface->bo)
1360 decode_state->reference_objects[i++] = obj_surface;
1362 decode_state->reference_objects[i++] = NULL;
1365 //Set the reference object in decode state for altref reference
1366 index = pic_param->pic_fields.bits.alt_ref_frame;
1367 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1368 obj_surface = SURFACE(pic_param->reference_frames[index]);
1370 if (obj_surface && obj_surface->bo)
1371 decode_state->reference_objects[i++] = obj_surface;
1373 decode_state->reference_objects[i++] = NULL;
/* Clear the remaining reference slots */
1376 for ( ; i < 16; i++)
1377 decode_state->reference_objects[i] = NULL;
1379 return VA_STATUS_SUCCESS;
/*
 * Entry-point sanity check for a decode submission: verify the current
 * render target is a valid surface, record it in
 * decode_state->render_object, then dispatch to the per-codec
 * parameter checker selected by the VA profile.
 * Returns the VAStatus from the codec-specific checker, or
 * VA_STATUS_ERROR_INVALID_PARAMETER for unknown profiles / invalid
 * render targets.
 */
1383 intel_decoder_sanity_check_input(VADriverContextP ctx,
1385 struct decode_state *decode_state)
1387 struct i965_driver_data *i965 = i965_driver_data(ctx);
1388 struct object_surface *obj_surface;
1389 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* The application must have supplied a render target surface */
1391 if (decode_state->current_render_target == VA_INVALID_SURFACE)
1394 obj_surface = SURFACE(decode_state->current_render_target);
1399 decode_state->render_object = obj_surface;
/* Dispatch to the per-codec parameter validation */
1402 case VAProfileMPEG2Simple:
1403 case VAProfileMPEG2Main:
1404 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
1407 case VAProfileH264ConstrainedBaseline:
1408 case VAProfileH264Main:
1409 case VAProfileH264High:
1410 case VAProfileH264StereoHigh:
1411 case VAProfileH264MultiviewHigh:
1412 vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
1415 case VAProfileVC1Simple:
1416 case VAProfileVC1Main:
1417 case VAProfileVC1Advanced:
1418 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
/* JPEG has no reference frames to validate */
1421 case VAProfileJPEGBaseline:
1422 vaStatus = VA_STATUS_SUCCESS;
1425 case VAProfileVP8Version0_3:
1426 vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
1429 case VAProfileHEVCMain:
1430 case VAProfileHEVCMain10:
1431 vaStatus = intel_decoder_check_hevc_parameter(ctx, decode_state);
1434 case VAProfileVP9Profile0:
1435 case VAProfileVP9Profile2:
1436 vaStatus = intel_decoder_check_vp9_parameter(ctx, profile, decode_state);
/* Unknown/unsupported profile */
1440 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1449 * Return the next slice parameter
1452 * slice_param: the current slice
1453 * *group_idx & *element_idx the current slice position in slice groups
1455 * Return the next slice parameter
1456 * *group_idx & *element_idx the next slice position in slice groups,
1457 * if the next slice is NULL, *group_idx & *element_idx will be ignored
/*
 * Scan the slice parameter groups starting just after the position
 * (*group_idx, *element_idx) and return the first slice whose raster
 * position (vertical * width_in_mbs + horizontal) is at or beyond the
 * current slice's — i.e. the next slice in decode order.
 */
1459 VASliceParameterBufferMPEG2 *
1460 intel_mpeg2_find_next_slice(struct decode_state *decode_state,
1461 VAPictureParameterBufferMPEG2 *pic_param,
1462 VASliceParameterBufferMPEG2 *slice_param,
/* Picture width in macroblocks (16x16), rounded up */
1466 VASliceParameterBufferMPEG2 *next_slice_param;
1467 unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
/* Resume from the element following the current slice */
1468 int j = *group_idx, i = *element_idx + 1;
1470 for (; j < decode_state->num_slice_params; j++) {
1471 for (; i < decode_state->slice_params[j]->num_elements; i++) {
1472 next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;
/* Compare linearized (row-major) macroblock positions */
1474 if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
1475 (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
1479 return next_slice_param;
1489 /* Ensure the segmentation buffer is large enough for the supplied
1490 number of MBs, or re-allocate it */
/*
 * Ensure buf holds a bo large enough for the VP8 segmentation map of a
 * mb_width x mb_height picture; keep the existing bo when it already
 * fits, otherwise release it and allocate a new one, recording success
 * in buf->valid.
 */
1492 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
1493 unsigned int mb_width, unsigned int mb_height)
1495 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1496 /* The segmentation map is a 64-byte aligned linear buffer, with
1497 each cache line holding only 8 bits for 4 continuous MBs */
1498 const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
/* Existing buffer already big enough — reuse it */
1501 if (buf->bo && buf->bo->size >= buf_size)
/* Drop the undersized buffer (drm_intel_bo_unreference handles NULL) */
1503 drm_intel_bo_unreference(buf->bo);
1507 buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
1509 buf->valid = buf->bo != NULL;
/*
 * Fill an HEVC IQ matrix buffer with the default "flat" scaling lists:
 * every AC coefficient and every 16x16/32x32 DC value set to 16,
 * which corresponds to no scaling.
 */
1514 hevc_gen_default_iq_matrix(VAIQMatrixBufferHEVC *iq_matrix)
/* Flat_4x4_16 */
1517 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
/* Flat_8x8_16 */
1520 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
/* Flat_16x16_16 */
1523 memset(&iq_matrix->ScalingList16x16, 16, sizeof(iq_matrix->ScalingList16x16));
/* Flat_32x32_16 */
1526 memset(&iq_matrix->ScalingList32x32, 16, sizeof(iq_matrix->ScalingList32x32));
1528 /* Flat_16x16_dc_16 */
1529 memset(&iq_matrix->ScalingListDC16x16, 16, sizeof(iq_matrix->ScalingListDC16x16));
1531 /* Flat_32x32_dc_16 */
1532 memset(&iq_matrix->ScalingListDC32x32, 16, sizeof(iq_matrix->ScalingListDC32x32));