/*
 * Copyright (C) 2006-2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <assert.h>
#include <limits.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "intel_batchbuffer.h"
#include "intel_media.h"
#include "i965_drv_video.h"
#include "i965_decoder_utils.h"
#include "i965_defines.h"
33 /* Set reference surface if backing store exists */
36 struct i965_driver_data *i965,
37 GenFrameStore *ref_frame,
38 VASurfaceID va_surface,
39 struct object_surface *obj_surface
42 if (va_surface == VA_INVALID_ID)
45 if (!obj_surface || !obj_surface->bo)
48 ref_frame->surface_id = va_surface;
49 ref_frame->obj_surface = obj_surface;
53 /* Check wether codec layer incorrectly fills in slice_vertical_position */
55 mpeg2_wa_slice_vertical_position(
56 struct decode_state *decode_state,
57 VAPictureParameterBufferMPEG2 *pic_param
60 unsigned int i, j, mb_height, vpos, last_vpos = 0;
62 /* Assume progressive sequence if we got a progressive frame */
63 if (pic_param->picture_coding_extension.bits.progressive_frame)
66 /* Wait for a field coded picture */
67 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
70 assert(decode_state && decode_state->slice_params);
72 mb_height = (pic_param->vertical_size + 31) / 32;
74 for (j = 0; j < decode_state->num_slice_params; j++) {
75 struct buffer_store * const buffer_store =
76 decode_state->slice_params[j];
78 for (i = 0; i < buffer_store->num_elements; i++) {
79 VASliceParameterBufferMPEG2 * const slice_param =
80 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
82 vpos = slice_param->slice_vertical_position;
83 if (vpos >= mb_height || vpos == last_vpos + 2) {
84 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
93 /* Build MPEG-2 reference frames array */
95 mpeg2_set_reference_surfaces(
97 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
98 struct decode_state *decode_state,
99 VAPictureParameterBufferMPEG2 *pic_param
102 struct i965_driver_data * const i965 = i965_driver_data(ctx);
103 VASurfaceID va_surface;
104 unsigned pic_structure, is_second_field, n = 0;
105 struct object_surface *obj_surface;
107 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
108 is_second_field = pic_structure != MPEG_FRAME &&
109 !pic_param->picture_coding_extension.bits.is_first_field;
111 ref_frames[0].surface_id = VA_INVALID_ID;
112 ref_frames[0].obj_surface = NULL;
114 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
115 switch (pic_param->picture_coding_type) {
117 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
118 va_surface = decode_state->current_render_target;
119 obj_surface = decode_state->render_object;
120 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
122 va_surface = pic_param->forward_reference_picture;
123 obj_surface = decode_state->reference_objects[0];
124 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
128 va_surface = pic_param->forward_reference_picture;
129 obj_surface = decode_state->reference_objects[0];
130 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
131 va_surface = pic_param->backward_reference_picture;
132 obj_surface = decode_state->reference_objects[1];
133 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
138 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
139 ref_frames[n++].surface_id = ref_frames[0].surface_id;
142 if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
145 ref_frames[2].surface_id = VA_INVALID_ID;
146 ref_frames[2].obj_surface = NULL;
148 /* Bottom field pictures used as reference */
149 switch (pic_param->picture_coding_type) {
151 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
152 va_surface = decode_state->current_render_target;
153 obj_surface = decode_state->render_object;
154 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
156 va_surface = pic_param->forward_reference_picture;
157 obj_surface = decode_state->reference_objects[0];
158 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
162 va_surface = pic_param->forward_reference_picture;
163 obj_surface = decode_state->reference_objects[0];
164 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
165 va_surface = pic_param->backward_reference_picture;
166 obj_surface = decode_state->reference_objects[1];
167 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
172 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
173 ref_frames[n++].surface_id = ref_frames[2].surface_id;
177 /* Ensure the supplied VA surface has valid storage for decoding the
180 avc_ensure_surface_bo(
181 VADriverContextP ctx,
182 struct decode_state *decode_state,
183 struct object_surface *obj_surface,
184 const VAPictureParameterBufferH264 *pic_param
188 uint32_t hw_fourcc, fourcc, subsample, chroma_format;
190 /* Validate chroma format */
191 switch (pic_param->seq_fields.bits.chroma_format_idc) {
193 fourcc = VA_FOURCC_Y800;
194 subsample = SUBSAMPLE_YUV400;
195 chroma_format = VA_RT_FORMAT_YUV400;
198 fourcc = VA_FOURCC_NV12;
199 subsample = SUBSAMPLE_YUV420;
200 chroma_format = VA_RT_FORMAT_YUV420;
203 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
206 /* Determine the HW surface format, bound to VA config needs */
207 if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
212 case VA_FOURCC_Y800: // Implement with an NV12 surface
213 if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
214 hw_fourcc = VA_FOURCC_NV12;
215 subsample = SUBSAMPLE_YUV420;
221 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
223 /* (Re-)allocate the underlying surface buffer store, if necessary */
224 if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
225 struct i965_driver_data * const i965 = i965_driver_data(ctx);
227 i965_destroy_surface_storage(obj_surface);
228 va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
229 i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
230 if (va_status != VA_STATUS_SUCCESS)
234 /* Fake chroma components if grayscale is implemented on top of NV12 */
235 if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
236 const uint32_t uv_offset = obj_surface->width * obj_surface->height;
237 const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;
239 drm_intel_gem_bo_map_gtt(obj_surface->bo);
240 memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
241 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
243 return VA_STATUS_SUCCESS;
246 /* Generate flat scaling matrices for H.264 decoding */
248 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
251 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
254 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
257 /* Returns the POC of the supplied VA picture */
259 avc_get_picture_poc(const VAPictureH264 *va_pic)
261 int structure, field_poc[2];
263 structure = va_pic->flags &
264 (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD);
265 field_poc[0] = structure != VA_PICTURE_H264_BOTTOM_FIELD ?
266 va_pic->TopFieldOrderCnt : INT_MAX;
267 field_poc[1] = structure != VA_PICTURE_H264_TOP_FIELD ?
268 va_pic->BottomFieldOrderCnt : INT_MAX;
269 return MIN(field_poc[0], field_poc[1]);
272 /* Returns a unique picture ID that represents the supplied VA surface object */
274 avc_get_picture_id(struct object_surface *obj_surface)
278 /* This highly depends on how the internal VA objects are organized.
280 Theory of operations:
281 The VA objects are maintained in heaps so that any released VA
282 surface will become free again for future allocation. This means
283 that holes in there are filled in for subsequent allocations.
284 So, this ultimately means that we could just use the Heap ID of
285 the VA surface as the resulting picture ID (16 bits) */
286 pic_id = 1 + (obj_surface->base.id & OBJECT_HEAP_ID_MASK);
287 return (pic_id <= 0xffff) ? pic_id : -1;
290 /* Finds the VA/H264 picture associated with the specified VA surface id */
292 avc_find_picture(VASurfaceID id, VAPictureH264 *pic_list, int pic_list_count)
296 if (id != VA_INVALID_ID) {
297 for (i = 0; i < pic_list_count; i++) {
298 VAPictureH264 * const va_pic = &pic_list[i];
299 if (va_pic->picture_id == id &&
300 !(va_pic->flags & VA_PICTURE_H264_INVALID))
307 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
308 /* XXX: slice_data_bit_offset does not account for EPB */
310 avc_get_first_mb_bit_offset(
311 dri_bo *slice_data_bo,
312 VASliceParameterBufferH264 *slice_param,
313 unsigned int mode_flag
316 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
318 if (mode_flag == ENTROPY_CABAC)
319 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
320 return slice_data_bit_offset;
323 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
324 /* XXX: slice_data_bit_offset does not account for EPB */
326 avc_get_first_mb_bit_offset_with_epb(
327 dri_bo *slice_data_bo,
328 VASliceParameterBufferH264 *slice_param,
329 unsigned int mode_flag
332 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
333 unsigned int out_slice_data_bit_offset;
334 unsigned int i, j, n, buf_size, data_size, header_size;
338 header_size = slice_param->slice_data_bit_offset / 8;
339 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
340 buf_size = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
342 if (buf_size > data_size)
343 buf_size = data_size;
345 buf = malloc(buf_size);
346 ret = dri_bo_get_subdata(
347 slice_data_bo, slice_param->slice_data_offset,
352 for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
353 if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
359 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
361 if (mode_flag == ENTROPY_CABAC)
362 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
363 return out_slice_data_bit_offset;
366 static inline uint8_t
367 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
369 /* The H.264 standard, and the VA-API specification, allows for at
370 least 3 states for a picture: "used for short-term reference",
371 "used for long-term reference", or considered as not used for
374 The latter is used in the MVC inter prediction and inter-view
375 prediction process (H.8.4). This has an incidence on the
376 colZeroFlag variable, as defined in 8.4.1.2.
378 Since it is not possible to directly program that flag, let's
379 make the hardware derive this value by assimilating "considered
380 as not used for reference" to a "not used for short-term
381 reference", and subsequently making it "used for long-term
382 reference" to fit the definition of Bit6 here */
383 const unsigned int ref_flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE |
384 VA_PICTURE_H264_LONG_TERM_REFERENCE;
385 const unsigned int is_long_term =
386 ((va_pic->flags & ref_flags) != VA_PICTURE_H264_SHORT_TERM_REFERENCE);
387 const unsigned int is_top_field =
388 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
389 const unsigned int is_bottom_field =
390 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
392 return ((is_long_term << 6) |
393 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
394 (frame_store_id << 1) |
395 ((is_top_field ^ 1) & is_bottom_field));
398 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
400 gen5_fill_avc_ref_idx_state(
402 const VAPictureH264 ref_list[32],
403 unsigned int ref_list_count,
404 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
409 for (i = 0; i < ref_list_count; i++) {
410 const VAPictureH264 * const va_pic = &ref_list[i];
412 if ((va_pic->flags & VA_PICTURE_H264_INVALID) ||
413 va_pic->picture_id == VA_INVALID_ID) {
418 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
419 if (frame_store[j].surface_id == va_pic->picture_id)
423 if (j != MAX_GEN_REFERENCE_FRAMES) { // Found picture in the Frame Store
424 const GenFrameStore * const fs = &frame_store[j];
425 assert(fs->frame_store_id == j); // Current architecture/assumption
426 state[i] = get_ref_idx_state_1(va_pic, fs->frame_store_id);
429 WARN_ONCE("Invalid RefPicListX[] entry!!! It is not included in DPB\n");
430 state[i] = get_ref_idx_state_1(va_pic, 0) | 0x80;
438 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
440 gen6_send_avc_ref_idx_state_1(
441 struct intel_batchbuffer *batch,
443 const VAPictureH264 *ref_list,
444 unsigned int ref_list_count,
445 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
448 uint8_t ref_idx_state[32];
450 BEGIN_BCS_BATCH(batch, 10);
451 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
452 OUT_BCS_BATCH(batch, list);
453 gen5_fill_avc_ref_idx_state(
455 ref_list, ref_list_count,
458 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
459 ADVANCE_BCS_BATCH(batch);
463 gen6_send_avc_ref_idx_state(
464 struct intel_batchbuffer *batch,
465 const VASliceParameterBufferH264 *slice_param,
466 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
469 if (slice_param->slice_type == SLICE_TYPE_I ||
470 slice_param->slice_type == SLICE_TYPE_SI)
474 gen6_send_avc_ref_idx_state_1(
476 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
480 if (slice_param->slice_type != SLICE_TYPE_B)
484 gen6_send_avc_ref_idx_state_1(
486 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
492 gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx,
493 VAPictureParameterBufferH264 *pic_param,
494 VASliceParameterBufferH264 *next_slice_param,
495 struct intel_batchbuffer *batch)
497 int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
498 int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
499 int slice_hor_pos, slice_ver_pos, slice_start_mb_num, next_slice_hor_pos, next_slice_ver_pos;
500 int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
501 pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
503 if (next_slice_param) {
504 int first_mb_in_next_slice;
508 slice_start_mb_num = 0;
509 first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
510 next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
511 next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
514 slice_ver_pos = height_in_mbs;
515 slice_start_mb_num = width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
516 next_slice_hor_pos = 0;
517 next_slice_ver_pos = 0;
520 BEGIN_BCS_BATCH(batch, 11);
521 OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
522 OUT_BCS_BATCH(batch, 0);
523 OUT_BCS_BATCH(batch, 0);
524 OUT_BCS_BATCH(batch, 0);
526 slice_ver_pos << 24 |
527 slice_hor_pos << 16 |
528 slice_start_mb_num << 0);
530 next_slice_ver_pos << 16 |
531 next_slice_hor_pos << 0);
532 OUT_BCS_BATCH(batch, 0);
533 OUT_BCS_BATCH(batch, 0);
534 OUT_BCS_BATCH(batch, 0);
535 OUT_BCS_BATCH(batch, 0);
536 OUT_BCS_BATCH(batch, 0);
537 ADVANCE_BCS_BATCH(batch);
541 gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx,
542 VAPictureParameterBufferH264 *pic_param,
543 struct intel_batchbuffer *batch)
546 BEGIN_BCS_BATCH(batch, 6);
547 OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
548 OUT_BCS_BATCH(batch, 0);
549 OUT_BCS_BATCH(batch, 0);
550 OUT_BCS_BATCH(batch, 0);
551 OUT_BCS_BATCH(batch, 0);
552 OUT_BCS_BATCH(batch, 0);
553 ADVANCE_BCS_BATCH(batch);
557 gen6_mfd_avc_phantom_slice(VADriverContextP ctx,
558 VAPictureParameterBufferH264 *pic_param,
559 VASliceParameterBufferH264 *next_slice_param,
560 struct intel_batchbuffer *batch)
562 gen6_mfd_avc_phantom_slice_state(ctx, pic_param, next_slice_param, batch);
563 gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param, batch);
566 /* Comparison function for sorting out the array of free frame store entries */
568 compare_avc_ref_store_func(const void *p1, const void *p2)
570 const GenFrameStore * const fs1 = *((GenFrameStore **)p1);
571 const GenFrameStore * const fs2 = *((GenFrameStore **)p2);
573 return fs1->ref_age - fs2->ref_age;
577 intel_update_codec_frame_store_index(
578 VADriverContextP ctx,
579 struct decode_state *decode_state,
581 GenFrameStore frame_store[],
583 GenFrameStoreContext *fs_ctx
586 GenFrameStore **free_refs = calloc(num_elements, sizeof(GenFrameStore *));
587 uint32_t used_refs = 0, add_refs = 0;
589 int i, n, num_free_refs;
594 /* Detect changes of access unit */
595 if (fs_ctx->age == 0 || fs_ctx->prev_poc != poc)
597 fs_ctx->prev_poc = poc;
600 /* Tag entries that are still available in our Frame Store */
601 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
602 struct object_surface * const obj_surface =
603 decode_state->reference_objects[i];
607 GenCodecSurface * const codec_surface = obj_surface->private_data;
610 if (codec_surface->frame_store_id >= 0) {
611 GenFrameStore * const fs =
612 &frame_store[codec_surface->frame_store_id];
613 if (fs->surface_id == obj_surface->base.id) {
614 fs->obj_surface = obj_surface;
616 used_refs |= 1 << fs->frame_store_id;
623 /* Build and sort out the list of retired candidates. The resulting
624 list is ordered by increasing age when they were last used */
625 for (i = 0, n = 0; i < num_elements; i++) {
626 if (!(used_refs & (1 << i))) {
627 GenFrameStore * const fs = &frame_store[i];
628 fs->obj_surface = NULL;
633 qsort(&free_refs[0], n, sizeof(free_refs[0]), compare_avc_ref_store_func);
635 /* Append the new reference frames */
636 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
637 struct object_surface * const obj_surface =
638 decode_state->reference_objects[i];
639 if (!obj_surface || !(add_refs & (1 << i)))
642 GenCodecSurface * const codec_surface = obj_surface->private_data;
645 if (n < num_free_refs) {
646 GenFrameStore * const fs = free_refs[n++];
647 fs->surface_id = obj_surface->base.id;
648 fs->obj_surface = obj_surface;
649 fs->frame_store_id = fs - frame_store;
651 codec_surface->frame_store_id = fs->frame_store_id;
654 WARN_ONCE("No free slot found for DPB reference list!!!\n");
661 intel_update_avc_frame_store_index(
662 VADriverContextP ctx,
663 struct decode_state *decode_state,
664 VAPictureParameterBufferH264 *pic_param,
665 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES],
666 GenFrameStoreContext *fs_ctx
669 intel_update_codec_frame_store_index(ctx,
671 avc_get_picture_poc(&pic_param->CurrPic),
673 MAX_GEN_REFERENCE_FRAMES,
678 intel_update_hevc_frame_store_index(
679 VADriverContextP ctx,
680 struct decode_state *decode_state,
681 VAPictureParameterBufferHEVC *pic_param,
682 GenFrameStore frame_store[MAX_GEN_HCP_REFERENCE_FRAMES],
683 GenFrameStoreContext *fs_ctx
688 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
689 struct object_surface * const obj_surface = decode_state->reference_objects[i];
694 GenFrameStore * const fs = &frame_store[n];
695 fs->surface_id = obj_surface->base.id;
696 fs->obj_surface = obj_surface;
697 fs->frame_store_id = n++;
699 if (n == MAX_GEN_HCP_REFERENCE_FRAMES)
703 for (; n < MAX_GEN_HCP_REFERENCE_FRAMES; n++) {
704 GenFrameStore * const fs = &frame_store[n];
706 fs->surface_id = VA_INVALID_ID;
707 fs->obj_surface = NULL;
708 fs->frame_store_id = -1;
713 gen75_update_avc_frame_store_index(
714 VADriverContextP ctx,
715 struct decode_state *decode_state,
716 VAPictureParameterBufferH264 *pic_param,
717 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
722 /* Construct the Frame Store array, in compact form. i.e. empty or
723 invalid entries are discarded. */
724 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
725 struct object_surface * const obj_surface =
726 decode_state->reference_objects[i];
730 GenFrameStore * const fs = &frame_store[n];
731 fs->surface_id = obj_surface->base.id;
732 fs->obj_surface = obj_surface;
733 fs->frame_store_id = n++;
736 /* Any remaining entry is marked as invalid */
737 for (; n < MAX_GEN_REFERENCE_FRAMES; n++) {
738 GenFrameStore * const fs = &frame_store[n];
739 fs->surface_id = VA_INVALID_ID;
740 fs->obj_surface = NULL;
741 fs->frame_store_id = -1;
746 gen75_fill_avc_picid_list(
747 uint16_t pic_ids[16],
748 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
753 /* Fill in with known picture IDs. The Frame Store array is in
754 compact form, i.e. empty entries are only to be found at the
755 end of the array: there are no holes in the set of active
757 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
758 GenFrameStore * const fs = &frame_store[i];
759 if (!fs->obj_surface)
761 pic_id = avc_get_picture_id(fs->obj_surface);
767 /* When an element of the list is not relevant the value of the
768 picture ID shall be set to 0 */
769 for (; i < MAX_GEN_REFERENCE_FRAMES; i++)
775 gen75_send_avc_picid_state(
776 struct intel_batchbuffer *batch,
777 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
780 uint16_t pic_ids[16];
782 if (!gen75_fill_avc_picid_list(pic_ids, frame_store))
785 BEGIN_BCS_BATCH(batch, 10);
786 OUT_BCS_BATCH(batch, MFD_AVC_PICID_STATE | (10 - 2));
787 OUT_BCS_BATCH(batch, 0); // enable Picture ID Remapping
788 intel_batchbuffer_data(batch, pic_ids, sizeof(pic_ids));
789 ADVANCE_BCS_BATCH(batch);
794 intel_update_vc1_frame_store_index(VADriverContextP ctx,
795 struct decode_state *decode_state,
796 VAPictureParameterBufferVC1 *pic_param,
797 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
799 struct object_surface *obj_surface;
802 obj_surface = decode_state->reference_objects[0];
804 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
807 frame_store[0].surface_id = VA_INVALID_ID;
808 frame_store[0].obj_surface = NULL;
810 frame_store[0].surface_id = pic_param->forward_reference_picture;
811 frame_store[0].obj_surface = obj_surface;
814 obj_surface = decode_state->reference_objects[1];
816 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
819 frame_store[1].surface_id = frame_store[0].surface_id;
820 frame_store[1].obj_surface = frame_store[0].obj_surface;
822 frame_store[1].surface_id = pic_param->backward_reference_picture;
823 frame_store[1].obj_surface = obj_surface;
825 for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
826 frame_store[i].surface_id = frame_store[i % 2].surface_id;
827 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
833 intel_update_vp8_frame_store_index(VADriverContextP ctx,
834 struct decode_state *decode_state,
835 VAPictureParameterBufferVP8 *pic_param,
836 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
838 struct object_surface *obj_surface;
841 obj_surface = decode_state->reference_objects[0];
843 if (pic_param->last_ref_frame == VA_INVALID_ID ||
846 frame_store[0].surface_id = VA_INVALID_ID;
847 frame_store[0].obj_surface = NULL;
849 frame_store[0].surface_id = pic_param->last_ref_frame;
850 frame_store[0].obj_surface = obj_surface;
853 obj_surface = decode_state->reference_objects[1];
855 if (pic_param->golden_ref_frame == VA_INVALID_ID ||
858 frame_store[1].surface_id = frame_store[0].surface_id;
859 frame_store[1].obj_surface = frame_store[0].obj_surface;
861 frame_store[1].surface_id = pic_param->golden_ref_frame;
862 frame_store[1].obj_surface = obj_surface;
865 obj_surface = decode_state->reference_objects[2];
867 if (pic_param->alt_ref_frame == VA_INVALID_ID ||
870 frame_store[2].surface_id = frame_store[0].surface_id;
871 frame_store[2].obj_surface = frame_store[0].obj_surface;
873 frame_store[2].surface_id = pic_param->alt_ref_frame;
874 frame_store[2].obj_surface = obj_surface;
877 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
878 frame_store[i].surface_id = frame_store[i % 2].surface_id;
879 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
884 //Obtain the reference frames from the decode state and store them in frame store.
886 intel_update_vp9_frame_store_index(VADriverContextP ctx,
887 struct decode_state *decode_state,
888 VADecPictureParameterBufferVP9 *pic_param,
889 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
891 struct object_surface *obj_surface;
894 //Check for the validity of the last reference frame
895 obj_surface = decode_state->reference_objects[0];
897 index = pic_param->pic_fields.bits.last_ref_frame;
898 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
901 frame_store[0].surface_id = VA_INVALID_ID;
902 frame_store[0].obj_surface = NULL;
904 frame_store[0].surface_id = pic_param->reference_frames[index];
905 frame_store[0].obj_surface = obj_surface;
908 //Check for the validity of the golden reference frame
909 obj_surface = decode_state->reference_objects[1];
911 index = pic_param->pic_fields.bits.golden_ref_frame;
912 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
915 frame_store[1].surface_id = frame_store[0].surface_id;
916 frame_store[1].obj_surface = frame_store[0].obj_surface;
918 frame_store[1].surface_id = pic_param->reference_frames[index];
919 frame_store[1].obj_surface = obj_surface;
922 //Check for the validity of the altref reference frame
923 obj_surface = decode_state->reference_objects[2];
925 index = pic_param->pic_fields.bits.alt_ref_frame;
926 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
929 frame_store[2].surface_id = frame_store[0].surface_id;
930 frame_store[2].obj_surface = frame_store[0].obj_surface;
932 frame_store[2].surface_id = pic_param->reference_frames[index];
933 frame_store[2].obj_surface = obj_surface;
936 //Set the remaining framestores to either last/golden/altref
937 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
938 frame_store[i].surface_id = frame_store[i % 2].surface_id;
939 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
945 intel_decoder_check_avc_parameter(VADriverContextP ctx,
946 VAProfile h264_profile,
947 struct decode_state *decode_state)
949 struct i965_driver_data *i965 = i965_driver_data(ctx);
950 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
952 struct object_surface *obj_surface;
954 VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
957 ASSERT_RET(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID), VA_STATUS_ERROR_INVALID_PARAMETER);
958 ASSERT_RET((pic_param->CurrPic.picture_id != VA_INVALID_SURFACE), VA_STATUS_ERROR_INVALID_PARAMETER);
959 ASSERT_RET((pic_param->CurrPic.picture_id == decode_state->current_render_target), VA_STATUS_ERROR_INVALID_PARAMETER);
961 if ((h264_profile != VAProfileH264Baseline)) {
962 if (pic_param->num_slice_groups_minus1 ||
963 pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
964 WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
969 /* Fill in the reference objects array with the actual VA surface
970 objects with 1:1 correspondance with any entry in ReferenceFrames[],
971 i.e. including "holes" for invalid entries, that are expanded
972 to NULL in the reference_objects[] array */
973 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
974 const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];
977 if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
978 va_pic->picture_id != VA_INVALID_ID) {
979 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
981 return VA_STATUS_ERROR_INVALID_SURFACE;
984 * Sometimes a dummy frame comes from the upper layer
985 * library, call i965_check_alloc_surface_bo() to make
986 * sure the store buffer is allocated for this reference
989 va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
991 if (va_status != VA_STATUS_SUCCESS)
994 decode_state->reference_objects[i] = obj_surface;
997 for (j = 0; j < decode_state->num_slice_params; j++) {
998 ASSERT_RET((decode_state->slice_params && decode_state->slice_params[j]->buffer), VA_STATUS_ERROR_INVALID_PARAMETER);
999 slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1001 if (j == decode_state->num_slice_params - 1)
1002 next_slice_group_param = NULL;
1004 next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
1006 for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1008 if (i < decode_state->slice_params[j]->num_elements - 1)
1009 next_slice_param = slice_param + 1;
1011 next_slice_param = next_slice_group_param;
1013 if (next_slice_param != NULL) {
1014 /* If the mb position of next_slice is less than or equal to the current slice,
1015 * discard the current frame.
1017 if (next_slice_param->first_mb_in_slice <= slice_param->first_mb_in_slice) {
1018 next_slice_param = NULL;
1019 WARN_ONCE("!!!incorrect slice_param. The first_mb_in_slice of next_slice is less"
1020 " than or equal to that in current slice\n");
1027 return VA_STATUS_SUCCESS;
1030 return VA_STATUS_ERROR_INVALID_PARAMETER;
1034 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
1035 struct decode_state *decode_state)
1037 struct i965_driver_data *i965 = i965_driver_data(ctx);
1038 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1039 struct object_surface *obj_surface;
1042 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
1043 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
1044 obj_surface = SURFACE(pic_param->forward_reference_picture);
1046 if (!obj_surface || !obj_surface->bo)
1047 decode_state->reference_objects[i++] = NULL;
1049 decode_state->reference_objects[i++] = obj_surface;
1050 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
1051 obj_surface = SURFACE(pic_param->forward_reference_picture);
1053 if (!obj_surface || !obj_surface->bo)
1054 decode_state->reference_objects[i++] = NULL;
1056 decode_state->reference_objects[i++] = obj_surface;
1058 obj_surface = SURFACE(pic_param->backward_reference_picture);
1060 if (!obj_surface || !obj_surface->bo)
1061 decode_state->reference_objects[i++] = NULL;
1063 decode_state->reference_objects[i++] = obj_surface;
1067 for ( ; i < 16; i++)
1068 decode_state->reference_objects[i] = NULL;
1070 return VA_STATUS_SUCCESS;
1073 return VA_STATUS_ERROR_INVALID_PARAMETER;
1077 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
1078 struct decode_state *decode_state)
1080 struct i965_driver_data *i965 = i965_driver_data(ctx);
1081 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1082 struct object_surface *obj_surface;
1085 if (pic_param->sequence_fields.bits.interlace == 1 &&
1086 pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1087 return VA_STATUS_ERROR_DECODING_ERROR;
1090 if (pic_param->picture_fields.bits.picture_type == 0 ||
1091 pic_param->picture_fields.bits.picture_type == 3) {
1092 } else if (pic_param->picture_fields.bits.picture_type == 1 ||
1093 pic_param->picture_fields.bits.picture_type == 4) {
1094 obj_surface = SURFACE(pic_param->forward_reference_picture);
1096 if (!obj_surface || !obj_surface->bo)
1097 decode_state->reference_objects[i++] = NULL;
1099 decode_state->reference_objects[i++] = obj_surface;
1100 } else if (pic_param->picture_fields.bits.picture_type == 2) {
1101 obj_surface = SURFACE(pic_param->forward_reference_picture);
1103 if (!obj_surface || !obj_surface->bo)
1104 decode_state->reference_objects[i++] = NULL;
1106 decode_state->reference_objects[i++] = obj_surface;
1108 obj_surface = SURFACE(pic_param->backward_reference_picture);
1110 if (!obj_surface || !obj_surface->bo)
1111 decode_state->reference_objects[i++] = NULL;
1113 decode_state->reference_objects[i++] = obj_surface;
1117 for ( ; i < 16; i++)
1118 decode_state->reference_objects[i] = NULL;
1120 return VA_STATUS_SUCCESS;
1123 return VA_STATUS_ERROR_INVALID_PARAMETER;
1127 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
1128 struct decode_state *decode_state)
1130 struct i965_driver_data *i965 = i965_driver_data(ctx);
1131 VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
1132 struct object_surface *obj_surface;
1135 if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
1136 obj_surface = SURFACE(pic_param->last_ref_frame);
1138 if (obj_surface && obj_surface->bo)
1139 decode_state->reference_objects[i++] = obj_surface;
1141 decode_state->reference_objects[i++] = NULL;
1144 if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
1145 obj_surface = SURFACE(pic_param->golden_ref_frame);
1147 if (obj_surface && obj_surface->bo)
1148 decode_state->reference_objects[i++] = obj_surface;
1150 decode_state->reference_objects[i++] = NULL;
1153 if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
1154 obj_surface = SURFACE(pic_param->alt_ref_frame);
1156 if (obj_surface && obj_surface->bo)
1157 decode_state->reference_objects[i++] = obj_surface;
1159 decode_state->reference_objects[i++] = NULL;
1162 for ( ; i < 16; i++)
1163 decode_state->reference_objects[i] = NULL;
1165 return VA_STATUS_SUCCESS;
1169 hevc_ensure_surface_bo(
1170 VADriverContextP ctx,
1171 struct decode_state *decode_state,
1172 struct object_surface *obj_surface,
1173 const VAPictureParameterBufferHEVC *pic_param
1176 VAStatus va_status = VA_STATUS_SUCCESS;
1178 unsigned int fourcc = VA_FOURCC_NV12;
1180 if((pic_param->bit_depth_luma_minus8 > 0)
1181 || (pic_param->bit_depth_chroma_minus8 > 0))
1183 if(obj_surface->fourcc != VA_FOURCC_P010)
1186 fourcc = VA_FOURCC_P010;
1189 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1192 fourcc = VA_FOURCC_NV12;
1195 /* (Re-)allocate the underlying surface buffer store, if necessary */
1196 if (!obj_surface->bo || update) {
1197 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1199 i965_destroy_surface_storage(obj_surface);
1201 va_status = i965_check_alloc_surface_bo(ctx,
1203 i965->codec_info->has_tiled_surface,
1211 //Ensure there is a tiled render surface in NV12 format. If not, create one.
1213 vp9_ensure_surface_bo(
1214 VADriverContextP ctx,
1215 struct decode_state *decode_state,
1216 struct object_surface *obj_surface,
1217 const VADecPictureParameterBufferVP9 *pic_param
1220 VAStatus va_status = VA_STATUS_SUCCESS;
1222 unsigned int fourcc = VA_FOURCC_NV12;
1224 if(pic_param->profile >= 2)
1226 if(obj_surface->fourcc != VA_FOURCC_P010)
1229 fourcc = VA_FOURCC_P010;
1232 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1235 fourcc = VA_FOURCC_NV12;
1238 /* (Re-)allocate the underlying surface buffer store, if necessary */
1239 if (!obj_surface->bo || update) {
1240 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1242 i965_destroy_surface_storage(obj_surface);
1244 va_status = i965_check_alloc_surface_bo(ctx,
1246 i965->codec_info->has_tiled_surface,
1255 intel_decoder_check_hevc_parameter(VADriverContextP ctx,
1256 struct decode_state *decode_state)
1258 struct i965_driver_data *i965 = i965_driver_data(ctx);
1259 VAPictureParameterBufferHEVC *pic_param = (VAPictureParameterBufferHEVC *)decode_state->pic_param->buffer;
1260 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1261 struct object_surface *obj_surface;
1265 if (pic_param->CurrPic.flags & VA_PICTURE_HEVC_INVALID ||
1266 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
1269 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
1272 min_cb_size = (1 << (pic_param->log2_min_luma_coding_block_size_minus3 + 3));
1274 if (pic_param->pic_width_in_luma_samples % min_cb_size ||
1275 pic_param->pic_height_in_luma_samples % min_cb_size)
1278 /* Fill in the reference objects array with the actual VA surface
1279 objects with 1:1 correspondance with any entry in ReferenceFrames[],
1280 i.e. including "holes" for invalid entries, that are expanded
1281 to NULL in the reference_objects[] array */
1282 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
1283 const VAPictureHEVC * const va_pic = &pic_param->ReferenceFrames[i];
1288 * Only the index with (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1289 * VA_PICTURE_HEVC_RPS_ST_CURR_AFTER | VA_PICTURE_HEVC_RPS_LT_CURR)
1292 if (!(va_pic->flags & VA_PICTURE_HEVC_INVALID) &&
1293 (va_pic->picture_id != VA_INVALID_ID) &&
1294 (va_pic->flags & (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1295 VA_PICTURE_HEVC_RPS_ST_CURR_AFTER |
1296 VA_PICTURE_HEVC_RPS_LT_CURR))) {
1298 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
1301 va_status = VA_STATUS_ERROR_INVALID_SURFACE;
1305 va_status = hevc_ensure_surface_bo(ctx, decode_state, obj_surface,
1308 if (va_status != VA_STATUS_SUCCESS)
1312 decode_state->reference_objects[i] = obj_surface;
1315 va_status = VA_STATUS_SUCCESS;
1321 //Obtains reference frames from the picture parameter and
1322 //then sets the reference frames in the decode_state
1324 intel_decoder_check_vp9_parameter(VADriverContextP ctx,
1326 struct decode_state *decode_state)
1328 struct i965_driver_data *i965 = i965_driver_data(ctx);
1329 VADecPictureParameterBufferVP9 *pic_param = (VADecPictureParameterBufferVP9 *)decode_state->pic_param->buffer;
1330 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1331 struct object_surface *obj_surface;
1334 if((profile - VAProfileVP9Profile0) < pic_param->profile)
1337 //Max support upto 4k for BXT
1338 if ((pic_param->frame_width-1 < 0) || (pic_param->frame_width-1 > 4095))
1341 if ((pic_param->frame_height-1 < 0) || (pic_param->frame_height-1 > 4095))
1344 //Set the reference object in decode state for last reference
1345 index = pic_param->pic_fields.bits.last_ref_frame;
1346 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1347 obj_surface = SURFACE(pic_param->reference_frames[index]);
1349 if (obj_surface && obj_surface->bo)
1350 decode_state->reference_objects[i++] = obj_surface;
1352 decode_state->reference_objects[i++] = NULL;
1355 //Set the reference object in decode state for golden reference
1356 index = pic_param->pic_fields.bits.golden_ref_frame;
1357 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1358 obj_surface = SURFACE(pic_param->reference_frames[index]);
1360 if (obj_surface && obj_surface->bo)
1361 decode_state->reference_objects[i++] = obj_surface;
1363 decode_state->reference_objects[i++] = NULL;
1366 //Set the reference object in decode state for altref reference
1367 index = pic_param->pic_fields.bits.alt_ref_frame;
1368 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1369 obj_surface = SURFACE(pic_param->reference_frames[index]);
1371 if (obj_surface && obj_surface->bo)
1372 decode_state->reference_objects[i++] = obj_surface;
1374 decode_state->reference_objects[i++] = NULL;
1377 for ( ; i < 16; i++)
1378 decode_state->reference_objects[i] = NULL;
1380 return VA_STATUS_SUCCESS;
1384 intel_decoder_sanity_check_input(VADriverContextP ctx,
1386 struct decode_state *decode_state)
1388 struct i965_driver_data *i965 = i965_driver_data(ctx);
1389 struct object_surface *obj_surface;
1390 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1392 if (decode_state->current_render_target == VA_INVALID_SURFACE)
1395 obj_surface = SURFACE(decode_state->current_render_target);
1400 decode_state->render_object = obj_surface;
1403 case VAProfileMPEG2Simple:
1404 case VAProfileMPEG2Main:
1405 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
1408 case VAProfileH264ConstrainedBaseline:
1409 case VAProfileH264Main:
1410 case VAProfileH264High:
1411 case VAProfileH264StereoHigh:
1412 case VAProfileH264MultiviewHigh:
1413 vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
1416 case VAProfileVC1Simple:
1417 case VAProfileVC1Main:
1418 case VAProfileVC1Advanced:
1419 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
1422 case VAProfileJPEGBaseline:
1423 vaStatus = VA_STATUS_SUCCESS;
1426 case VAProfileVP8Version0_3:
1427 vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
1430 case VAProfileHEVCMain:
1431 case VAProfileHEVCMain10:
1432 vaStatus = intel_decoder_check_hevc_parameter(ctx, decode_state);
1435 case VAProfileVP9Profile0:
1436 case VAProfileVP9Profile2:
1437 vaStatus = intel_decoder_check_vp9_parameter(ctx, profile, decode_state);
1441 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1450 * Return the next slice paramter
1453 * slice_param: the current slice
1454 * *group_idx & *element_idx the current slice position in slice groups
1456 * Return the next slice parameter
1457 * *group_idx & *element_idx the next slice position in slice groups,
1458 * if the next slice is NULL, *group_idx & *element_idx will be ignored
1460 VASliceParameterBufferMPEG2 *
1461 intel_mpeg2_find_next_slice(struct decode_state *decode_state,
1462 VAPictureParameterBufferMPEG2 *pic_param,
1463 VASliceParameterBufferMPEG2 *slice_param,
1467 VASliceParameterBufferMPEG2 *next_slice_param;
1468 unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1469 int j = *group_idx, i = *element_idx + 1;
1471 for (; j < decode_state->num_slice_params; j++) {
1472 for (; i < decode_state->slice_params[j]->num_elements; i++) {
1473 next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;
1475 if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
1476 (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
1480 return next_slice_param;
1490 /* Ensure the segmentation buffer is large enough for the supplied
1491 number of MBs, or re-allocate it */
1493 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
1494 unsigned int mb_width, unsigned int mb_height)
1496 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1497 /* The segmentation map is a 64-byte aligned linear buffer, with
1498 each cache line holding only 8 bits for 4 continuous MBs */
1499 const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
1502 if (buf->bo && buf->bo->size >= buf_size)
1504 drm_intel_bo_unreference(buf->bo);
1508 buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
1510 buf->valid = buf->bo != NULL;
/* Fill an HEVC inverse-quantization matrix with the flat default:
 * every scaling-list entry (and DC value) set to 16.
 * NOTE(review): the return-type line and closing brace lie outside this
 * extract — verify against the complete file. */
1515 hevc_gen_default_iq_matrix(VAIQMatrixBufferHEVC *iq_matrix)
/* Flat_4x4_16 */
1518 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
/* Flat_8x8_16 */
1521 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
/* Flat_16x16_16 */
1524 memset(&iq_matrix->ScalingList16x16, 16, sizeof(iq_matrix->ScalingList16x16));
/* Flat_32x32_16 */
1527 memset(&iq_matrix->ScalingList32x32, 16, sizeof(iq_matrix->ScalingList32x32));
1529 /* Flat_16x16_dc_16 */
1530 memset(&iq_matrix->ScalingListDC16x16, 16, sizeof(iq_matrix->ScalingListDC16x16));
1532 /* Flat_32x32_dc_16 */
1533 memset(&iq_matrix->ScalingListDC32x32, 16, sizeof(iq_matrix->ScalingListDC32x32));