/*
 * Copyright (C) 2006-2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <assert.h>
#include <limits.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "intel_batchbuffer.h"
#include "intel_media.h"
#include "i965_drv_video.h"
#include "i965_decoder_utils.h"
#include "i965_defines.h"
33 /* Set reference surface if backing store exists */
36 struct i965_driver_data *i965,
37 GenFrameStore *ref_frame,
38 VASurfaceID va_surface,
39 struct object_surface *obj_surface
42 if (va_surface == VA_INVALID_ID)
45 if (!obj_surface || !obj_surface->bo)
48 ref_frame->surface_id = va_surface;
49 ref_frame->obj_surface = obj_surface;
53 /* Check wether codec layer incorrectly fills in slice_vertical_position */
55 mpeg2_wa_slice_vertical_position(
56 struct decode_state *decode_state,
57 VAPictureParameterBufferMPEG2 *pic_param
60 unsigned int i, j, mb_height, vpos, last_vpos = 0;
62 /* Assume progressive sequence if we got a progressive frame */
63 if (pic_param->picture_coding_extension.bits.progressive_frame)
66 /* Wait for a field coded picture */
67 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
70 assert(decode_state && decode_state->slice_params);
72 mb_height = (pic_param->vertical_size + 31) / 32;
74 for (j = 0; j < decode_state->num_slice_params; j++) {
75 struct buffer_store * const buffer_store =
76 decode_state->slice_params[j];
78 for (i = 0; i < buffer_store->num_elements; i++) {
79 VASliceParameterBufferMPEG2 * const slice_param =
80 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
82 vpos = slice_param->slice_vertical_position;
83 if (vpos >= mb_height || vpos == last_vpos + 2) {
84 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
93 /* Build MPEG-2 reference frames array */
95 mpeg2_set_reference_surfaces(
97 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
98 struct decode_state *decode_state,
99 VAPictureParameterBufferMPEG2 *pic_param
102 struct i965_driver_data * const i965 = i965_driver_data(ctx);
103 VASurfaceID va_surface;
104 unsigned pic_structure, is_second_field, n = 0;
105 struct object_surface *obj_surface;
107 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
108 is_second_field = pic_structure != MPEG_FRAME &&
109 !pic_param->picture_coding_extension.bits.is_first_field;
111 ref_frames[0].surface_id = VA_INVALID_ID;
112 ref_frames[0].obj_surface = NULL;
114 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
115 switch (pic_param->picture_coding_type) {
117 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
118 va_surface = decode_state->current_render_target;
119 obj_surface = decode_state->render_object;
120 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
122 va_surface = pic_param->forward_reference_picture;
123 obj_surface = decode_state->reference_objects[0];
124 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
128 va_surface = pic_param->forward_reference_picture;
129 obj_surface = decode_state->reference_objects[0];
130 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
131 va_surface = pic_param->backward_reference_picture;
132 obj_surface = decode_state->reference_objects[1];
133 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
138 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
139 ref_frames[n++].surface_id = ref_frames[0].surface_id;
142 if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
145 ref_frames[2].surface_id = VA_INVALID_ID;
146 ref_frames[2].obj_surface = NULL;
148 /* Bottom field pictures used as reference */
149 switch (pic_param->picture_coding_type) {
151 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
152 va_surface = decode_state->current_render_target;
153 obj_surface = decode_state->render_object;
154 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
156 va_surface = pic_param->forward_reference_picture;
157 obj_surface = decode_state->reference_objects[0];
158 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
162 va_surface = pic_param->forward_reference_picture;
163 obj_surface = decode_state->reference_objects[0];
164 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
165 va_surface = pic_param->backward_reference_picture;
166 obj_surface = decode_state->reference_objects[1];
167 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
172 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
173 ref_frames[n++].surface_id = ref_frames[2].surface_id;
177 /* Ensure the supplied VA surface has valid storage for decoding the
180 avc_ensure_surface_bo(
181 VADriverContextP ctx,
182 struct decode_state *decode_state,
183 struct object_surface *obj_surface,
184 const VAPictureParameterBufferH264 *pic_param
188 uint32_t hw_fourcc, fourcc, subsample, chroma_format;
190 /* Validate chroma format */
191 switch (pic_param->seq_fields.bits.chroma_format_idc) {
193 fourcc = VA_FOURCC_Y800;
194 subsample = SUBSAMPLE_YUV400;
195 chroma_format = VA_RT_FORMAT_YUV400;
198 fourcc = VA_FOURCC_NV12;
199 subsample = SUBSAMPLE_YUV420;
200 chroma_format = VA_RT_FORMAT_YUV420;
203 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
206 /* Determine the HW surface format, bound to VA config needs */
207 if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
212 case VA_FOURCC_Y800: // Implement with an NV12 surface
213 if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
214 hw_fourcc = VA_FOURCC_NV12;
215 subsample = SUBSAMPLE_YUV420;
221 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
223 /* (Re-)allocate the underlying surface buffer store, if necessary */
224 if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
225 struct i965_driver_data * const i965 = i965_driver_data(ctx);
227 i965_destroy_surface_storage(obj_surface);
228 va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
229 i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
230 if (va_status != VA_STATUS_SUCCESS)
234 /* Fake chroma components if grayscale is implemented on top of NV12 */
235 if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
236 const uint32_t uv_offset = obj_surface->width * obj_surface->height;
237 const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;
239 drm_intel_gem_bo_map_gtt(obj_surface->bo);
240 memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
241 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
243 return VA_STATUS_SUCCESS;
246 /* Generate flat scaling matrices for H.264 decoding */
248 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
251 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
254 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
257 /* Returns the POC of the supplied VA picture */
259 avc_get_picture_poc(const VAPictureH264 *va_pic)
261 int structure, field_poc[2];
263 structure = va_pic->flags &
264 (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD);
265 field_poc[0] = structure != VA_PICTURE_H264_BOTTOM_FIELD ?
266 va_pic->TopFieldOrderCnt : INT_MAX;
267 field_poc[1] = structure != VA_PICTURE_H264_TOP_FIELD ?
268 va_pic->BottomFieldOrderCnt : INT_MAX;
269 return MIN(field_poc[0], field_poc[1]);
272 /* Returns a unique picture ID that represents the supplied VA surface object */
274 avc_get_picture_id(struct object_surface *obj_surface)
278 /* This highly depends on how the internal VA objects are organized.
280 Theory of operations:
281 The VA objects are maintained in heaps so that any released VA
282 surface will become free again for future allocation. This means
283 that holes in there are filled in for subsequent allocations.
284 So, this ultimately means that we could just use the Heap ID of
285 the VA surface as the resulting picture ID (16 bits) */
286 pic_id = 1 + (obj_surface->base.id & OBJECT_HEAP_ID_MASK);
287 return (pic_id <= 0xffff) ? pic_id : -1;
290 /* Finds the VA/H264 picture associated with the specified VA surface id */
292 avc_find_picture(VASurfaceID id, VAPictureH264 *pic_list, int pic_list_count)
296 if (id != VA_INVALID_ID) {
297 for (i = 0; i < pic_list_count; i++) {
298 VAPictureH264 * const va_pic = &pic_list[i];
299 if (va_pic->picture_id == id &&
300 !(va_pic->flags & VA_PICTURE_H264_INVALID))
307 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
308 /* XXX: slice_data_bit_offset does not account for EPB */
310 avc_get_first_mb_bit_offset(
311 dri_bo *slice_data_bo,
312 VASliceParameterBufferH264 *slice_param,
313 unsigned int mode_flag
316 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
318 if (mode_flag == ENTROPY_CABAC)
319 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
320 return slice_data_bit_offset;
323 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
324 /* XXX: slice_data_bit_offset does not account for EPB */
326 avc_get_first_mb_bit_offset_with_epb(
327 dri_bo *slice_data_bo,
328 VASliceParameterBufferH264 *slice_param,
329 unsigned int mode_flag
332 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
333 unsigned int out_slice_data_bit_offset;
334 unsigned int i, j, n = 0, buf_size, data_size, header_size;
338 header_size = slice_param->slice_data_bit_offset / 8;
339 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
340 buf_size = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
342 if (buf_size > data_size)
343 buf_size = data_size;
345 buf = malloc(buf_size);
350 ret = dri_bo_get_subdata(
351 slice_data_bo, slice_param->slice_data_offset,
356 for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
357 if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
364 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
366 if (mode_flag == ENTROPY_CABAC)
367 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
368 return out_slice_data_bit_offset;
371 static inline uint8_t
372 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
374 /* The H.264 standard, and the VA-API specification, allows for at
375 least 3 states for a picture: "used for short-term reference",
376 "used for long-term reference", or considered as not used for
379 The latter is used in the MVC inter prediction and inter-view
380 prediction process (H.8.4). This has an incidence on the
381 colZeroFlag variable, as defined in 8.4.1.2.
383 Since it is not possible to directly program that flag, let's
384 make the hardware derive this value by assimilating "considered
385 as not used for reference" to a "not used for short-term
386 reference", and subsequently making it "used for long-term
387 reference" to fit the definition of Bit6 here */
388 const unsigned int ref_flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE |
389 VA_PICTURE_H264_LONG_TERM_REFERENCE;
390 const unsigned int is_long_term =
391 ((va_pic->flags & ref_flags) != VA_PICTURE_H264_SHORT_TERM_REFERENCE);
392 const unsigned int is_top_field =
393 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
394 const unsigned int is_bottom_field =
395 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
397 return ((is_long_term << 6) |
398 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
399 (frame_store_id << 1) |
400 ((is_top_field ^ 1) & is_bottom_field));
403 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
405 gen5_fill_avc_ref_idx_state(
407 const VAPictureH264 ref_list[32],
408 unsigned int ref_list_count,
409 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
414 for (i = 0; i < ref_list_count; i++) {
415 const VAPictureH264 * const va_pic = &ref_list[i];
417 if ((va_pic->flags & VA_PICTURE_H264_INVALID) ||
418 va_pic->picture_id == VA_INVALID_ID) {
423 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
424 if (frame_store[j].surface_id == va_pic->picture_id)
428 if (j != MAX_GEN_REFERENCE_FRAMES) { // Found picture in the Frame Store
429 const GenFrameStore * const fs = &frame_store[j];
430 assert(fs->frame_store_id == j); // Current architecture/assumption
431 state[i] = get_ref_idx_state_1(va_pic, fs->frame_store_id);
434 WARN_ONCE("Invalid RefPicListX[] entry!!! It is not included in DPB\n");
435 state[i] = get_ref_idx_state_1(va_pic, 0) | 0x80;
443 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
445 gen6_send_avc_ref_idx_state_1(
446 struct intel_batchbuffer *batch,
448 const VAPictureH264 *ref_list,
449 unsigned int ref_list_count,
450 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
453 uint8_t ref_idx_state[32];
455 BEGIN_BCS_BATCH(batch, 10);
456 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
457 OUT_BCS_BATCH(batch, list);
458 gen5_fill_avc_ref_idx_state(
460 ref_list, ref_list_count,
463 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
464 ADVANCE_BCS_BATCH(batch);
468 gen6_send_avc_ref_idx_state(
469 struct intel_batchbuffer *batch,
470 const VASliceParameterBufferH264 *slice_param,
471 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
474 if (slice_param->slice_type == SLICE_TYPE_I ||
475 slice_param->slice_type == SLICE_TYPE_SI)
479 gen6_send_avc_ref_idx_state_1(
481 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
485 if (slice_param->slice_type != SLICE_TYPE_B)
489 gen6_send_avc_ref_idx_state_1(
491 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
497 gen6_mfd_avc_phantom_slice_state(VADriverContextP ctx,
498 VAPictureParameterBufferH264 *pic_param,
499 VASliceParameterBufferH264 *next_slice_param,
500 struct intel_batchbuffer *batch)
502 int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
503 int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
504 int slice_hor_pos, slice_ver_pos, slice_start_mb_num, next_slice_hor_pos, next_slice_ver_pos;
505 int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
506 pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
508 if (next_slice_param) {
509 int first_mb_in_next_slice;
513 slice_start_mb_num = 0;
514 first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
515 next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs;
516 next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
519 slice_ver_pos = height_in_mbs;
520 slice_start_mb_num = width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
521 next_slice_hor_pos = 0;
522 next_slice_ver_pos = 0;
525 BEGIN_BCS_BATCH(batch, 11);
526 OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
527 OUT_BCS_BATCH(batch, 0);
528 OUT_BCS_BATCH(batch, 0);
529 OUT_BCS_BATCH(batch, 0);
531 slice_ver_pos << 24 |
532 slice_hor_pos << 16 |
533 slice_start_mb_num << 0);
535 next_slice_ver_pos << 16 |
536 next_slice_hor_pos << 0);
537 OUT_BCS_BATCH(batch, 0);
538 OUT_BCS_BATCH(batch, 0);
539 OUT_BCS_BATCH(batch, 0);
540 OUT_BCS_BATCH(batch, 0);
541 OUT_BCS_BATCH(batch, 0);
542 ADVANCE_BCS_BATCH(batch);
546 gen6_mfd_avc_phantom_slice_bsd_object(VADriverContextP ctx,
547 VAPictureParameterBufferH264 *pic_param,
548 struct intel_batchbuffer *batch)
551 BEGIN_BCS_BATCH(batch, 6);
552 OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
553 OUT_BCS_BATCH(batch, 0);
554 OUT_BCS_BATCH(batch, 0);
555 OUT_BCS_BATCH(batch, 0);
556 OUT_BCS_BATCH(batch, 0);
557 OUT_BCS_BATCH(batch, 0);
558 ADVANCE_BCS_BATCH(batch);
562 gen6_mfd_avc_phantom_slice(VADriverContextP ctx,
563 VAPictureParameterBufferH264 *pic_param,
564 VASliceParameterBufferH264 *next_slice_param,
565 struct intel_batchbuffer *batch)
567 gen6_mfd_avc_phantom_slice_state(ctx, pic_param, next_slice_param, batch);
568 gen6_mfd_avc_phantom_slice_bsd_object(ctx, pic_param, batch);
571 /* Comparison function for sorting out the array of free frame store entries */
573 compare_avc_ref_store_func(const void *p1, const void *p2)
575 const GenFrameStore * const fs1 = *((GenFrameStore **)p1);
576 const GenFrameStore * const fs2 = *((GenFrameStore **)p2);
578 return fs1->ref_age - fs2->ref_age;
582 intel_update_codec_frame_store_index(
583 VADriverContextP ctx,
584 struct decode_state *decode_state,
586 GenFrameStore frame_store[],
588 GenFrameStoreContext *fs_ctx
591 GenFrameStore **free_refs = calloc(num_elements, sizeof(GenFrameStore *));
592 uint32_t used_refs = 0, add_refs = 0;
594 int i, n, num_free_refs;
599 /* Detect changes of access unit */
600 if (fs_ctx->age == 0 || fs_ctx->prev_poc != poc)
602 fs_ctx->prev_poc = poc;
605 /* Tag entries that are still available in our Frame Store */
606 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
607 struct object_surface * const obj_surface =
608 decode_state->reference_objects[i];
612 GenCodecSurface * const codec_surface = obj_surface->private_data;
615 if (codec_surface->frame_store_id >= 0) {
616 GenFrameStore * const fs =
617 &frame_store[codec_surface->frame_store_id];
618 if (fs->surface_id == obj_surface->base.id) {
619 fs->obj_surface = obj_surface;
621 used_refs |= 1 << fs->frame_store_id;
628 /* Build and sort out the list of retired candidates. The resulting
629 list is ordered by increasing age when they were last used */
630 for (i = 0, n = 0; i < num_elements; i++) {
631 if (!(used_refs & (1 << i))) {
632 GenFrameStore * const fs = &frame_store[i];
633 fs->obj_surface = NULL;
638 qsort(&free_refs[0], n, sizeof(free_refs[0]), compare_avc_ref_store_func);
640 /* Append the new reference frames */
641 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
642 struct object_surface * const obj_surface =
643 decode_state->reference_objects[i];
644 if (!obj_surface || !(add_refs & (1 << i)))
647 GenCodecSurface * const codec_surface = obj_surface->private_data;
650 if (n < num_free_refs) {
651 GenFrameStore * const fs = free_refs[n++];
652 fs->surface_id = obj_surface->base.id;
653 fs->obj_surface = obj_surface;
654 fs->frame_store_id = fs - frame_store;
656 codec_surface->frame_store_id = fs->frame_store_id;
659 WARN_ONCE("No free slot found for DPB reference list!!!\n");
666 intel_update_avc_frame_store_index(
667 VADriverContextP ctx,
668 struct decode_state *decode_state,
669 VAPictureParameterBufferH264 *pic_param,
670 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES],
671 GenFrameStoreContext *fs_ctx
674 intel_update_codec_frame_store_index(ctx,
676 avc_get_picture_poc(&pic_param->CurrPic),
678 MAX_GEN_REFERENCE_FRAMES,
683 intel_update_hevc_frame_store_index(
684 VADriverContextP ctx,
685 struct decode_state *decode_state,
686 VAPictureParameterBufferHEVC *pic_param,
687 GenFrameStore frame_store[MAX_GEN_HCP_REFERENCE_FRAMES],
688 GenFrameStoreContext *fs_ctx
693 for (i = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
694 struct object_surface * const obj_surface = decode_state->reference_objects[i];
699 GenFrameStore * const fs = &frame_store[n];
700 fs->surface_id = obj_surface->base.id;
701 fs->obj_surface = obj_surface;
702 fs->frame_store_id = n++;
704 if (n == MAX_GEN_HCP_REFERENCE_FRAMES)
708 for (; n < MAX_GEN_HCP_REFERENCE_FRAMES; n++) {
709 GenFrameStore * const fs = &frame_store[n];
711 fs->surface_id = VA_INVALID_ID;
712 fs->obj_surface = NULL;
713 fs->frame_store_id = -1;
718 gen75_update_avc_frame_store_index(
719 VADriverContextP ctx,
720 struct decode_state *decode_state,
721 VAPictureParameterBufferH264 *pic_param,
722 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
727 /* Construct the Frame Store array, in compact form. i.e. empty or
728 invalid entries are discarded. */
729 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
730 struct object_surface * const obj_surface =
731 decode_state->reference_objects[i];
735 GenFrameStore * const fs = &frame_store[n];
736 fs->surface_id = obj_surface->base.id;
737 fs->obj_surface = obj_surface;
738 fs->frame_store_id = n++;
741 /* Any remaining entry is marked as invalid */
742 for (; n < MAX_GEN_REFERENCE_FRAMES; n++) {
743 GenFrameStore * const fs = &frame_store[n];
744 fs->surface_id = VA_INVALID_ID;
745 fs->obj_surface = NULL;
746 fs->frame_store_id = -1;
751 gen75_fill_avc_picid_list(
752 uint16_t pic_ids[16],
753 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
758 /* Fill in with known picture IDs. The Frame Store array is in
759 compact form, i.e. empty entries are only to be found at the
760 end of the array: there are no holes in the set of active
762 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
763 GenFrameStore * const fs = &frame_store[i];
764 if (!fs->obj_surface)
766 pic_id = avc_get_picture_id(fs->obj_surface);
772 /* When an element of the list is not relevant the value of the
773 picture ID shall be set to 0 */
774 for (; i < MAX_GEN_REFERENCE_FRAMES; i++)
780 gen75_send_avc_picid_state(
781 struct intel_batchbuffer *batch,
782 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
785 uint16_t pic_ids[16];
787 if (!gen75_fill_avc_picid_list(pic_ids, frame_store))
790 BEGIN_BCS_BATCH(batch, 10);
791 OUT_BCS_BATCH(batch, MFD_AVC_PICID_STATE | (10 - 2));
792 OUT_BCS_BATCH(batch, 0); // enable Picture ID Remapping
793 intel_batchbuffer_data(batch, pic_ids, sizeof(pic_ids));
794 ADVANCE_BCS_BATCH(batch);
799 intel_update_vc1_frame_store_index(VADriverContextP ctx,
800 struct decode_state *decode_state,
801 VAPictureParameterBufferVC1 *pic_param,
802 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
804 struct object_surface *obj_surface;
807 obj_surface = decode_state->reference_objects[0];
809 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
812 frame_store[0].surface_id = VA_INVALID_ID;
813 frame_store[0].obj_surface = NULL;
815 frame_store[0].surface_id = pic_param->forward_reference_picture;
816 frame_store[0].obj_surface = obj_surface;
819 obj_surface = decode_state->reference_objects[1];
821 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
824 frame_store[1].surface_id = frame_store[0].surface_id;
825 frame_store[1].obj_surface = frame_store[0].obj_surface;
827 frame_store[1].surface_id = pic_param->backward_reference_picture;
828 frame_store[1].obj_surface = obj_surface;
830 for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
831 frame_store[i].surface_id = frame_store[i % 2].surface_id;
832 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
838 intel_update_vp8_frame_store_index(VADriverContextP ctx,
839 struct decode_state *decode_state,
840 VAPictureParameterBufferVP8 *pic_param,
841 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
843 struct object_surface *obj_surface;
846 obj_surface = decode_state->reference_objects[0];
848 if (pic_param->last_ref_frame == VA_INVALID_ID ||
851 frame_store[0].surface_id = VA_INVALID_ID;
852 frame_store[0].obj_surface = NULL;
854 frame_store[0].surface_id = pic_param->last_ref_frame;
855 frame_store[0].obj_surface = obj_surface;
858 obj_surface = decode_state->reference_objects[1];
860 if (pic_param->golden_ref_frame == VA_INVALID_ID ||
863 frame_store[1].surface_id = frame_store[0].surface_id;
864 frame_store[1].obj_surface = frame_store[0].obj_surface;
866 frame_store[1].surface_id = pic_param->golden_ref_frame;
867 frame_store[1].obj_surface = obj_surface;
870 obj_surface = decode_state->reference_objects[2];
872 if (pic_param->alt_ref_frame == VA_INVALID_ID ||
875 frame_store[2].surface_id = frame_store[0].surface_id;
876 frame_store[2].obj_surface = frame_store[0].obj_surface;
878 frame_store[2].surface_id = pic_param->alt_ref_frame;
879 frame_store[2].obj_surface = obj_surface;
882 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
883 frame_store[i].surface_id = frame_store[i % 2].surface_id;
884 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
889 //Obtain the reference frames from the decode state and store them in frame store.
891 intel_update_vp9_frame_store_index(VADriverContextP ctx,
892 struct decode_state *decode_state,
893 VADecPictureParameterBufferVP9 *pic_param,
894 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
896 struct object_surface *obj_surface;
899 //Check for the validity of the last reference frame
900 obj_surface = decode_state->reference_objects[0];
902 index = pic_param->pic_fields.bits.last_ref_frame;
903 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
906 frame_store[0].surface_id = VA_INVALID_ID;
907 frame_store[0].obj_surface = NULL;
909 frame_store[0].surface_id = pic_param->reference_frames[index];
910 frame_store[0].obj_surface = obj_surface;
913 //Check for the validity of the golden reference frame
914 obj_surface = decode_state->reference_objects[1];
916 index = pic_param->pic_fields.bits.golden_ref_frame;
917 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
920 frame_store[1].surface_id = frame_store[0].surface_id;
921 frame_store[1].obj_surface = frame_store[0].obj_surface;
923 frame_store[1].surface_id = pic_param->reference_frames[index];
924 frame_store[1].obj_surface = obj_surface;
927 //Check for the validity of the altref reference frame
928 obj_surface = decode_state->reference_objects[2];
930 index = pic_param->pic_fields.bits.alt_ref_frame;
931 if (pic_param->reference_frames[index] == VA_INVALID_ID ||
934 frame_store[2].surface_id = frame_store[0].surface_id;
935 frame_store[2].obj_surface = frame_store[0].obj_surface;
937 frame_store[2].surface_id = pic_param->reference_frames[index];
938 frame_store[2].obj_surface = obj_surface;
941 //Set the remaining framestores to either last/golden/altref
942 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
943 frame_store[i].surface_id = frame_store[i % 2].surface_id;
944 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
950 intel_decoder_check_avc_parameter(VADriverContextP ctx,
951 VAProfile h264_profile,
952 struct decode_state *decode_state)
954 struct i965_driver_data *i965 = i965_driver_data(ctx);
955 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
957 struct object_surface *obj_surface;
959 VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
962 ASSERT_RET(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID), VA_STATUS_ERROR_INVALID_PARAMETER);
963 ASSERT_RET((pic_param->CurrPic.picture_id != VA_INVALID_SURFACE), VA_STATUS_ERROR_INVALID_PARAMETER);
964 ASSERT_RET((pic_param->CurrPic.picture_id == decode_state->current_render_target), VA_STATUS_ERROR_INVALID_PARAMETER);
966 if ((h264_profile != VAProfileH264Baseline)) {
967 if (pic_param->num_slice_groups_minus1 ||
968 pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
969 WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
974 /* Fill in the reference objects array with the actual VA surface
975 objects with 1:1 correspondance with any entry in ReferenceFrames[],
976 i.e. including "holes" for invalid entries, that are expanded
977 to NULL in the reference_objects[] array */
978 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
979 const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];
982 if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
983 va_pic->picture_id != VA_INVALID_ID) {
984 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
986 return VA_STATUS_ERROR_INVALID_SURFACE;
989 * Sometimes a dummy frame comes from the upper layer
990 * library, call i965_check_alloc_surface_bo() to make
991 * sure the store buffer is allocated for this reference
994 va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
996 if (va_status != VA_STATUS_SUCCESS)
999 decode_state->reference_objects[i] = obj_surface;
1002 for (j = 0; j < decode_state->num_slice_params; j++) {
1003 ASSERT_RET((decode_state->slice_params && decode_state->slice_params[j]->buffer), VA_STATUS_ERROR_INVALID_PARAMETER);
1004 slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1006 if (j == decode_state->num_slice_params - 1)
1007 next_slice_group_param = NULL;
1009 next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
1011 for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1013 if (i < decode_state->slice_params[j]->num_elements - 1)
1014 next_slice_param = slice_param + 1;
1016 next_slice_param = next_slice_group_param;
1018 if (next_slice_param != NULL) {
1019 /* If the mb position of next_slice is less than or equal to the current slice,
1020 * discard the current frame.
1022 if (next_slice_param->first_mb_in_slice <= slice_param->first_mb_in_slice) {
1023 next_slice_param = NULL;
1024 WARN_ONCE("!!!incorrect slice_param. The first_mb_in_slice of next_slice is less"
1025 " than or equal to that in current slice\n");
1032 return VA_STATUS_SUCCESS;
1035 return VA_STATUS_ERROR_INVALID_PARAMETER;
1039 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
1040 struct decode_state *decode_state)
1042 struct i965_driver_data *i965 = i965_driver_data(ctx);
1043 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1044 struct object_surface *obj_surface;
1047 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
1048 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
1049 obj_surface = SURFACE(pic_param->forward_reference_picture);
1051 if (!obj_surface || !obj_surface->bo)
1052 decode_state->reference_objects[i++] = NULL;
1054 decode_state->reference_objects[i++] = obj_surface;
1055 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
1056 obj_surface = SURFACE(pic_param->forward_reference_picture);
1058 if (!obj_surface || !obj_surface->bo)
1059 decode_state->reference_objects[i++] = NULL;
1061 decode_state->reference_objects[i++] = obj_surface;
1063 obj_surface = SURFACE(pic_param->backward_reference_picture);
1065 if (!obj_surface || !obj_surface->bo)
1066 decode_state->reference_objects[i++] = NULL;
1068 decode_state->reference_objects[i++] = obj_surface;
1072 for ( ; i < 16; i++)
1073 decode_state->reference_objects[i] = NULL;
1075 return VA_STATUS_SUCCESS;
1078 return VA_STATUS_ERROR_INVALID_PARAMETER;
1082 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
1083 struct decode_state *decode_state)
1085 struct i965_driver_data *i965 = i965_driver_data(ctx);
1086 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1087 struct object_surface *obj_surface;
1090 if (pic_param->sequence_fields.bits.interlace == 1 &&
1091 pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1092 return VA_STATUS_ERROR_DECODING_ERROR;
1095 if (pic_param->picture_fields.bits.picture_type == 0 ||
1096 pic_param->picture_fields.bits.picture_type == 3) {
1097 } else if (pic_param->picture_fields.bits.picture_type == 1 ||
1098 pic_param->picture_fields.bits.picture_type == 4) {
1099 obj_surface = SURFACE(pic_param->forward_reference_picture);
1101 if (!obj_surface || !obj_surface->bo)
1102 decode_state->reference_objects[i++] = NULL;
1104 decode_state->reference_objects[i++] = obj_surface;
1105 } else if (pic_param->picture_fields.bits.picture_type == 2) {
1106 obj_surface = SURFACE(pic_param->forward_reference_picture);
1108 if (!obj_surface || !obj_surface->bo)
1109 decode_state->reference_objects[i++] = NULL;
1111 decode_state->reference_objects[i++] = obj_surface;
1113 obj_surface = SURFACE(pic_param->backward_reference_picture);
1115 if (!obj_surface || !obj_surface->bo)
1116 decode_state->reference_objects[i++] = NULL;
1118 decode_state->reference_objects[i++] = obj_surface;
1122 for ( ; i < 16; i++)
1123 decode_state->reference_objects[i] = NULL;
1125 return VA_STATUS_SUCCESS;
1128 return VA_STATUS_ERROR_INVALID_PARAMETER;
1132 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
1133 struct decode_state *decode_state)
1135 struct i965_driver_data *i965 = i965_driver_data(ctx);
1136 VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
1137 struct object_surface *obj_surface;
1140 if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
1141 obj_surface = SURFACE(pic_param->last_ref_frame);
1143 if (obj_surface && obj_surface->bo)
1144 decode_state->reference_objects[i++] = obj_surface;
1146 decode_state->reference_objects[i++] = NULL;
1149 if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
1150 obj_surface = SURFACE(pic_param->golden_ref_frame);
1152 if (obj_surface && obj_surface->bo)
1153 decode_state->reference_objects[i++] = obj_surface;
1155 decode_state->reference_objects[i++] = NULL;
1158 if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
1159 obj_surface = SURFACE(pic_param->alt_ref_frame);
1161 if (obj_surface && obj_surface->bo)
1162 decode_state->reference_objects[i++] = obj_surface;
1164 decode_state->reference_objects[i++] = NULL;
1167 for ( ; i < 16; i++)
1168 decode_state->reference_objects[i] = NULL;
1170 return VA_STATUS_SUCCESS;
1174 hevc_ensure_surface_bo(
1175 VADriverContextP ctx,
1176 struct decode_state *decode_state,
1177 struct object_surface *obj_surface,
1178 const VAPictureParameterBufferHEVC *pic_param
1181 VAStatus va_status = VA_STATUS_SUCCESS;
1183 unsigned int fourcc = VA_FOURCC_NV12;
1185 if((pic_param->bit_depth_luma_minus8 > 0)
1186 || (pic_param->bit_depth_chroma_minus8 > 0))
1188 if(obj_surface->fourcc != VA_FOURCC_P010)
1191 fourcc = VA_FOURCC_P010;
1194 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1197 fourcc = VA_FOURCC_NV12;
1200 /* (Re-)allocate the underlying surface buffer store, if necessary */
1201 if (!obj_surface->bo || update) {
1202 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1204 i965_destroy_surface_storage(obj_surface);
1206 va_status = i965_check_alloc_surface_bo(ctx,
1208 i965->codec_info->has_tiled_surface,
1216 //Ensure there is a tiled render surface in NV12 format. If not, create one.
1218 vp9_ensure_surface_bo(
1219 VADriverContextP ctx,
1220 struct decode_state *decode_state,
1221 struct object_surface *obj_surface,
1222 const VADecPictureParameterBufferVP9 *pic_param
1225 VAStatus va_status = VA_STATUS_SUCCESS;
1227 unsigned int fourcc = VA_FOURCC_NV12;
1229 if(pic_param->profile >= 2)
1231 if(obj_surface->fourcc != VA_FOURCC_P010)
1234 fourcc = VA_FOURCC_P010;
1237 else if(obj_surface->fourcc != VA_FOURCC_NV12)
1240 fourcc = VA_FOURCC_NV12;
1243 /* (Re-)allocate the underlying surface buffer store, if necessary */
1244 if (!obj_surface->bo || update) {
1245 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1247 i965_destroy_surface_storage(obj_surface);
1249 va_status = i965_check_alloc_surface_bo(ctx,
1251 i965->codec_info->has_tiled_surface,
1260 intel_decoder_check_hevc_parameter(VADriverContextP ctx,
1261 struct decode_state *decode_state)
1263 struct i965_driver_data *i965 = i965_driver_data(ctx);
1264 VAPictureParameterBufferHEVC *pic_param = (VAPictureParameterBufferHEVC *)decode_state->pic_param->buffer;
1265 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1266 struct object_surface *obj_surface;
1270 if (pic_param->CurrPic.flags & VA_PICTURE_HEVC_INVALID ||
1271 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
1274 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
1277 min_cb_size = (1 << (pic_param->log2_min_luma_coding_block_size_minus3 + 3));
1279 if (pic_param->pic_width_in_luma_samples % min_cb_size ||
1280 pic_param->pic_height_in_luma_samples % min_cb_size)
1283 /* Fill in the reference objects array with the actual VA surface
1284 objects with 1:1 correspondance with any entry in ReferenceFrames[],
1285 i.e. including "holes" for invalid entries, that are expanded
1286 to NULL in the reference_objects[] array */
1287 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
1288 const VAPictureHEVC * const va_pic = &pic_param->ReferenceFrames[i];
1293 * Only the index with (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1294 * VA_PICTURE_HEVC_RPS_ST_CURR_AFTER | VA_PICTURE_HEVC_RPS_LT_CURR)
1297 if (!(va_pic->flags & VA_PICTURE_HEVC_INVALID) &&
1298 (va_pic->picture_id != VA_INVALID_ID) &&
1299 (va_pic->flags & (VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE |
1300 VA_PICTURE_HEVC_RPS_ST_CURR_AFTER |
1301 VA_PICTURE_HEVC_RPS_LT_CURR))) {
1303 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
1306 va_status = VA_STATUS_ERROR_INVALID_SURFACE;
1310 va_status = hevc_ensure_surface_bo(ctx, decode_state, obj_surface,
1313 if (va_status != VA_STATUS_SUCCESS)
1317 decode_state->reference_objects[i] = obj_surface;
1320 va_status = VA_STATUS_SUCCESS;
1326 //Obtains reference frames from the picture parameter and
1327 //then sets the reference frames in the decode_state
1329 intel_decoder_check_vp9_parameter(VADriverContextP ctx,
1331 struct decode_state *decode_state)
1333 struct i965_driver_data *i965 = i965_driver_data(ctx);
1334 VADecPictureParameterBufferVP9 *pic_param = (VADecPictureParameterBufferVP9 *)decode_state->pic_param->buffer;
1335 VAStatus va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
1336 struct object_surface *obj_surface;
1339 if((profile - VAProfileVP9Profile0) < pic_param->profile)
1342 //Max support upto 4k for BXT
1343 if ((pic_param->frame_width-1 < 0) || (pic_param->frame_width-1 > 4095))
1346 if ((pic_param->frame_height-1 < 0) || (pic_param->frame_height-1 > 4095))
1349 //Set the reference object in decode state for last reference
1350 index = pic_param->pic_fields.bits.last_ref_frame;
1351 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1352 obj_surface = SURFACE(pic_param->reference_frames[index]);
1354 if (obj_surface && obj_surface->bo)
1355 decode_state->reference_objects[i++] = obj_surface;
1357 decode_state->reference_objects[i++] = NULL;
1360 //Set the reference object in decode state for golden reference
1361 index = pic_param->pic_fields.bits.golden_ref_frame;
1362 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1363 obj_surface = SURFACE(pic_param->reference_frames[index]);
1365 if (obj_surface && obj_surface->bo)
1366 decode_state->reference_objects[i++] = obj_surface;
1368 decode_state->reference_objects[i++] = NULL;
1371 //Set the reference object in decode state for altref reference
1372 index = pic_param->pic_fields.bits.alt_ref_frame;
1373 if (pic_param->reference_frames[index] != VA_INVALID_SURFACE) {
1374 obj_surface = SURFACE(pic_param->reference_frames[index]);
1376 if (obj_surface && obj_surface->bo)
1377 decode_state->reference_objects[i++] = obj_surface;
1379 decode_state->reference_objects[i++] = NULL;
1382 for ( ; i < 16; i++)
1383 decode_state->reference_objects[i] = NULL;
1385 return VA_STATUS_SUCCESS;
1389 intel_decoder_sanity_check_input(VADriverContextP ctx,
1391 struct decode_state *decode_state)
1393 struct i965_driver_data *i965 = i965_driver_data(ctx);
1394 struct object_surface *obj_surface;
1395 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1397 if (decode_state->current_render_target == VA_INVALID_SURFACE)
1400 obj_surface = SURFACE(decode_state->current_render_target);
1405 decode_state->render_object = obj_surface;
1408 case VAProfileMPEG2Simple:
1409 case VAProfileMPEG2Main:
1410 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
1413 case VAProfileH264ConstrainedBaseline:
1414 case VAProfileH264Main:
1415 case VAProfileH264High:
1416 case VAProfileH264StereoHigh:
1417 case VAProfileH264MultiviewHigh:
1418 vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
1421 case VAProfileVC1Simple:
1422 case VAProfileVC1Main:
1423 case VAProfileVC1Advanced:
1424 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
1427 case VAProfileJPEGBaseline:
1428 vaStatus = VA_STATUS_SUCCESS;
1431 case VAProfileVP8Version0_3:
1432 vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
1435 case VAProfileHEVCMain:
1436 case VAProfileHEVCMain10:
1437 vaStatus = intel_decoder_check_hevc_parameter(ctx, decode_state);
1440 case VAProfileVP9Profile0:
1441 case VAProfileVP9Profile2:
1442 vaStatus = intel_decoder_check_vp9_parameter(ctx, profile, decode_state);
1446 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
1455 * Return the next slice paramter
1458 * slice_param: the current slice
1459 * *group_idx & *element_idx the current slice position in slice groups
1461 * Return the next slice parameter
1462 * *group_idx & *element_idx the next slice position in slice groups,
1463 * if the next slice is NULL, *group_idx & *element_idx will be ignored
1465 VASliceParameterBufferMPEG2 *
1466 intel_mpeg2_find_next_slice(struct decode_state *decode_state,
1467 VAPictureParameterBufferMPEG2 *pic_param,
1468 VASliceParameterBufferMPEG2 *slice_param,
1472 VASliceParameterBufferMPEG2 *next_slice_param;
1473 unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1474 int j = *group_idx, i = *element_idx + 1;
1476 for (; j < decode_state->num_slice_params; j++) {
1477 for (; i < decode_state->slice_params[j]->num_elements; i++) {
1478 next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;
1480 if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
1481 (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
1485 return next_slice_param;
1495 /* Ensure the segmentation buffer is large enough for the supplied
1496 number of MBs, or re-allocate it */
1498 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
1499 unsigned int mb_width, unsigned int mb_height)
1501 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1502 /* The segmentation map is a 64-byte aligned linear buffer, with
1503 each cache line holding only 8 bits for 4 continuous MBs */
1504 const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
1507 if (buf->bo && buf->bo->size >= buf_size)
1509 drm_intel_bo_unreference(buf->bo);
1513 buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
1515 buf->valid = buf->bo != NULL;
1520 hevc_gen_default_iq_matrix(VAIQMatrixBufferHEVC *iq_matrix)
1523 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
1526 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
1529 memset(&iq_matrix->ScalingList16x16, 16, sizeof(iq_matrix->ScalingList16x16));
1532 memset(&iq_matrix->ScalingList32x32, 16, sizeof(iq_matrix->ScalingList32x32));
1534 /* Flat_16x16_dc_16 */
1535 memset(&iq_matrix->ScalingListDC16x16, 16, sizeof(iq_matrix->ScalingListDC16x16));
1537 /* Flat_32x32_dc_16 */
1538 memset(&iq_matrix->ScalingListDC32x32, 16, sizeof(iq_matrix->ScalingListDC32x32));