2 * Copyright © 2017 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Peng Chen <peng.c.chen@intel.com>
35 #include "intel_driver.h"
36 #include "intel_batchbuffer.h"
37 #include "i965_defines.h"
38 #include "i965_drv_video.h"
39 #include "i965_encoder.h"
40 #include "i965_encoder_common.h"
41 #include "i965_encoder_utils.h"
42 #include "i965_encoder_api.h"
43 #include "gen10_hcp_common.h"
44 #include "gen10_hevc_enc_common.h"
/* Default HEVC scaling lists per ITU-T H.265 (default 4x4 list is flat 16s;
 * 8x8 intra/inter defaults below match spec Tables 7-5/7-6).
 * NOTE(review): the 16-entry initializer of default_scaling16 and the
 * closing "};" lines are absent from this extract; content kept verbatim. */
46 static const unsigned char default_scaling16[16] = {
/* Default 8x8 scaling list for intra blocks (row-major). */
53 static const unsigned char default_scaling_intra[64] = {
54 16, 16, 16, 16, 17, 18, 21, 24,
55 16, 16, 16, 16, 17, 19, 22, 25,
56 16, 16, 17, 18, 20, 22, 25, 29,
57 16, 16, 18, 21, 24, 27, 31, 36,
58 17, 17, 20, 24, 30, 35, 41, 47,
59 18, 19, 22, 27, 35, 44, 54, 65,
60 21, 22, 25, 31, 41, 54, 70, 88,
61 24, 25, 29, 36, 47, 65, 88, 115
/* Default 8x8 scaling list for inter blocks (row-major). */
64 static const unsigned char default_scaling_inter[64] = {
65 16, 16, 16, 16, 17, 18, 20, 24,
66 16, 16, 16, 17, 18, 20, 24, 25,
67 16, 16, 17, 18, 20, 24, 25, 28,
68 16, 17, 18, 20, 24, 25, 28, 33,
69 17, 18, 20, 24, 25, 28, 33, 41,
70 18, 20, 24, 25, 28, 33, 41, 54,
71 20, 24, 25, 28, 33, 41, 54, 71,
72 24, 25, 28, 33, 41, 54, 71, 91
/*
 * hevc_init_qm_matrix - populate frame_info's quant (qm) and forward-quant
 * (fqm) matrices plus their DC entries.
 *   matrix_flag == 0: copy user matrices from the VAQMatrixBufferHEVC;
 *   matrix_flag == 1: use the HEVC default scaling lists;
 *   matrix_flag == 2: flat matrices (qm entries 16, fqm entries 0x1000).
 * fqm entries are computed as 0xFFFF/qm (saturating to 0xFFFF for qm < 2).
 * NOTE(review): many original lines (braces, switch labels on the matrix
 * size m, some assignments) are missing from this extract; code verbatim.
 */
76 hevc_init_qm_matrix(struct gen10_hevc_enc_frame_info *frame_info,
77 VAQMatrixBufferHEVC *qm_matrix,
80 uint8_t *real_qm, real_dc_qm = 0;
81 uint16_t *real_fqm, *real_dc_fqm;
85 if (matrix_flag == 0) {
/* m indexes matrix size: 0=4x4, 1=8x8, 2=16x16, 3=32x32. */
86 for (m = 0; m < 4; m++) {
/* 32x32 has a single color component; others have Y/Cb/Cr. */
87 comps = (m == 3) ? 1 : 3;
88 len = (m == 0) ? 16 : 64;
/* j selects intra (0) / inter (1) list. */
90 for (i = 0; i < comps; i++) {
91 for (j = 0; j < 2; j++) {
92 real_qm = frame_info->qm_matrix[m][i][j];
96 memcpy(real_qm, qm_matrix->scaling_lists_4x4[i][j], len);
99 memcpy(real_qm, qm_matrix->scaling_lists_8x8[i][j], len);
102 memcpy(real_qm, qm_matrix->scaling_lists_16x16[i][j], len);
104 real_dc_qm = qm_matrix->scaling_list_dc_16x16[i][j];
105 frame_info->qm_dc_matrix[0][i][j] = real_dc_qm;
108 memcpy(real_qm, qm_matrix->scaling_lists_32x32[j], len);
110 real_dc_qm = qm_matrix->scaling_list_dc_32x32[j];
111 frame_info->qm_dc_matrix[1][i][j] = real_dc_qm;
118 real_fqm = frame_info->fqm_matrix[m][j];
/* Forward quant value: saturate to 0xFFFF when qm < 2. */
120 for (n = 0; n < len; n++) {
121 uint32_t qm_value = *(real_qm + n);
122 uint32_t fqm_value = 0;
124 fqm_value = (qm_value < 2) ? 0xFFFF : 0xFFFF / qm_value;
126 *(real_fqm + n) = fqm_value;
/* DC coefficients exist only for 16x16 (m==2) and 32x32 (m==3). */
129 if (m == 2 || m == 3) {
130 uint32_t dc = real_dc_qm;
132 real_dc_fqm = &frame_info->fqm_dc_matrix[m - 2][j];
133 dc = (dc < 2) ? 0xFFFF : 0xFFFF / dc;
141 } else if (matrix_flag == 1) {
/* Default scaling lists from the tables above. */
142 for (m = 0; m < 4; m++) {
143 comps = (m == 3) ? 1 : 3;
144 len = (m == 0) ? 16 : 64;
146 for (i = 0; i < comps; i++) {
147 for (j = 0; j < 2; j++) {
148 real_qm = frame_info->qm_matrix[m][i][j];
152 memcpy(real_qm, default_scaling16, len);
158 memcpy(real_qm, default_scaling_intra, len);
160 memcpy(real_qm, default_scaling_inter, len);
168 real_fqm = frame_info->fqm_matrix[m][j];
170 for (n = 0; n < len; n++) {
171 uint32_t qm_value = *(real_qm + n);
172 uint32_t fqm_value = 0;
174 fqm_value = (qm_value < 2) ? 0xFFFF : 0xFFFF / qm_value;
176 *(real_fqm + n) = fqm_value;
/* memset value 16 fills each uint8_t DC entry with 16 (flat default). */
183 memset(&frame_info->qm_dc_matrix, 16, sizeof(frame_info->qm_dc_matrix));
185 for (i = 0; i < 2; i++) {
186 for (j = 0; j < 2; j++)
187 frame_info->fqm_dc_matrix[i][j] = 0x1000;
190 } else if (matrix_flag == 2) {
/* Flat matrices: qm = 16 everywhere, fqm = 0x1000 (i.e. 0x10000/16). */
191 memset(&frame_info->qm_matrix, 16, sizeof(frame_info->qm_matrix));
192 memset(&frame_info->qm_dc_matrix, 16, sizeof(frame_info->qm_dc_matrix));
194 for (m = 0; m < 4; m++) {
195 for (j = 0; j < 2; j++) {
196 for (n = 0; n < 64; n++)
197 frame_info->fqm_matrix[m][j][n] = 0x1000;
201 for (i = 0; i < 2; i++) {
202 for (j = 0; j < 2; j++)
203 frame_info->fqm_dc_matrix[i][j] = 0x1000;
/*
 * hevc_enc_map_pic_index - find the index of surface `id` inside
 * `pic_list` (typically pic_param->reference_frames), skipping entries
 * flagged VA_PICTURE_HEVC_INVALID.
 * NOTE(review): the return statements are missing from this extract;
 * presumably returns the matching index, or -1 when not found / id is
 * VA_INVALID_ID — confirm against the full source.
 */
210 hevc_enc_map_pic_index(VASurfaceID id,
211 VAPictureHEVC *pic_list,
216 if (id != VA_INVALID_ID) {
217 for (i = 0; i < pic_list_count; i++) {
218 VAPictureHEVC * const va_pic = &pic_list[i];
220 if (va_pic->picture_id == id &&
221 !(va_pic->flags & VA_PICTURE_HEVC_INVALID))
/*
 * gen10_hevc_enc_get_relocation_flag - compare the current sequence
 * parameters against the previously cached ones; a difference in any
 * size/depth-affecting field means internal buffers must be reallocated.
 * NOTE(review): bit_depth_chroma_minus8 is compared THREE times (the
 * pairs at original lines 245-248 duplicate the one at 235-236) — this
 * looks like a copy-paste slip; other fields were probably intended.
 * Confirm against upstream before changing.
 */
230 gen10_hevc_enc_get_relocation_flag(VAEncSequenceParameterBufferHEVC *cur_seq_param,
231 VAEncSequenceParameterBufferHEVC *last_seq_param)
233 if ((cur_seq_param->seq_fields.bits.bit_depth_luma_minus8 !=
234 last_seq_param->seq_fields.bits.bit_depth_luma_minus8) ||
235 (cur_seq_param->seq_fields.bits.bit_depth_chroma_minus8 !=
236 last_seq_param->seq_fields.bits.bit_depth_chroma_minus8) ||
237 (cur_seq_param->log2_min_luma_coding_block_size_minus3 !=
238 last_seq_param->log2_min_luma_coding_block_size_minus3) ||
239 (cur_seq_param->log2_diff_max_min_luma_coding_block_size !=
240 last_seq_param->log2_diff_max_min_luma_coding_block_size) ||
241 (cur_seq_param->pic_width_in_luma_samples !=
242 last_seq_param->pic_width_in_luma_samples) ||
243 (cur_seq_param->pic_height_in_luma_samples !=
244 last_seq_param->pic_height_in_luma_samples) ||
/* NOTE(review): duplicated comparisons below — see header note. */
245 (cur_seq_param->seq_fields.bits.bit_depth_chroma_minus8 !=
246 last_seq_param->seq_fields.bits.bit_depth_chroma_minus8) ||
247 (cur_seq_param->seq_fields.bits.bit_depth_chroma_minus8 !=
248 last_seq_param->seq_fields.bits.bit_depth_chroma_minus8))
/*
 * gen10_hevc_enc_init_frame_info - derive per-frame encoding state
 * (sizes in CU/LCU/MB units, slice type, low-delay detection, mapped
 * reference indices, QM matrices, GOP info, reallocation flag) from the
 * VA-API sequence/picture/slice parameter buffers for the current frame.
 * NOTE(review): several lines (declarations, braces, matrix_flag
 * selection) are missing from this extract; code reproduced verbatim.
 */
255 gen10_hevc_enc_init_frame_info(VADriverContextP ctx,
256 struct encode_state *encode_state,
257 struct intel_encoder_context *encoder_context,
258 struct gen10_hevc_enc_frame_info *frame_info)
260 uint32_t log2_max_coding_block_size = 0, raw_ctu_bits = 0;
261 VAEncPictureParameterBufferHEVC *pic_param = NULL;
262 VAEncSequenceParameterBufferHEVC *seq_param = NULL;
263 VAEncSliceParameterBufferHEVC *slice_param = NULL;
266 pic_param = (VAEncPictureParameterBufferHEVC *)encode_state->pic_param_ext->buffer;
267 seq_param = (VAEncSequenceParameterBufferHEVC *)encode_state->seq_param_ext->buffer;
268 slice_param = (VAEncSliceParameterBufferHEVC *)encode_state->slice_params_ext[0]->buffer;
/* CU/LCU sizes come from the SPS log2 fields; frame dims are truncated
 * down to a whole number of minimum CUs. */
270 frame_info->bit_depth_luma_minus8 = seq_param->seq_fields.bits.bit_depth_luma_minus8;
271 frame_info->bit_depth_chroma_minus8 = seq_param->seq_fields.bits.bit_depth_chroma_minus8;
272 frame_info->cu_size = 1 << (seq_param->log2_min_luma_coding_block_size_minus3 + 3);
273 frame_info->lcu_size = 1 << (seq_param->log2_diff_max_min_luma_coding_block_size +
274 seq_param->log2_min_luma_coding_block_size_minus3 + 3);
275 frame_info->frame_width = (seq_param->pic_width_in_luma_samples / frame_info->cu_size) * frame_info->cu_size;
276 frame_info->frame_height = (seq_param->pic_height_in_luma_samples / frame_info->cu_size) * frame_info->cu_size;
277 frame_info->width_in_lcu = ALIGN(frame_info->frame_width, frame_info->lcu_size) / frame_info->lcu_size;
278 frame_info->height_in_lcu = ALIGN(frame_info->frame_height, frame_info->lcu_size) / frame_info->lcu_size;
279 frame_info->width_in_cu = ALIGN(frame_info->frame_width, frame_info->cu_size) / frame_info->cu_size;
280 frame_info->height_in_cu = ALIGN(frame_info->frame_height, frame_info->cu_size) / frame_info->cu_size;
281 frame_info->width_in_mb = ALIGN(frame_info->frame_width, 16) / 16;
282 frame_info->height_in_mb = ALIGN(frame_info->frame_height, 16) / 16;
284 frame_info->picture_coding_type = slice_param->slice_type;
286 frame_info->ctu_max_bitsize_allowed = pic_param->ctu_max_bitsize_allowed;
/* Worst-case raw CTU size (luma + chroma) scaled by 5/3 as the upper
 * bound for the per-CTU bit budget. */
288 log2_max_coding_block_size = seq_param->log2_min_luma_coding_block_size_minus3 + 3 +
289 seq_param->log2_diff_max_min_luma_coding_block_size;
290 raw_ctu_bits = (1 << (2 * log2_max_coding_block_size + 3)) +
291 (1 << (2 * log2_max_coding_block_size + 2));
292 raw_ctu_bits = (5 * raw_ctu_bits / 3);
294 if (frame_info->ctu_max_bitsize_allowed == 0 ||
295 frame_info->ctu_max_bitsize_allowed > raw_ctu_bits)
296 frame_info->ctu_max_bitsize_allowed = raw_ctu_bits;
298 frame_info->low_delay = 1;
299 frame_info->arbitrary_num_mb_in_slice = 0;
/* Scan every slice: detect identical L0/L1 lists, low-delay B frames
 * (no future POCs referenced), and slices not LCU-row-aligned. */
301 for (i = 0; i < encode_state->num_slice_params_ext; i++) {
302 slice_param = (VAEncSliceParameterBufferHEVC *)encode_state->slice_params_ext[i]->buffer;
305 frame_info->is_same_ref_list = 1;
306 if (slice_param->slice_type == HEVC_SLICE_B) {
307 if (slice_param->num_ref_idx_l0_active_minus1 >=
308 slice_param->num_ref_idx_l1_active_minus1) {
309 for (j = 0; j < slice_param->num_ref_idx_l1_active_minus1 + 1; j++)
310 if (slice_param->ref_pic_list0[j].picture_id !=
311 slice_param->ref_pic_list1[j].picture_id) {
312 frame_info->is_same_ref_list = 0;
316 frame_info->is_same_ref_list = 0;
/* low_delay stays 1 only if no reference POC exceeds the current POC. */
320 if (slice_param->slice_type == HEVC_SLICE_B && frame_info->low_delay) {
321 for (j = 0; j <= slice_param->num_ref_idx_l0_active_minus1; j++) {
322 if (pic_param->decoded_curr_pic.pic_order_cnt <
323 slice_param->ref_pic_list0[j].pic_order_cnt)
324 frame_info->low_delay = 0;
327 for (j = 0; j <= slice_param->num_ref_idx_l1_active_minus1; j++) {
328 if (pic_param->decoded_curr_pic.pic_order_cnt <
329 slice_param->ref_pic_list1[j].pic_order_cnt)
330 frame_info->low_delay = 0;
334 if (!frame_info->arbitrary_num_mb_in_slice &&
335 (slice_param->num_ctu_in_slice % frame_info->width_in_lcu))
336 frame_info->arbitrary_num_mb_in_slice = 1;
/* Map VA picture_ids in the slice ref lists to indices into
 * pic_param->reference_frames; -1 marks unused entries. */
339 for (i = 0; i < 8; i++) {
340 frame_info->mapped_ref_idx_list0[i] = -1;
341 frame_info->mapped_ref_idx_list1[i] = -1;
344 if (slice_param->slice_type != HEVC_SLICE_I) {
345 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
346 frame_info->mapped_ref_idx_list0[i] = hevc_enc_map_pic_index(slice_param->ref_pic_list0[i].picture_id,
347 pic_param->reference_frames, 8);
349 if (slice_param->slice_type == HEVC_SLICE_B) {
350 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
351 frame_info->mapped_ref_idx_list1[i] = hevc_enc_map_pic_index(slice_param->ref_pic_list1[i].picture_id,
352 pic_param->reference_frames, 8);
356 frame_info->slice_qp = pic_param->pic_init_qp + slice_param->slice_qp_delta;
/* Rebuild QM matrices and GOP info only when the SPS changed (or a new
 * sequence started), then cache the SPS for the next comparison. */
358 if (encoder_context->is_new_sequence ||
359 memcmp(&frame_info->last_seq_param, seq_param, sizeof(*seq_param))) {
360 VAQMatrixBufferHEVC *input_matrix = NULL;
363 if (seq_param->seq_fields.bits.scaling_list_enabled_flag) {
364 if (pic_param->pic_fields.bits.scaling_list_data_present_flag) {
365 if (encode_state->q_matrix && encode_state->q_matrix->buffer)
366 input_matrix = (VAQMatrixBufferHEVC *)encode_state->q_matrix->buffer;
367 else if (encode_state->iq_matrix && encode_state->iq_matrix->buffer)
368 input_matrix = (VAQMatrixBufferHEVC *)encode_state->iq_matrix->buffer;
376 hevc_init_qm_matrix(frame_info, input_matrix, matrix_flag);
378 frame_info->gop_size = seq_param->intra_period;
379 frame_info->gop_ref_dist = seq_param->ip_period;
380 frame_info->gop_num_p = encoder_context->brc.num_pframes_in_gop;
381 frame_info->gop_num_b[0] = encoder_context->brc.num_bframes_in_gop;
382 frame_info->gop_num_b[1] = 0;
383 frame_info->gop_num_b[2] = 0;
385 if (gen10_hevc_enc_get_relocation_flag(seq_param,
386 &frame_info->last_seq_param))
387 frame_info->reallocate_flag = 1;
389 frame_info->reallocate_flag = 0;
391 memcpy(&frame_info->last_seq_param, seq_param, sizeof(*seq_param));
393 frame_info->reallocate_flag = 0;
/*
 * hevc_find_skipemulcnt - compute how many leading bytes of a packed
 * header (start code + NAL header) the HW should skip before applying
 * emulation-prevention-byte insertion.
 * NOTE(review): most of the body (the byte comparisons and returns) is
 * missing from this extract; code reproduced verbatim.
 */
397 hevc_find_skipemulcnt(uint8_t *buf, int bits_length)
399 int skip_cnt = 0, i = 0;
/* Headers shorter than 6 bytes cannot contain a full start code. */
401 if ((bits_length >> 3) < 6)
404 for (i = 0; i < 3; i++)
/*
 * gen10_hevc_enc_insert_object - emit an HCP_PAK_INSERT_OBJECT command
 * that places `length_in_bits` of header bitstream into the output.
 * skip_emulation_bytes == 0 with emulation enabled means "auto-detect"
 * via hevc_find_skipemulcnt().
 */
417 gen10_hevc_enc_insert_object(VADriverContextP ctx,
418 struct intel_batchbuffer *batch,
419 uint8_t *header_data,
421 int end_of_slice_flag,
422 int last_header_flag,
424 int skip_emulation_bytes)
426 gen10_hcp_pak_insert_object_param insert_param;
428 memset(&insert_param, 0, sizeof(insert_param));
430 insert_param.dw1.bits.end_of_slice_flag = end_of_slice_flag;
431 insert_param.dw1.bits.last_header_flag = last_header_flag;
432 insert_param.dw1.bits.emulation_flag = emulation_flag;
434 if (emulation_flag) {
435 if (skip_emulation_bytes)
436 insert_param.dw1.bits.skip_emulation_bytes = skip_emulation_bytes;
438 insert_param.dw1.bits.skip_emulation_bytes = hevc_find_skipemulcnt((uint8_t *)header_data,
/* HW expects bit count within the last DWORD; 0 means a full 32 bits. */
442 insert_param.dw1.bits.data_bits_in_last_dw = length_in_bits & 0x1f;
443 if (insert_param.dw1.bits.data_bits_in_last_dw == 0)
444 insert_param.dw1.bits.data_bits_in_last_dw = 32;
446 insert_param.inline_payload_ptr = header_data;
447 insert_param.inline_payload_bits = length_in_bits;
449 gen10_hcp_pak_insert_object(ctx, batch, &insert_param);
/*
 * gen10_hevc_enc_insert_packed_header - walk the four non-slice packed
 * header slots (VPS/SPS/PPS/SEI by loop index) and insert each one that
 * the application supplied into the batch.
 * NOTE(review): the case labels are missing from this extract, and
 * VAEncPackedHeaderHEVC_VPS appears twice (original lines 469/472) —
 * the second is presumably the SPS slot sharing the VPS index via
 * idx_offset; confirm against the full switch.
 */
453 gen10_hevc_enc_insert_packed_header(VADriverContextP ctx,
454 struct encode_state *encode_state,
455 struct intel_encoder_context *encoder_context,
456 struct intel_batchbuffer *batch)
458 VAEncPackedHeaderParameterBuffer *param = NULL;
459 uint8_t *header_data = NULL;
460 uint32_t length_in_bits = 0;
462 int idx = 0, idx_offset = 0;
465 for (i = 0; i < 4; i++) {
469 packed_type = VAEncPackedHeaderHEVC_VPS;
472 packed_type = VAEncPackedHeaderHEVC_VPS;
476 packed_type = VAEncPackedHeaderHEVC_PPS;
479 packed_type = VAEncPackedHeaderHEVC_SEI;
485 idx = va_enc_packed_type_to_idx(packed_type) + idx_offset;
486 if (encode_state->packed_header_data[idx]) {
487 param = (VAEncPackedHeaderParameterBuffer *)encode_state->packed_header_param[idx]->buffer;
488 header_data = (uint8_t *)encode_state->packed_header_data[idx]->buffer;
489 length_in_bits = param->bit_length;
/* Let the HW add emulation bytes only if the app did not already. */
491 gen10_hevc_enc_insert_object(ctx, batch, header_data, length_in_bits,
492 0, 0, !param->has_emulation_bytes, 0);
/*
 * gen10_hevc_enc_insert_slice_header - insert raw per-slice packed data
 * first, then the slice header itself: either the application-supplied
 * packed slice header, or one built locally via build_hevc_slice_header()
 * when none was provided (start_index == -1).
 * NOTE(review): several lines (loop braces, the build call's remaining
 * arguments) are missing from this extract; code reproduced verbatim.
 */
498 gen10_hevc_enc_insert_slice_header(VADriverContextP ctx,
499 struct encode_state *encode_state,
500 struct intel_encoder_context *encoder_context,
501 struct intel_batchbuffer *batch,
504 VAEncPackedHeaderParameterBuffer *param = NULL;
505 uint8_t *header_data = NULL;
506 uint32_t length_in_bits = 0;
507 int count = 0, start_index = -1;
510 count = encode_state->slice_rawdata_count[slice_index];
511 start_index = encode_state->slice_rawdata_index[slice_index] &
512 SLICE_PACKED_DATA_INDEX_MASK;
/* Emit raw (non-slice-header) packed data attached to this slice. */
514 for (i = 0; i < count; i++) {
515 param = (VAEncPackedHeaderParameterBuffer *)
516 (encode_state->packed_header_params_ext[start_index + i]->buffer);
518 if (param->type == VAEncPackedHeaderSlice)
521 header_data = (uint8_t *)encode_state->packed_header_data_ext[start_index]->buffer;
522 length_in_bits = param->bit_length;
524 gen10_hevc_enc_insert_object(ctx, batch, header_data, length_in_bits,
525 0, 0, !param->has_emulation_bytes, 0);
/* Locate an app-provided packed slice header, if any. */
529 if (encode_state->slice_header_index[slice_index] & SLICE_PACKED_DATA_INDEX_TYPE)
530 start_index = encode_state->slice_header_index[slice_index] &
531 SLICE_PACKED_DATA_INDEX_MASK;
533 if (start_index == -1) {
/* No packed slice header supplied: build one from the parameters. */
534 VAEncSequenceParameterBufferHEVC *seq_param = (VAEncSequenceParameterBufferHEVC *)encode_state->seq_param_ext->buffer;
535 VAEncPictureParameterBufferHEVC *pic_param = (VAEncPictureParameterBufferHEVC *)encode_state->pic_param_ext->buffer;
536 VAEncSliceParameterBufferHEVC *slice_param = (VAEncSliceParameterBufferHEVC *)encode_state->slice_params_ext[slice_index]->buffer;
537 unsigned char *slice_header = NULL;
538 int slice_header_bits = 0;
540 slice_header_bits = build_hevc_slice_header(seq_param,
546 gen10_hevc_enc_insert_object(ctx, batch, slice_header, slice_header_bits,
551 param = (VAEncPackedHeaderParameterBuffer *)
552 (encode_state->packed_header_params_ext[start_index]->buffer);
553 header_data = (uint8_t *)encode_state->packed_header_data_ext[start_index]->buffer;
554 length_in_bits = param->bit_length;
556 gen10_hevc_enc_insert_object(ctx, batch, header_data, length_in_bits,
557 0, 1, !param->has_emulation_bytes, 0);
/* Helper macros: free any previous GPE buffer, then (re)allocate it.
 * ALLOC_GPE_RESOURCE targets common_res->RES (linear buffer of SIZE);
 * ALLOC_GPE_2D_RESOURCE targets priv_ctx->RES (W x H, pitch P).
 * NOTE(review): the failure-handling tail of both macros is missing
 * from this extract; code reproduced verbatim. */
562 #define ALLOC_GPE_RESOURCE(RES, NAME, SIZE)                 \
564         i965_free_gpe_resource(&common_res->RES);           \
565         if (!i965_allocate_gpe_resource(i965->intel.bufmgr, \
572 #define ALLOC_GPE_2D_RESOURCE(RES, NAME, W, H, P)             \
574         i965_free_gpe_resource(&priv_ctx->RES);               \
575         if (!i965_gpe_allocate_2d_resource(i965->intel.bufmgr, \
/*
 * gen10_hevc_enc_init_common_resource - bind per-frame surfaces (coded
 * buffer, source YUV, reconstructed picture, reference list) into GPE
 * resources, and when frame_info->reallocate_flag is set, (re)allocate
 * all HCP row-store / streamout scratch buffers sized from the frame
 * dimensions. Sizes follow the HCP PAK buffer-size formulas; size_shift
 * halves line-buffer sizes for 8-bit content (3) vs >8-bit (2).
 * NOTE(review): multiple lines (macro size arguments, braces) are
 * missing from this extract; code reproduced verbatim.
 */
584 gen10_hevc_enc_init_common_resource(VADriverContextP ctx,
585 struct encode_state *encode_state,
586 struct intel_encoder_context *encoder_context,
587 struct gen10_hevc_enc_common_res *common_res,
588 struct gen10_hevc_enc_frame_info *frame_info,
592 struct i965_driver_data *i965 = i965_driver_data(ctx);
593 VAEncPictureParameterBufferHEVC *pic_param = NULL;
594 struct object_surface *obj_surface;
595 struct object_buffer *obj_buffer;
596 int res_size = 0, size_shift = 0;
597 int width = 0, height = 0;
600 pic_param = (VAEncPictureParameterBufferHEVC *)encode_state->pic_param_ext->buffer;
/* Compressed bitstream output aliases the app's coded buffer BO. */
602 obj_buffer = encode_state->coded_buf_object;
603 i965_free_gpe_resource(&common_res->compressed_bitstream.gpe_res);
604 i965_dri_object_to_buffer_gpe_resource(&common_res->compressed_bitstream.gpe_res,
605 obj_buffer->buffer_store->bo);
606 common_res->compressed_bitstream.offset = I965_CODEDBUFFER_HEADER_SIZE;
607 common_res->compressed_bitstream.end_offset = ALIGN(obj_buffer->size_element - 0x1000,
610 i965_free_gpe_resource(&common_res->uncompressed_pic.gpe_res);
611 i965_object_surface_to_2d_gpe_resource(&common_res->uncompressed_pic.gpe_res,
612 encode_state->input_yuv_object);
613 common_res->uncompressed_pic.obj_surface = encode_state->input_yuv_object;
614 common_res->uncompressed_pic.surface_id = encoder_context->input_yuv_surface;
616 i965_free_gpe_resource(&common_res->reconstructed_pic.gpe_res);
617 i965_object_surface_to_2d_gpe_resource(&common_res->reconstructed_pic.gpe_res,
618 encode_state->reconstructed_object);
619 common_res->reconstructed_pic.obj_surface = encode_state->reconstructed_object;
620 common_res->reconstructed_pic.surface_id = pic_param->decoded_curr_pic.picture_id;
/* NOTE(review): loop bound is 15 here but the free path iterates 16
 * entries — verify reference_pics[] array length. */
623 for (i = 0; i < 15; i++) {
624 if (common_res->reference_pics[i].surface_id != VA_INVALID_ID)
625 i965_free_gpe_resource(&common_res->reference_pics[i].gpe_res);
627 obj_surface = encode_state->reference_objects[i];
628 if (obj_surface && obj_surface->bo) {
629 i965_object_surface_to_2d_gpe_resource(&common_res->reference_pics[i].gpe_res,
632 common_res->reference_pics[i].obj_surface = obj_surface;
633 common_res->reference_pics[i].surface_id = pic_param->reference_frames[i].picture_id;
635 common_res->reference_pics[i].obj_surface = NULL;
636 common_res->reference_pics[i].surface_id = VA_INVALID_ID;
641 if (frame_info->reallocate_flag) {
642 width = frame_info->frame_width;
643 height = frame_info->frame_height;
644 size_shift = (frame_info->bit_depth_luma_minus8 ||
645 frame_info->bit_depth_chroma_minus8) ? 2 : 3;
647 res_size = ALIGN(width, 32) << (6 - size_shift);
648 ALLOC_GPE_RESOURCE(deblocking_filter_line_buffer,
649 "Deblocking filter line buffer",
651 ALLOC_GPE_RESOURCE(deblocking_filter_tile_line_buffer,
652 "Deblocking filter tile line buffer",
655 res_size = ALIGN(height +
656 frame_info->width_in_lcu * 6, 32) << (6 - size_shift);
657 ALLOC_GPE_RESOURCE(deblocking_filter_tile_column_buffer,
658 "Deblocking filter tile column buffer",
661 res_size = (((width + 15) >> 4) * 188 +
662 frame_info->width_in_lcu * 9 + 1023) >> 3;
663 ALLOC_GPE_RESOURCE(metadata_line_buffer,
664 "metadata line buffer",
667 res_size = (((width + 15) >> 4) * 172 +
668 frame_info->width_in_lcu * 9 + 1023) >> 3;
669 ALLOC_GPE_RESOURCE(metadata_tile_line_buffer,
670 "metadata tile line buffer",
673 res_size = (((height + 15) >> 4) * 176 +
674 frame_info->height_in_lcu * 9 + 1023) >> 3;
675 ALLOC_GPE_RESOURCE(metadata_tile_column_buffer,
676 "metadata tile column buffer",
679 res_size = ALIGN(((width >> 1) +
680 frame_info->width_in_lcu * 3), 16) << (6 - size_shift);
681 ALLOC_GPE_RESOURCE(sao_line_buffer,
685 res_size = ALIGN(((width >> 1) +
686 frame_info->width_in_lcu * 6), 16) << (6 - size_shift);
687 ALLOC_GPE_RESOURCE(sao_tile_line_buffer,
688 "sao tile line buffer",
691 res_size = ALIGN(((height >> 1) +
692 frame_info->height_in_lcu * 6), 16) << (6 - size_shift);
693 ALLOC_GPE_RESOURCE(sao_tile_column_buffer,
694 "sao tile column buffer",
699 ALLOC_GPE_RESOURCE(streamout_data_destination_buffer,
700 "streamout data destination buffer",
704 res_size = I965_MAX_NUM_SLICE * 64;
705 ALLOC_GPE_RESOURCE(picture_status_buffer,
706 "picture status buffer",
709 res_size = frame_info->width_in_lcu * frame_info->height_in_lcu * 256;
710 ALLOC_GPE_RESOURCE(ildb_streamout_buffer,
711 "ildb streamout buffer",
714 //res_size = frame_info->width_in_lcu * frame_info->height_in_lcu * 16;
715 width = ALIGN(frame_info->frame_width, 64) >> 3;
716 height = ALIGN(frame_info->frame_height, 64) >> 3;
717 res_size = width * height * 16 + 1024;
718 ALLOC_GPE_RESOURCE(sao_streamout_data_destination_buffer,
719 "sao streamout date destination buffer",
722 res_size = ALIGN(8 * 64, 4096);
723 ALLOC_GPE_RESOURCE(frame_statics_streamout_data_destination_buffer,
724 "frame statics streamout date destination buffer",
727 //res_size = (ALIGN(width, GEN10_HEVC_ENC_MAX_LCU_SIZE) + 2) * 64 * 8 * 2;
728 res_size = ALIGN(frame_info->frame_width, 64) * 1024 + 2048;
729 res_size = res_size << 1;
730 ALLOC_GPE_RESOURCE(sse_source_pixel_rowstore_buffer,
731 "sse source pixel rowstore buffer",
/*
 * gen10_hevc_enc_free_common_resource - release every GPE resource held
 * in common_res (bitstream, surfaces, reference pics, and all HCP
 * scratch/streamout buffers allocated in init_common_resource).
 * NOTE(review): this loop frees 16 reference entries while the init
 * path binds only 15 — confirm reference_pics[] length and unify.
 */
742 gen10_hevc_enc_free_common_resource(struct gen10_hevc_enc_common_res *common_res)
746 i965_free_gpe_resource(&common_res->compressed_bitstream.gpe_res);
747 i965_free_gpe_resource(&common_res->uncompressed_pic.gpe_res);
748 i965_free_gpe_resource(&common_res->reconstructed_pic.gpe_res);
750 for (i = 0; i < 16; i++)
751 if (common_res->reference_pics[i].surface_id != VA_INVALID_ID)
752 i965_free_gpe_resource(&common_res->reference_pics[i].gpe_res);
754 i965_free_gpe_resource(&common_res->deblocking_filter_line_buffer);
755 i965_free_gpe_resource(&common_res->deblocking_filter_tile_line_buffer);
756 i965_free_gpe_resource(&common_res->deblocking_filter_tile_column_buffer);
757 i965_free_gpe_resource(&common_res->metadata_line_buffer);
758 i965_free_gpe_resource(&common_res->metadata_tile_line_buffer);
759 i965_free_gpe_resource(&common_res->metadata_tile_column_buffer);
760 i965_free_gpe_resource(&common_res->sao_line_buffer);
761 i965_free_gpe_resource(&common_res->sao_tile_line_buffer);
762 i965_free_gpe_resource(&common_res->sao_tile_column_buffer);
763 i965_free_gpe_resource(&common_res->streamout_data_destination_buffer);
764 i965_free_gpe_resource(&common_res->picture_status_buffer);
765 i965_free_gpe_resource(&common_res->ildb_streamout_buffer);
766 i965_free_gpe_resource(&common_res->sao_streamout_data_destination_buffer);
767 i965_free_gpe_resource(&common_res->frame_statics_streamout_data_destination_buffer);
768 i965_free_gpe_resource(&common_res->sse_source_pixel_rowstore_buffer);
/*
 * gen10_hevc_enc_init_status_buffer - bind the coded buffer BO as the
 * status-report resource, record the MMIO register offsets the HW will
 * be told to snapshot, compute the byte offsets of each status field
 * inside the coded buffer's private segment, and zero that segment.
 * NOTE(review): the bo map/unmap calls around the direct bo->virtual
 * accesses are missing from this extract; code reproduced verbatim.
 */
772 gen10_hevc_enc_init_status_buffer(VADriverContextP ctx,
773 struct encode_state *encode_state,
774 struct intel_encoder_context *encoder_context,
775 struct gen10_hevc_enc_status_buffer *status_buffer)
777 struct i965_coded_buffer_segment *coded_buffer_segment;
778 uint32_t base_offset;
782 bo = encode_state->coded_buf_object->buffer_store->bo;
784 i965_free_gpe_resource(&status_buffer->gpe_res);
785 i965_dri_object_to_buffer_gpe_resource(&status_buffer->gpe_res, bo);
787 status_buffer->status_size = ALIGN(sizeof(struct gen10_hevc_enc_status), 64);
789 status_buffer->mmio_bytes_per_frame_offset = GEN10_MMIO_HCP_ENC_BITSTREAM_BYTECOUNT_FRAME_OFFSET;
790 status_buffer->mmio_bs_frame_no_header_offset = GEN10_MMIO_HCP_ENC_BITSTREAM_BYTECOUNT_FRAME_NO_HEADER_OFFSET;
791 status_buffer->mmio_image_mask_offset = GEN10_MMIO_HCP_ENC_IMAGE_STATUS_MASK_OFFSET;
/* NOTE(review): "GEn10_" (lowercase n) below is likely a typo for
 * "GEN10_" — confirm the macro name in gen10_hcp_common.h. */
792 status_buffer->mmio_image_ctrl_offset = GEn10_MMIO_HCP_ENC_IMAGE_STATUS_CTRL_OFFSET;
793 status_buffer->mmio_qp_status_offset = GEN10_MMIO_HCP_ENC_QP_STATE_OFFSET;
794 status_buffer->mmio_bs_se_bitcount_offset = GEN10_MMIO_HCP_ENC_BITSTREAM_SE_BITCOUNT_FRAME_OFFSET;
/* Status fields live after the codec_private_data header of the
 * coded-buffer segment; offsets are absolute within the BO. */
796 base_offset = offsetof(struct i965_coded_buffer_segment, codec_private_data);
797 status_buffer->status_image_mask_offset = base_offset +
798 offsetof(struct gen10_hevc_enc_status, image_status_mask);
799 status_buffer->status_image_ctrl_offset = base_offset +
800 offsetof(struct gen10_hevc_enc_status, image_status_ctrl);
801 status_buffer->status_bytes_per_frame_offset = base_offset +
802 offsetof(struct gen10_hevc_enc_status, bytes_per_frame);
803 status_buffer->status_pass_num_offset = base_offset +
804 offsetof(struct gen10_hevc_enc_status, pass_number);
805 status_buffer->status_media_state_offset = base_offset +
806 offsetof(struct gen10_hevc_enc_status, media_state);
807 status_buffer->status_qp_status_offset = base_offset +
808 offsetof(struct gen10_hevc_enc_status, qp_status);
809 status_buffer->status_bs_se_bitcount_offset = base_offset +
810 offsetof(struct gen10_hevc_enc_status, bs_se_bitcount);
814 coded_buffer_segment = (struct i965_coded_buffer_segment *)bo->virtual;
815 coded_buffer_segment->mapped = 0;
816 coded_buffer_segment->codec = encoder_context->codec;
817 coded_buffer_segment->status_support = 1;
819 pbuffer = bo->virtual + base_offset;
820 memset(pbuffer, 0, status_buffer->status_size);
/*
 * gen10_hevc_enc_init_lambda_param - precompute RD lambda tables
 * (fixed-point, x16) per QP for intra and inter modes, for luma (i==0)
 * and chroma (i==1), following lambda = qp_factor * 2^((qp-12)/3).
 * NOTE(review): the qp_factor selection for the inter pass and several
 * braces are missing from this extract. Also note original lines
 * 864-866: lambda is taken from lambda_double BEFORE the 0xffff clamp
 * of lambda_double, relying instead on the CLAMP at line 866 — the
 * line-864/865 ordering looks accidental but is harmless; verify.
 */
826 gen10_hevc_enc_init_lambda_param(struct gen10_hevc_enc_lambda_param *param,
827 int bit_depth_luma_minus8,
828 int bit_depth_chroma_minus8)
830 double qp_temp, lambda_double, qp_factor;
831 int qp, qp_max[2], qp_offset[2], shift_qp = 12;
835 memset(param, 0, sizeof(*param));
/* QP range extends by 6 per extra bit of depth (HEVC QP scaling). */
837 qp_offset[0] = 6 * bit_depth_luma_minus8;
838 qp_offset[1] = 6 * bit_depth_chroma_minus8;
839 qp_max[0] = 52 + qp_offset[0];
840 qp_max[1] = 52 + qp_offset[1];
842 qp_factor = 0.25 * 0.65;
843 for (i = 0; i < 2; i++) {
844 for (qp = 0; qp < qp_max[i]; qp++) {
845 qp_temp = (double)qp - qp_offset[i] - shift_qp;
846 lambda_double = qp_factor * pow(2.0, qp_temp / 3.0);
/* Fixed point: scale by 16, round, saturate to uint16. */
847 lambda_double = lambda_double * 16 + 0.5;
848 lambda_double = (lambda_double > 65535) ? 65535 : lambda_double;
849 lambda = (uint32_t)floor(lambda_double);
850 param->lambda_intra[i][qp] = (uint16_t)lambda;
855 for (i = 0; i < 2; i++) {
856 for (qp = 0; qp < qp_max[i]; qp++) {
857 qp_temp = (double)qp - qp_offset[i] - shift_qp;
858 lambda_double = qp_factor * pow(2.0, qp_temp / 3.0);
/* Inter lambda gets a QP-dependent boost, bounded per branch. */
860 lambda_double *= MAX(1.00, MIN(1.6, 1.0 + 0.6 / 12.0 * (qp_temp - 10.0)));
862 lambda_double *= MAX(0.95, MIN(1.2, 0.25 / 12.0 * (qp_temp - 10.0) + 0.95));
863 lambda_double = lambda_double * 16 + 0.5;
864 lambda = (uint32_t)floor(lambda_double);
865 lambda_double = (lambda_double > 0xffff) ? 0xffff : lambda_double;
866 lambda = CLAMP(0, 0xffff, lambda);
867 param->lambda_inter[i][qp] = (uint16_t)lambda;
/*
 * hevc_hcp_set_qm_state - emit one HCP_QM_STATE command carrying a
 * single quant matrix (prediction type, size id, color component, DC
 * coefficient, and the matrix payload).
 * NOTE(review): "¶m" in this extract is mojibake for "&param"
 * ("&para" was decoded as the pilcrow entity); restore before building.
 * Several parameter lines (qm_buf/qm_length etc.) are also missing.
 */
873 hevc_hcp_set_qm_state(VADriverContextP ctx,
874 struct intel_batchbuffer *batch,
882 gen10_hcp_qm_state_param param;
884 memset(¶m, 0, sizeof(param));
885 param.dw1.prediction_type = prediction_type;
886 param.dw1.size_id = size_id;
887 param.dw1.color_component = color_component;
888 param.dw1.dc_coefficient = dc_coefficient;
889 memcpy(param.quant_matrix, qm_buf, qm_length);
890 gen10_hcp_qm_state(ctx, batch, ¶m);
/*
 * hevc_hcp_set_fqm_state - emit one HCP_FQM_STATE command carrying a
 * single forward-quant matrix (entries are uint16_t, hence the
 * sizeof(uint16_t) scaling on the memcpy length).
 * NOTE(review): "¶m" is mojibake for "&param" — see note on
 * hevc_hcp_set_qm_state. Some parameter lines are missing here too.
 */
894 hevc_hcp_set_fqm_state(VADriverContextP ctx,
895 struct intel_batchbuffer *batch,
899 int forward_dc_coeff,
903 gen10_hcp_fqm_state_param param;
905 memset(¶m, 0, sizeof(param));
906 param.dw1.prediction_type = prediction_type;
907 param.dw1.size_id = size_id;
908 param.dw1.color_component = color_component;
909 param.dw1.forward_dc_coeff = forward_dc_coeff;
910 memcpy(param.forward_quant_matrix, fqm_buf, fqm_length * sizeof(uint16_t));
911 gen10_hcp_fqm_state(ctx, batch, ¶m);
/*
 * gen10_hevc_enc_hcp_set_qm_fqm_states - program every QM/FQM matrix
 * held in frame_info into the HCP: all 4 size ids, each color component
 * (one for 32x32, three otherwise), intra and inter lists; DC
 * coefficients apply only to 16x16/32x32 (m == 2 or 3).
 * NOTE(review): variable declarations and some assignments are missing
 * from this extract; code reproduced verbatim.
 */
915 gen10_hevc_enc_hcp_set_qm_fqm_states(VADriverContextP ctx,
916 struct intel_batchbuffer *batch,
917 struct gen10_hevc_enc_frame_info *frame_info)
919 int dc_coefficient, forward_dc_coeff;
925 for (m = 0; m < 4; m++) {
926 comps = (m == 3) ? 1 : 3;
927 len = (m == 0) ? 16 : 64;
929 for (i = 0; i < comps; i++) {
930 for (j = 0; j < 2; j++) {
931 real_qm = frame_info->qm_matrix[m][i][j];
934 real_fqm = frame_info->fqm_matrix[m][j];
936 if (m == 2 || m == 3) {
937 dc_coefficient = frame_info->qm_dc_matrix[m - 2][i][j];
940 forward_dc_coeff = frame_info->fqm_dc_matrix[m - 2][j];
943 forward_dc_coeff = 0;
946 hevc_hcp_set_qm_state(ctx, batch, m, i, j, dc_coefficient,
950 hevc_hcp_set_fqm_state(ctx, batch, m, i, j, forward_dc_coeff,
/*
 * gen10_hevc_enc_hcp_set_ref_idx_state - emit HCP_REF_IDX_STATE for one
 * reference list (list_index 0 or 1): for each active entry, the POC
 * delta (tb value, clamped to [-128,127]), the frame-store index mapped
 * via hevc_enc_map_pic_index(), weighted-prediction flags, and the
 * long-term-reference bit.
 * NOTE(review): "¶m" is mojibake for "&param" (three occurrences);
 * restore before building. Some lines (list selection if/else) missing.
 */
958 gen10_hevc_enc_hcp_set_ref_idx_state(VADriverContextP ctx,
959 struct intel_batchbuffer *batch,
960 VAEncPictureParameterBufferHEVC *pic_param,
961 VAEncSliceParameterBufferHEVC *slice_param,
964 gen10_hcp_ref_idx_state_param param;
965 VAPictureHEVC *ref_pic, *cur_pic;
966 int weighted_pred_flag;
969 assert(list_index < 2);
971 memset(¶m, 0, sizeof(param));
973 param.dw1.ref_pic_list_num = list_index;
974 param.dw1.num_ref_idx_active_minus1 = list_index == 0 ? slice_param->num_ref_idx_l0_active_minus1 :
975 slice_param->num_ref_idx_l1_active_minus1;
977 cur_pic = &pic_param->decoded_curr_pic;
/* Weighted prediction applies per slice type: pred for P, bipred for B. */
978 weighted_pred_flag = (pic_param->pic_fields.bits.weighted_pred_flag &&
979 slice_param->slice_type == HEVC_SLICE_P) ||
980 (pic_param->pic_fields.bits.weighted_bipred_flag &&
981 slice_param->slice_type == HEVC_SLICE_B);
983 for (i = 0; i < 16; i++) {
984 if (i < MIN(param.dw1.num_ref_idx_active_minus1 + 1, 15)) {
986 ref_pic = &slice_param->ref_pic_list0[i];
988 ref_pic = &slice_param->ref_pic_list1[i];
990 j = hevc_enc_map_pic_index(ref_pic->picture_id,
991 pic_param->reference_frames, 8);
/* tb value = POC(current) - POC(ref), clamped and stored as a byte. */
993 param.ref_list_entry[i].ref_pic_tb_value = CLAMP(-128, 127,
994 cur_pic->pic_order_cnt - ref_pic->pic_order_cnt) & 0xff;
995 param.ref_list_entry[i].ref_pic_frame_id = j;
996 param.ref_list_entry[i].chroma_weight_flag = weighted_pred_flag;
997 param.ref_list_entry[i].luma_weight_flag = weighted_pred_flag;
998 param.ref_list_entry[i].long_term_ref_flag = !!(ref_pic->flags &
999 VA_PICTURE_HEVC_LONG_TERM_REFERENCE);
1004 gen10_hcp_ref_idx_state(ctx, batch, ¶m);
/*
 * gen10_hevc_enc_hcp_set_ref_idx_lists - program list 0 always, and
 * list 1 additionally for B slices.
 */
1008 gen10_hevc_enc_hcp_set_ref_idx_lists(VADriverContextP ctx,
1009 struct intel_batchbuffer *batch,
1010 VAEncPictureParameterBufferHEVC *pic_param,
1011 VAEncSliceParameterBufferHEVC *slice_param)
1013 gen10_hevc_enc_hcp_set_ref_idx_state(ctx, batch, pic_param, slice_param, 0);
1014 if (slice_param->slice_type == HEVC_SLICE_B)
1015 gen10_hevc_enc_hcp_set_ref_idx_state(ctx, batch, pic_param, slice_param, 1);
/*
 * gen10_hevc_enc_hcp_set_weight_offsets - emit HCP_WEIGHTOFFSET_STATE
 * for each list that has weighted prediction enabled for the current
 * slice type (i==0 -> L0 weights, i==1 -> L1 weights).
 * NOTE(review): "¶m" is mojibake for "&param" (two occurrences);
 * restore before building. The if/continue lines selecting which lists
 * to emit are partially missing from this extract.
 */
1019 gen10_hevc_enc_hcp_set_weight_offsets(VADriverContextP ctx,
1020 struct intel_batchbuffer *batch,
1021 VAEncPictureParameterBufferHEVC *pic_param,
1022 VAEncSliceParameterBufferHEVC *slice_param)
1024 gen10_hcp_weightoffset_state_param param;
1028 for (i = 0; i < 2; i++) {
1030 pic_param->pic_fields.bits.weighted_pred_flag &&
1031 slice_param->slice_type == HEVC_SLICE_P)
1034 pic_param->pic_fields.bits.weighted_bipred_flag &&
1035 slice_param->slice_type == HEVC_SLICE_B)
1041 memset(¶m, 0, sizeof(param));
1043 param.dw1.ref_pic_list_num = i;
/* Copy per-reference luma and chroma delta weights/offsets. */
1046 for (j = 0; j < 15; j++) {
1047 param.luma_offset[j].delta_luma_weight = slice_param->delta_luma_weight_l0[j];
1048 param.luma_offset[j].luma_offset = slice_param->luma_offset_l0[j];
1050 param.chroma_offset[j].delta_chroma_weight_0 = slice_param->delta_chroma_weight_l0[j][0];
1051 param.chroma_offset[j].chroma_offset_0 = slice_param->chroma_offset_l0[j][0];
1052 param.chroma_offset[j].delta_chroma_weight_1 = slice_param->delta_chroma_weight_l0[j][1];
1053 param.chroma_offset[j].chroma_offset_1 = slice_param->chroma_offset_l0[j][1];
1056 for (j = 0; j < 15; j++) {
1057 param.luma_offset[j].delta_luma_weight = slice_param->delta_luma_weight_l1[j];
1058 param.luma_offset[j].luma_offset = slice_param->luma_offset_l1[j];
1060 param.chroma_offset[j].delta_chroma_weight_0 = slice_param->delta_chroma_weight_l1[j][0];
1061 param.chroma_offset[j].chroma_offset_0 = slice_param->chroma_offset_l1[j][0];
1062 param.chroma_offset[j].delta_chroma_weight_1 = slice_param->delta_chroma_weight_l1[j][1];
1063 param.chroma_offset[j].chroma_offset_1 = slice_param->chroma_offset_l1[j][1];
1067 gen10_hcp_weightoffset_state(ctx, batch, ¶m);
/* Ensure obj_surface's backing store matches the encode bit depth:
 * P010 for 10-bit (bit_depth_minus8 > 0), NV12 for 8-bit.  When the
 * surface has no BO yet or the fourcc mismatches, and reallocate_flag
 * permits it, the old storage is destroyed and a new BO is allocated;
 * otherwise VA_STATUS_ERROR_INVALID_PARAMETER is returned.
 * NOTE(review): several lines (early checks, "update" flag setting,
 * allocation arguments) are missing from this extract. */
1073 gen10_hevc_enc_ensure_surface(VADriverContextP ctx,
1074 struct object_surface *obj_surface,
1075 int bit_depth_minus8,
1076 int reallocate_flag)
1078 VAStatus va_status = VA_STATUS_SUCCESS;
1079 uint32_t fourcc = VA_FOURCC_NV12;
1083 va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Pick the target fourcc from the requested luma bit depth. */
1088 if (bit_depth_minus8 > 0) {
1089 if (obj_surface->fourcc != VA_FOURCC_P010) {
1091 fourcc = VA_FOURCC_P010;
1093 } else if (obj_surface->fourcc != VA_FOURCC_NV12) {
1095 fourcc = VA_FOURCC_NV12;
/* (Re)allocate only when allowed; otherwise report the mismatch. */
1098 if (!obj_surface->bo || update) {
1099 if (reallocate_flag) {
1100 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1102 i965_destroy_surface_storage(obj_surface);
1104 va_status = i965_check_alloc_surface_bo(ctx,
1106 i965->codec_info->has_tiled_surface,
1110 va_status = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Look up the per-level maximum bitrate (*max_bps) and maximum bytes
 * per picture (*max_bps_per_pic) used to clamp encoded frame sizes —
 * presumably following the ITU-T H.265 Annex A level limits; the
 * switch's case labels (level_idc values) are not visible in this
 * extract, so the exact level mapping should be confirmed against the
 * full source. */
1118 hevc_get_max_mbps(uint32_t level_idc,
1120 uint64_t *max_bps_per_pic)
1122 switch (level_idc) {
1125 *max_bps_per_pic = 36864;
1129 *max_bps_per_pic = 122880;
1133 *max_bps_per_pic = 245760;
1136 *max_bps = 16588800;
1137 *max_bps_per_pic = 552760;
1140 *max_bps = 33177600;
1141 *max_bps_per_pic = 983040;
1144 *max_bps = 66846720;
1145 *max_bps_per_pic = 2228224;
1148 *max_bps = 133693440;
1149 *max_bps_per_pic = 2228224;
1152 *max_bps = 267386880;
1153 *max_bps_per_pic = 8912896;
1156 *max_bps = 534773760;
1157 *max_bps_per_pic = 8912896;
1160 *max_bps = 1069547520;
1161 *max_bps_per_pic = 8912896;
1164 *max_bps = 1069547520;
1165 *max_bps_per_pic = 35651584;
1168 *max_bps = 2139095040;
1169 *max_bps_per_pic = 35651584;
1172 *max_bps = 4278190080;
1173 *max_bps_per_pic = 35651584;
/* Fallback: same values as the 16588800/552760 case above. */
1176 *max_bps = 16588800;
1177 *max_bps_per_pic = 552760;
/* Compute the maximum allowed compressed frame size in bytes for the
 * configured profile/level, optionally clamped by a user-supplied
 * limit, and finally clamped to the luma picture area.
 * NOTE(review): the min_cr assignments selected by the level_idc
 * comparisons (lines between 1197 and 1206 of the full file) are
 * missing from this extract. */
1183 gen10_hevc_enc_get_profile_level_max_frame(VAEncSequenceParameterBufferHEVC *seq_param,
1184 uint32_t user_max_frame_size,
1185 uint32_t frame_rate)
1187 int bit_depth_minus8 = seq_param->seq_fields.bits.bit_depth_luma_minus8;
1188 uint64_t max_byte_per_pic, max_byte_per_pic_not0;
1189 int level_idc = seq_param->general_level_idc;
1190 uint32_t profile_level_max_frame, max_mbps;
/* 1.5 bytes/pixel corresponds to 8-bit 4:2:0 (see override below). */
1191 double format_factor = 1.5;
1192 double min_cr_scale = 1.0;
/* Only 4:2:0 chroma is supported here. */
1195 assert(seq_param->seq_fields.bits.chroma_format_idc == 1);
/* Level thresholds select the minimum compression ratio (min_cr);
 * the assignments themselves are not visible in this extract. */
1197 if (level_idc == 186 || level_idc == 150)
1199 else if (level_idc > 150)
1201 else if (level_idc > 93)
/* 10-bit -> 1.875 bytes/pixel, 12-bit -> 2.25, otherwise 1.5. */
1206 format_factor = bit_depth_minus8 == 2 ? 1.875 :
1207 bit_depth_minus8 == 4 ? 2.25 : 1.5;
1209 min_cr_scale *= min_cr;
1210 format_factor /= min_cr_scale;
1212 hevc_get_max_mbps(level_idc, &max_mbps, &max_byte_per_pic);
/* Derive a non-zero per-picture byte budget from the level bitrate
 * and the frame rate. */
1214 max_byte_per_pic_not0 = (uint64_t)((((float_t)max_mbps * (float_t)100) / (float_t)frame_rate) * format_factor);
1216 if (user_max_frame_size) {
1217 profile_level_max_frame = (uint32_t)MIN(user_max_frame_size, max_byte_per_pic);
1218 profile_level_max_frame = (uint32_t)MIN(max_byte_per_pic_not0, profile_level_max_frame);
1220 profile_level_max_frame = (uint32_t)MIN(max_byte_per_pic_not0, max_byte_per_pic);
/* Never exceed the raw luma picture area in bytes. */
1222 return MIN(profile_level_max_frame,
1223 seq_param->pic_width_in_luma_samples * seq_param->pic_height_in_luma_samples);
/* Return the maximum number of slice segments per picture allowed by
 * the sequence's general_level_idc — presumably following the H.265
 * Annex A MaxSliceSegmentsPerPicture column; the case labels are not
 * visible in this extract.  Unhandled levels leave the initial 0. */
1227 gen10_hevc_enc_get_max_num_slices(VAEncSequenceParameterBufferHEVC *seq_param)
1229 uint32_t max_num_slices = 0;
1231 switch (seq_param->general_level_idc) {
1233 max_num_slices = 16;
1236 max_num_slices = 16;
1239 max_num_slices = 20;
1242 max_num_slices = 30;
1245 max_num_slices = 40;
1248 max_num_slices = 75;
1251 max_num_slices = 75;
1254 max_num_slices = 200;
1257 max_num_slices = 200;
1260 max_num_slices = 200;
1263 max_num_slices = 600;
1266 max_num_slices = 600;
1269 max_num_slices = 600;
1276 return max_num_slices;
/* Scan forward through the buffer for the 0x01 byte that terminates an
 * Annex-B start code and return its offset.
 * NOTE(review): most of this function's body (count init, loop body,
 * return) is missing from this extract. */
1280 uint32_t gen10_hevc_get_start_code_offset(unsigned char *ptr,
1285 while (count < size && *ptr != 0x01) {
/* Count the emulation-prevention bytes that would be needed for the
 * payload following the start code: positions where two consecutive
 * zero bytes are followed by a byte in 0x00..0x03 (!(*ptr & 0xFC)).
 * NOTE(review): the zero_count update and emulation_num increment lines
 * are missing from this extract. */
1297 uint32_t gen10_hevc_get_emulation_num(unsigned char *ptr,
1300 uint32_t emulation_num = 0;
1301 uint32_t header_offset = 0;
1302 uint32_t zero_count = 0;
/* Skip past the start code before scanning the payload. */
1305 header_offset = gen10_hevc_get_start_code_offset(ptr, size);
1306 ptr += header_offset;
1308 for (i = 0 ; i < (size - header_offset); i++, ptr++) {
1309 if (zero_count == 2 && !(*ptr & 0xFC)) {
1320 return emulation_num;
/* Bytes occupied by the NAL-unit header after the start code's 0x01. */
1323 #define HEVC_ENC_START_CODE_NAL_OFFSET (2)
/* Accumulate the total size, in bits, of all packed headers (VPS, SPS,
 * PPS, SEI and per-slice raw packed data) attached to this frame,
 * measured from just past the first header's start code + NAL offset.
 * Emulation-prevention bytes are added for headers whose producer did
 * not insert them (has_emulation_bytes == 0).
 * NOTE(review): the switch labels of the 4-iteration loop and several
 * control lines are missing from this extract. */
1326 gen10_hevc_enc_get_pic_header_size(struct encode_state *encode_state)
1328 VAEncPackedHeaderParameterBuffer *param = NULL;
1329 uint32_t header_begin = 0;
1330 uint32_t accum_size = 0;
1331 unsigned char *header_data = NULL;
1332 uint32_t length_in_bytes = 0;
1333 int packed_type = 0;
1334 int idx = 0, count = 0, idx_offset = 0;
1335 int i = 0, slice_idx = 0, start_index = 0;
/* Pass 1: the four fixed packed-header kinds. */
1337 for (i = 0; i < 4; i++) {
1341 packed_type = VAEncPackedHeaderHEVC_VPS;
/* Presumably the SPS slot: it reuses the VPS packed type and relies on
 * idx_offset to reach the adjacent index — confirm against
 * va_enc_packed_type_to_idx in the full source. */
1344 packed_type = VAEncPackedHeaderHEVC_VPS;
1348 packed_type = VAEncPackedHeaderHEVC_PPS;
1351 packed_type = VAEncPackedHeaderHEVC_SEI;
1357 idx = va_enc_packed_type_to_idx(packed_type) + idx_offset;
1358 if (encode_state->packed_header_data[idx]) {
1359 param = (VAEncPackedHeaderParameterBuffer *)encode_state->packed_header_param[idx]->buffer;
1360 header_data = (unsigned char *)encode_state->packed_header_data[idx]->buffer;
/* bit_length is rounded up to whole bytes. */
1361 length_in_bytes = (param->bit_length + 7) / 8;
1363 header_begin = gen10_hevc_get_start_code_offset(header_data, length_in_bytes) +
1364 HEVC_ENC_START_CODE_NAL_OFFSET;
1366 accum_size += length_in_bytes;
1367 if (!param->has_emulation_bytes)
1368 accum_size += gen10_hevc_get_emulation_num(header_data,
/* Pass 2: raw packed data attached to individual slices. */
1373 for (slice_idx = 0; slice_idx < encode_state->num_slice_params_ext; slice_idx++) {
1374 count = encode_state->slice_rawdata_count[slice_idx];
1375 start_index = encode_state->slice_rawdata_index[slice_idx] &
1376 SLICE_PACKED_DATA_INDEX_MASK;
1378 if (start_index >= 5)
1381 for (i = 0; i < count; i++) {
1382 param = (VAEncPackedHeaderParameterBuffer *)
1383 (encode_state->packed_header_params_ext[start_index + i]->buffer);
/* Slice headers themselves are accounted for elsewhere; skip them. */
1385 if (param->type == VAEncPackedHeaderSlice)
1388 header_data = (unsigned char *)encode_state->packed_header_data[start_index]->buffer;
1389 length_in_bytes = (param->bit_length + 7) / 8;
1391 accum_size += length_in_bytes;
1392 if (!param->has_emulation_bytes)
1393 accum_size += gen10_hevc_get_emulation_num(header_data,
/* Exclude everything before the first start code from the total. */
1398 header_begin = MIN(header_begin, accum_size);
/* Convert bytes to bits. */
1400 return ((accum_size - header_begin) * 8);