#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
+#define FRAME_IDR 7
+
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
static int qp_value = 26;
static int intra_period = 30;
-static int pb_period = 5;
static int frame_bit_rate = -1;
static int frame_rate = 30;
static int ip_period = 1;
#define MAX_SLICES 32
+
+static unsigned int MaxFrameNum = (1<<12);
+static unsigned int Log2MaxFrameNum = 12;
+static unsigned int Log2MaxPicOrderCntLsb = 8;
+
+/* Long-only command line options (--qp, --fb, --mode).  The values
+ * 1..3 are what getopt_long_only() returns and are matched by the
+ * case labels in main()'s option-parsing switch.
+ */
+static const struct option longopts[] = {
+ {"qp", required_argument, 0, 1},
+ {"fb", required_argument, 0, 2},
+ {"mode", required_argument, 0, 3},
+ { NULL, 0, NULL, 0}
+};
+
static int
build_packed_pic_buffer(unsigned char **header_buffer);
build_packed_seq_buffer(unsigned char **header_buffer);
static int
-build_packed_sei_buffer_timing(unsigned int init_cpb_removal_length,
- unsigned int init_cpb_removal_delay,
- unsigned int init_cpb_removal_delay_offset,
+build_packed_sei_pic_timing(unsigned int cpb_removal_length,
+ unsigned int dpb_output_length,
+ unsigned char **sei_buffer);
+
+static int
+build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
unsigned int cpb_removal_length,
- unsigned int cpb_removal_delay,
unsigned int dpb_output_length,
- unsigned int dpb_output_delay,
unsigned char **sei_buffer);
struct upload_thread_param
pthread_t upload_thread_id;
int upload_thread_value;
int i_initial_cpb_removal_delay;
+ int i_initial_cpb_removal_delay_offset;
int i_initial_cpb_removal_delay_length;
int i_cpb_removal_delay;
int i_cpb_removal_delay_length;
int i_dpb_output_delay_length;
+ int time_offset_length;
+
+ unsigned long long idr_frame_num;
+ unsigned long long prev_idr_cpb_removal;
+ unsigned long long current_idr_cpb_removal;
+ unsigned long long current_cpb_removal;
+ /* This is relative to the current_cpb_removal */
+ unsigned int current_dpb_removal_delta;
} avcenc_context;
static VAPictureH264 ReferenceFrames[16], RefPicList0[32], RefPicList1[32];
#define SID_REFERENCE_PICTURE_L1 3
#define SID_RECON_PICTURE 4
#define SID_NUMBER SID_RECON_PICTURE + 1
+
+#define SURFACE_NUM 16 /* 16 surfaces for reference */
+
static VASurfaceID surface_ids[SID_NUMBER];
+static VASurfaceID ref_surface[SURFACE_NUM];
+static int use_slot[SURFACE_NUM];
+
+static unsigned long long current_frame_display = 0;
+static unsigned long long current_IDR_display = 0;
+
+static VAPictureH264 CurrentCurrPic;
+
+#define current_slot (current_frame_display % SURFACE_NUM)
static int frame_number;
-static int enc_frame_number;
+static unsigned long long enc_frame_number;
+static int current_frame_type;
+static int current_frame_num;
+static unsigned int current_poc;
+static unsigned int num_ref_frames = 2;
+static unsigned int numShortTerm = 0;
/***************************************************/
+/* Return the index of the first unused entry of use_slot[] (i.e. a
+ * ref_surface[] slot not currently referenced by ReferenceFrames[]).
+ * If every slot is in use, warn and fall back to the last slot so
+ * encoding can continue.
+ */
+static int get_free_slot()
+{
+ int i, index = -1;
+
+ for (i = 0; i < SURFACE_NUM; i++) {
+ if (use_slot[i] == 0) {
+ index = i;
+ break;
+ }
+ }
+ if (index < 0) {
+ printf("WARNING: No free slot to store the reconstructed frame \n");
+ index = SURFACE_NUM - 1;
+ }
+ return index;
+}
+
static void *
upload_thread_function(void *data)
{
// Create surface
va_status = vaCreateSurfaces(
- va_dpy,
- VA_RT_FORMAT_YUV420, picture_width, picture_height,
- &surface_ids[0], SID_NUMBER,
- NULL, 0
- );
+ va_dpy,
+ VA_RT_FORMAT_YUV420, picture_width, picture_height,
+ surface_ids, SID_NUMBER,
+ NULL, 0
+ );
+
+ CHECK_VASTATUS(va_status, "vaCreateSurfaces");
+
+ // Create surface
+ va_status = vaCreateSurfaces(
+ va_dpy,
+ VA_RT_FORMAT_YUV420, picture_width, picture_height,
+ ref_surface, SURFACE_NUM,
+ NULL, 0
+ );
+
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
+
newImageBuffer = (unsigned char *)malloc(frame_size);
/* firstly upload YUV data to SID_INPUT_PICTURE_1 */
free(newImageBuffer);
// Release all the surfaces resource
- vaDestroySurfaces(va_dpy, &surface_ids[0], SID_NUMBER);
+ vaDestroySurfaces(va_dpy, surface_ids, SID_NUMBER);
+ // Release all the reference surfaces
+ vaDestroySurfaces(va_dpy, ref_surface, SURFACE_NUM);
}
-static void avcenc_update_sei_param(int frame_num)
+static void avcenc_update_sei_param(int is_idr)
{
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
- unsigned int length_in_bits, offset_in_bytes;
+ unsigned int length_in_bits;
unsigned char *packed_sei_buffer = NULL;
VAStatus va_status;
- length_in_bits = build_packed_sei_buffer_timing(
+ if (is_idr)
+ length_in_bits = build_packed_idr_sei_buffer_timing(
avcenc_context.i_initial_cpb_removal_delay_length,
- avcenc_context.i_initial_cpb_removal_delay,
- 0,
avcenc_context.i_cpb_removal_delay_length,
- avcenc_context.i_cpb_removal_delay * frame_num,
avcenc_context.i_dpb_output_delay_length,
- 0,
+ &packed_sei_buffer);
+ else
+ length_in_bits = build_packed_sei_pic_timing(
+ avcenc_context.i_cpb_removal_delay_length,
+ avcenc_context.i_dpb_output_delay_length,
&packed_sei_buffer);
- offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderH264_SEI;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
return;
}
-static void avcenc_update_picture_parameter(int slice_type, int frame_num, int display_num, int is_idr)
+/* Hoare-style quicksort partition step, expanded as a macro so the
+ * compared VAPictureH264 member ("field") can vary (frame_idx or
+ * TopFieldOrderCnt).  Rearranges ref[] around "key", moving smaller
+ * elements left when "ascending" is non-zero and right otherwise.
+ * NOTE: it relies on (and advances) variables i, j and a VAPictureH264
+ * tmp that the caller must declare in the enclosing scope.
+ */
+#define partition(ref, field, key, ascending) \
+ while (i <= j) { \
+ if (ascending) { \
+ while (ref[i].field < key) \
+ i++; \
+ while (ref[j].field > key) \
+ j--; \
+ } else { \
+ while (ref[i].field > key) \
+ i++; \
+ while (ref[j].field < key) \
+ j--; \
+ } \
+ if (i <= j) { \
+ tmp = ref[i]; \
+ ref[i] = ref[j]; \
+ ref[j] = tmp; \
+ i++; \
+ j--; \
+ } \
+ } \
+
+/* Recursive quicksort of ref[left..right].  Sorts on frame_idx when
+ * the frame_idx flag is non-zero, otherwise on TopFieldOrderCnt
+ * (compared as signed).  "ascending" selects the sort direction.
+ */
+static void sort_one(VAPictureH264 ref[], int left, int right,
+ int ascending, int frame_idx)
+{
+ int i = left, j = right;
+ unsigned int key;
+ VAPictureH264 tmp;
+
+ if (frame_idx) {
+ key = ref[(left + right) / 2].frame_idx;
+ partition(ref, frame_idx, key, ascending);
+ } else {
+ key = ref[(left + right) / 2].TopFieldOrderCnt;
+ partition(ref, TopFieldOrderCnt, (signed int)key, ascending);
+ }
+
+ /* recursion */
+ if (left < j)
+ sort_one(ref, left, j, ascending, frame_idx);
+
+ if (i < right)
+ sort_one(ref, i, right, ascending, frame_idx);
+}
+
+/* Split ref[left..right] around "key" (on frame_idx when the frame_idx
+ * flag is non-zero, otherwise on TopFieldOrderCnt) and then sort each
+ * half with its own direction.  Used by update_RefPicList() to build
+ * the B-frame lists: one half is ordered by proximity before the
+ * current POC, the other by proximity after it.
+ */
+static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key, unsigned int frame_idx,
+ int partition_ascending, int list0_ascending, int list1_ascending)
+{
+ int i = left, j = right;
+ VAPictureH264 tmp;
+
+ if (frame_idx) {
+ partition(ref, frame_idx, key, partition_ascending);
+ } else {
+ partition(ref, TopFieldOrderCnt, (signed int)key, partition_ascending);
+ }
+
+ sort_one(ref, left, i-1, list0_ascending, frame_idx);
+ sort_one(ref, j+1, right, list1_ascending, frame_idx);
+}
+
+/* Build RefPicList0/RefPicList1 from the short-term references for the
+ * current picture type.
+ * P: list0 = ReferenceFrames sorted by frame_idx descending (most
+ * recent reference first).
+ * B: both lists are partitioned around current_poc; list0 prefers the
+ * nearest pictures preceding the current POC, list1 the nearest
+ * pictures following it (the H.264 default ordering).
+ * Always returns 0.
+ */
+static int update_RefPicList()
+{
+
+ if (current_frame_type == SLICE_TYPE_P) {
+ memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_one(RefPicList0, 0, numShortTerm-1, 0, 1);
+ }
+
+ if (current_frame_type == SLICE_TYPE_B) {
+ memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_two(RefPicList0, 0, numShortTerm-1, current_poc, 0,
+ 1, 0, 1);
+
+ memcpy(RefPicList1, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_two(RefPicList1, 0, numShortTerm-1, current_poc, 0,
+ 0, 1, 0);
+ }
+
+ return 0;
+}
+
+static void avcenc_update_picture_parameter(int slice_type, int is_idr)
{
VAEncPictureParameterBufferH264 *pic_param;
VAStatus va_status;
+ int recon_index;
+ recon_index = get_free_slot();
// Picture level
pic_param = &avcenc_context.pic_param;
- pic_param->CurrPic.picture_id = surface_ids[SID_RECON_PICTURE];
- pic_param->CurrPic.TopFieldOrderCnt = display_num * 2;
- pic_param->ReferenceFrames[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L0];
- pic_param->ReferenceFrames[1].picture_id = surface_ids[SID_REFERENCE_PICTURE_L1];
- pic_param->ReferenceFrames[2].picture_id = VA_INVALID_ID;
+
+ pic_param->CurrPic.picture_id = ref_surface[recon_index];
+ pic_param->CurrPic.frame_idx = current_frame_num;
+ pic_param->CurrPic.flags = 0;
+
+ pic_param->CurrPic.TopFieldOrderCnt = current_poc;
+ pic_param->CurrPic.BottomFieldOrderCnt = pic_param->CurrPic.TopFieldOrderCnt;
+
assert(avcenc_context.codedbuf_buf_id != VA_INVALID_ID);
pic_param->coded_buf = avcenc_context.codedbuf_buf_id;
- pic_param->frame_num = frame_num;
+ pic_param->frame_num = current_frame_num;
pic_param->pic_fields.bits.idr_pic_flag = !!is_idr;
pic_param->pic_fields.bits.reference_pic_flag = (slice_type != SLICE_TYPE_B);
+ CurrentCurrPic = pic_param->CurrPic;
+
+ if (slice_type == SLICE_TYPE_P || slice_type == SLICE_TYPE_B)
+ memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
+
+ if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
+ pic_param->ReferenceFrames[0] = RefPicList0[0];
+ }
+ if (slice_type == SLICE_TYPE_B) {
+ pic_param->ReferenceFrames[1] = RefPicList1[0];
+ }
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
sizeof(*pic_param), 1, pic_param,
&avcenc_context.pic_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
+
}
#ifndef VA_FOURCC_I420
/* FIXME: fill other fields */
if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
- int j;
- slice_param->RefPicList0[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L0];
- for (j = 1; j < 32; j++) {
- slice_param->RefPicList0[j].picture_id = VA_INVALID_SURFACE;
- slice_param->RefPicList0[j].flags = VA_PICTURE_H264_INVALID;
- }
+ memset(slice_param->RefPicList0, 0xFF, 32 * sizeof(VAPictureH264));
+ slice_param->RefPicList0[0] = RefPicList0[0];
}
if ((slice_type == SLICE_TYPE_B)) {
- int j;
- slice_param->RefPicList1[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L1];
- for (j = 1; j < 32; j++) {
- slice_param->RefPicList1[j].picture_id = VA_INVALID_SURFACE;
- slice_param->RefPicList1[j].flags = VA_PICTURE_H264_INVALID;
- }
+ memset(slice_param->RefPicList1, 0xFF, 32 * sizeof(VAPictureH264));
+ slice_param->RefPicList1[0] = RefPicList1[0];
}
va_status = vaCreateBuffer(va_dpy,
avcenc_context.num_slices = i;
}
+/* Insert the just-encoded picture (CurrentCurrPic) at the head of the
+ * short-term reference window and refresh the use_slot[] bookkeeping
+ * that tracks which ref_surface[] entries are still referenced.
+ * B-frames are never used as references and are skipped entirely.
+ * Always returns 0.
+ */
+static int update_ReferenceFrames(void)
+{
+ int i;
+ /* B-frame is not used for reference */
+ if (current_frame_type == SLICE_TYPE_B)
+ return 0;
+
+ /* Sliding window: the newest reference goes to slot 0, the oldest
+ * one drops off once num_ref_frames entries are occupied.
+ */
+ CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ numShortTerm++;
+ if (numShortTerm > num_ref_frames)
+ numShortTerm = num_ref_frames;
+ for (i=numShortTerm-1; i>0; i--)
+ ReferenceFrames[i] = ReferenceFrames[i-1];
+ ReferenceFrames[0] = CurrentCurrPic;
+
+ /* Only reference pictures advance frame_num (B-frames returned
+ * above).  frame_num is coded modulo MaxFrameNum, so its legal
+ * range is [0, MaxFrameNum - 1]; wrap with >=, not >, otherwise
+ * the out-of-range value MaxFrameNum is emitted for one frame.
+ */
+ current_frame_num++;
+ if (current_frame_num >= MaxFrameNum)
+ current_frame_num = 0;
+
+ /* Update the use_slot. Only when the surface is used in reference
+ * frame list, the use_slot[index] is set
+ */
+ for (i = 0; i < SURFACE_NUM; i++) {
+ int j;
+ bool found;
+
+ found = false;
+ for (j = 0; j < numShortTerm; j++) {
+ if (ref_surface[i] == ReferenceFrames[j].picture_id) {
+ found = true;
+ break;
+ }
+ }
+ if (found)
+ use_slot[i] = 1;
+ else
+ use_slot[i] = 0;
+ }
+
+ return 0;
+}
+
static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice_type, int is_idr)
{
VAStatus va_status;
fprintf(stderr, "FATAL error!!!\n");
exit(1);
}
-
+
pthread_join(avcenc_context.upload_thread_id, NULL);
avcenc_context.upload_thread_value = -1;
else
avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
- if (frame_num == 0) {
+ if (is_idr) {
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
- unsigned int length_in_bits, offset_in_bytes;
+ unsigned int length_in_bits;
unsigned char *packed_seq_buffer = NULL, *packed_pic_buffer = NULL;
assert(slice_type == SLICE_TYPE_I);
length_in_bits = build_packed_seq_buffer(&packed_seq_buffer);
- offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderSequence;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
CHECK_VASTATUS(va_status,"vaCreateBuffer");
length_in_bits = build_packed_pic_buffer(&packed_pic_buffer);
- offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderPicture;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
vaUnmapBuffer(va_dpy, avcenc_context.misc_parameter_hrd_buf_id);
- /* slice parameter */
- avcenc_update_slice_parameter(slice_type);
-
return 0;
}
return 0;
}
-static void end_picture(int slice_type, int next_is_bpic)
+static void end_picture()
{
- VABufferID tempID;
-
- /* Prepare for next picture */
- tempID = surface_ids[SID_RECON_PICTURE];
-
- if (slice_type != SLICE_TYPE_B) {
- if (next_is_bpic) {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L1];
- surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
- } else {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
- surface_ids[SID_REFERENCE_PICTURE_L0] = tempID;
- }
- } else {
- if (!next_is_bpic) {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
- surface_ids[SID_REFERENCE_PICTURE_L0] = surface_ids[SID_REFERENCE_PICTURE_L1];
- surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
- }
- }
+ update_ReferenceFrames();
avcenc_destroy_buffers(&avcenc_context.seq_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_seq_header_param_buf_id, 1);
bitstream_put_ue(bs, ((frame_bit_rate * 8000) >> 6) - 1); /* cpb_size_value_minus1[0] */
bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
- bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */
- bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */
- bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */
- bitstream_put_ui(bs, 23, 5); /* time_offset_length */
+ /* initial_cpb_removal_delay_length_minus1 */
+ bitstream_put_ui(bs,
+ (avcenc_context.i_initial_cpb_removal_delay_length - 1), 5);
+ /* cpb_removal_delay_length_minus1 */
+ bitstream_put_ui(bs,
+ (avcenc_context.i_cpb_removal_delay_length - 1), 5);
+ /* dpb_output_delay_length_minus1 */
+ bitstream_put_ui(bs,
+ (avcenc_context.i_dpb_output_delay_length - 1), 5);
+ /* time_offset_length */
+ bitstream_put_ui(bs,
+ (avcenc_context.time_offset_length - 1), 5);
}
bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
}
static int
-build_packed_sei_buffer_timing(unsigned int init_cpb_removal_length,
- unsigned int init_cpb_removal_delay,
- unsigned int init_cpb_removal_delay_offset,
+build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
unsigned int cpb_removal_length,
- unsigned int cpb_removal_delay,
unsigned int dpb_output_length,
- unsigned int dpb_output_delay,
unsigned char **sei_buffer)
{
unsigned char *byte_buf;
int bp_byte_size, i, pic_byte_size;
+ unsigned int cpb_removal_delay;
bitstream nal_bs;
bitstream sei_bp_bs, sei_pic_bs;
bitstream_start(&sei_bp_bs);
bitstream_put_ue(&sei_bp_bs, 0); /*seq_parameter_set_id*/
- bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay, cpb_removal_length);
- bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay_offset, cpb_removal_length);
+ /* SEI buffer period info */
+ /* NALHrdBpPresentFlag == 1 */
+ bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay,
+ init_cpb_removal_delay_length);
+ bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay_offset,
+ init_cpb_removal_delay_length);
if ( sei_bp_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_bp_bs, 1, 1);
}
bitstream_end(&sei_bp_bs);
bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
+ /* SEI pic timing info */
bitstream_start(&sei_pic_bs);
+ /* The info of CPB and DPB delay is controlled by CpbDpbDelaysPresentFlag,
+ * which is derived as 1 if one of the following conditions is true:
+ * nal_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
+ * vcl_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
+ */
+ cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.prev_idr_cpb_removal);
bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
- bitstream_put_ui(&sei_pic_bs, dpb_output_delay, dpb_output_length);
+ bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
+ dpb_output_length);
if ( sei_pic_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_pic_bs, 1, 1);
}
+ /* The pic_structure_present_flag determines whether the pic_structure
+ * info is written into the SEI pic timing info.
+ * Currently it is set to zero.
+ */
bitstream_end(&sei_pic_bs);
pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
- /* write the SEI timing data */
+ /* write the SEI pic timing data */
bitstream_put_ui(&nal_bs, 0x01, 8);
bitstream_put_ui(&nal_bs, pic_byte_size, 8);
return nal_bs.bit_offset;
}
+/* Build a packed SEI NAL unit carrying a single pic_timing payload
+ * (payload type 1) for a non-IDR picture.  The CPB removal delay is
+ * measured from the current IDR's removal time; the DPB output delay
+ * is taken from avcenc_context.current_dpb_removal_delta.
+ * On return *sei_buffer points at the NAL bitstream buffer and the
+ * packed length in bits is returned.
+ * NOTE(review): the buffer appears to be heap-allocated by the
+ * bitstream helpers and owned by the caller — confirm against
+ * bitstream_start()/bitstream_end().
+ */
+static int
+build_packed_sei_pic_timing(unsigned int cpb_removal_length,
+ unsigned int dpb_output_length,
+ unsigned char **sei_buffer)
+{
+ unsigned char *byte_buf;
+ int i, pic_byte_size;
+ unsigned int cpb_removal_delay;
+
+ bitstream nal_bs;
+ bitstream sei_pic_bs;
+
+ bitstream_start(&sei_pic_bs);
+ /* The info of CPB and DPB delay is controlled by CpbDpbDelaysPresentFlag,
+ * which is derived as 1 if one of the following conditions is true:
+ * nal_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
+ * vcl_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
+ */
+ cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.current_idr_cpb_removal);
+ bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
+ bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
+ dpb_output_length);
+ /* Byte-align the SEI payload with a stop bit if needed. */
+ if ( sei_pic_bs.bit_offset & 0x7) {
+ bitstream_put_ui(&sei_pic_bs, 1, 1);
+ }
+
+ /* The pic_structure_present_flag determines whether the pic_structure
+ * info is written into the SEI pic timing info.
+ * Currently it is set to zero.
+ */
+ bitstream_end(&sei_pic_bs);
+ pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
+
+ bitstream_start(&nal_bs);
+ nal_start_code_prefix(&nal_bs);
+ nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
+
+ /* write the SEI Pic timing data */
+ /* payload type 1 (pic_timing) followed by the payload size in bytes */
+ bitstream_put_ui(&nal_bs, 0x01, 8);
+ bitstream_put_ui(&nal_bs, pic_byte_size, 8);
+
+ byte_buf = (unsigned char *)sei_pic_bs.buffer;
+ for(i = 0; i < pic_byte_size; i++) {
+ bitstream_put_ui(&nal_bs, byte_buf[i], 8);
+ }
+ free(byte_buf);
+
+ rbsp_trailing_bits(&nal_bs);
+ bitstream_end(&nal_bs);
+
+ *sei_buffer = (unsigned char *)nal_bs.buffer;
+
+ return nal_bs.bit_offset;
+}
+
#if 0
static void
slice_header(bitstream *bs, int frame_num, int display_frame, int slice_type, int nal_ref_idc, int is_idr)
return 0;
}
+/*
+ * It is from the h264encode.c but it simplifies something.
+ * For example: When one frame is encoded as I-frame under the scenario with
+ * P-B frames, it will be regarded as IDR frame(key-frame) and then new GOP is
+ * started. If the video clip is encoded as all I-frames, the first frame
+ * is regarded as IDR and the remaining is regarded as I-frame.
+ *
+ */
+
+/* Map a frame's encoding (bitstream) order to its display order and
+ * decide its frame type.
+ * encoding_order: index of the frame in encoding order.
+ * gop_size: intra/IDR period (a new GOP starts every gop_size frames).
+ * ip_period: 0 = I/IDR frames only, 1 = IP, >1 = distance between
+ * P frames (ip_period - 1 B frames in between).
+ * displaying_order, frame_type: outputs (frame_type is FRAME_IDR or
+ * one of the SLICE_TYPE_* values).
+ */
+static void encoding2display_order(
+ unsigned long long encoding_order,int gop_size,
+ int ip_period,
+ unsigned long long *displaying_order,
+ int *frame_type)
+{
+ int encoding_order_gop = 0;
+
+ /* When ip_period is 0, all are I/IDR frames */
+ if (ip_period == 0) { /* all are I/IDR frames */
+ if (encoding_order == 0)
+ *frame_type = FRAME_IDR;
+ else
+ *frame_type = SLICE_TYPE_I;
+
+ *displaying_order = encoding_order;
+ return;
+ }
+
+ /* new sequence like
+ * IDR PPPPP IDRPPPPP
+ * IDR (PBB)(PBB)(PBB)(PBB) IDR (PBB)(PBB)(PBB)(PBB)
+ */
+ encoding_order_gop = encoding_order % gop_size;
+
+ if (encoding_order_gop == 0) { /* the first frame */
+ *frame_type = FRAME_IDR;
+ *displaying_order = encoding_order;
+ } else {
+ int gop_delta;
+
+ gop_delta = 1;
+
+ /* When (gop_size - 1) is not a multiple of ip_period the last
+ * mini-GOP is shorter; shrink ip_period for that tail so the
+ * final frames still map onto a valid P/B pattern.
+ */
+ if ((ip_period != 1) && ((gop_size - 1) % ip_period)) {
+ int ipb_size;
+ ipb_size = (gop_size - 1) / ip_period * ip_period + 1;
+ if (encoding_order_gop >= ipb_size) {
+ gop_delta = ipb_size;
+ ip_period = gop_size - ipb_size;
+ }
+ }
+
+ if (((encoding_order_gop - gop_delta) % ip_period) == 0) { /* P frames */
+ *frame_type = SLICE_TYPE_P;
+ /* The P frame is encoded before the B frames it follows in
+ * display order, so it displays ip_period - 1 frames later.
+ */
+ *displaying_order = encoding_order + ip_period - 1;
+ } else {
+ *frame_type = SLICE_TYPE_B;
+ *displaying_order = encoding_order - 1;
+ }
+ }
+}
+
+
static void
encode_picture(FILE *yuv_fp, FILE *avc_fp,
int frame_num, int display_num,
index = SID_INPUT_PICTURE_0;
if ( next_display_num >= frame_number )
next_display_num = frame_number - 1;
- fseek(yuv_fp, frame_size * next_display_num, SEEK_SET);
+ fseeko(yuv_fp, (off_t)frame_size * next_display_num, SEEK_SET);
avcenc_context.upload_thread_param.yuv_fp = yuv_fp;
avcenc_context.upload_thread_param.surface_id = surface_ids[index];
&avcenc_context.codedbuf_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
+ /* Update the RefPicList */
+ update_RefPicList();
+
/* picture parameter set */
- avcenc_update_picture_parameter(slice_type, frame_num, display_num, is_idr);
+ avcenc_update_picture_parameter(slice_type, is_idr);
+
+ /* slice parameter */
+ avcenc_update_slice_parameter(slice_type);
if (avcenc_context.rate_control_method == VA_RC_CBR)
- avcenc_update_sei_param(frame_num);
+ avcenc_update_sei_param(is_idr);
avcenc_render_picture();
end_picture(slice_type, next_is_bpic);
}
-static void encode_pb_pictures(FILE *yuv_fp, FILE *avc_fp, int f, int nbframes, int next_f)
-{
- int i;
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number, f + nbframes,
- 0,
- SLICE_TYPE_P, 1, f);
-
- for( i = 0; i < nbframes - 1; i++) {
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number + 1, f + i,
- 0,
- SLICE_TYPE_B, 1, f + i + 1);
- }
-
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number + 1, f + nbframes - 1,
- 0,
- SLICE_TYPE_B, 0, next_f);
-}
-
static void show_help()
{
- printf("Usage: avnenc <width> <height> <input_yuvfile> <output_avcfile> [qp=qpvalue|fb=framebitrate] [mode=0(I frames only)/1(I and P frames)/2(I, P and B frames)\n");
+ printf("Usage: avnenc <width> <height> <input_yuvfile> <output_avcfile> [--qp=qpvalue|--fb=framebitrate] [--mode=0(I frames only)/1(I and P frames)/2(I, P and B frames)\n");
}
static void avcenc_context_seq_param_init(VAEncSequenceParameterBufferH264 *seq_param,
seq_param->seq_parameter_set_id = 0;
seq_param->level_idc = 41;
seq_param->intra_period = intra_period;
+ seq_param->intra_idr_period = seq_param->intra_period;
seq_param->ip_period = ip_period;
seq_param->max_num_ref_frames = 4;
seq_param->picture_width_in_mbs = width_in_mbs;
seq_param->seq_fields.bits.pic_order_cnt_type = 0;
seq_param->seq_fields.bits.direct_8x8_inference_flag = 0;
- seq_param->seq_fields.bits.log2_max_frame_num_minus4 = 0;
- seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2;
+ seq_param->seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
+ seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
if (frame_bit_rate > 0)
seq_param->vui_parameters_present_flag = 1; //HRD info located in vui
pic_param->pic_fields.bits.transform_8x8_mode_flag = 1;
pic_param->pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+ memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
}
static void avcenc_context_sei_init()
{
- int init_cpb_size;
- int target_bit_rate;
-
/* it comes for the bps defined in SPS */
- target_bit_rate = avcenc_context.seq_param.bits_per_second;
- init_cpb_size = (target_bit_rate * 8) >> 10;
- avcenc_context.i_initial_cpb_removal_delay = init_cpb_size * 0.5 * 1024 / target_bit_rate * 90000;
+ avcenc_context.i_initial_cpb_removal_delay = 2 * 90000;
+ avcenc_context.i_initial_cpb_removal_delay_offset = 2 * 90000;
avcenc_context.i_cpb_removal_delay = 2;
avcenc_context.i_initial_cpb_removal_delay_length = 24;
avcenc_context.i_cpb_removal_delay_length = 24;
avcenc_context.i_dpb_output_delay_length = 24;
+ avcenc_context.time_offset_length = 24;
+
+ avcenc_context.prev_idr_cpb_removal = avcenc_context.i_initial_cpb_removal_delay / 90000;
+ avcenc_context.current_idr_cpb_removal = avcenc_context.prev_idr_cpb_removal;
+ avcenc_context.current_cpb_removal = 0;
+ avcenc_context.idr_frame_num = 0;
}
static void avcenc_context_init(int width, int height)
memset(&avcenc_context, 0, sizeof(avcenc_context));
avcenc_context.profile = VAProfileH264Main;
+ memset(&use_slot, 0, sizeof(use_slot));
switch (avcenc_context.profile) {
case VAProfileH264Baseline:
avcenc_context.constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
int f;
FILE *yuv_fp;
FILE *avc_fp;
- long file_size;
- int i_frame_only=0,i_p_frame_only=1;
+ off_t file_size;
int mode_value;
struct timeval tpstart,tpend;
float timeuse;
va_init_display_args(&argc, argv);
- //TODO may be we should using option analytics library
- if(argc != 5 && argc != 6 && argc != 7) {
+ if(argc < 5) {
show_help();
return -1;
}
picture_width_in_mbs = (picture_width + 15) / 16;
picture_height_in_mbs = (picture_height + 15) / 16;
- if (argc == 6 || argc == 7) {
- qp_value = -1;
- sscanf(argv[5], "qp=%d", &qp_value);
- if ( qp_value == -1 ) {
- frame_bit_rate = -1;
- sscanf(argv[5], "fb=%d", &frame_bit_rate);
- if ( frame_bit_rate == -1 ) {
+ if (argc > 5) {
+ char o;
+
+ optind = 5;
+
+ while ((o = getopt_long_only(argc, argv, "", longopts, NULL)) != -1) {
+ switch (o) {
+ case 1: // qp
+ frame_bit_rate = -1;
+ qp_value = atoi(optarg);
+
+ if (qp_value > 51)
+ qp_value = 51;
+
+ if (qp_value < 0)
+ qp_value = 0;
+
+ break;
+
+ case 2: // fb
+ qp_value = -1;
+ frame_bit_rate = atoi(optarg);
+
+ if (frame_bit_rate <= 0) {
+ show_help();
+
+ return -1;
+ }
+
+ break;
+
+ case 3: // mode
+ mode_value = atoi(optarg);
+
+ if (mode_value == 0)
+ ip_period = 0;
+ else if (mode_value == 1)
+ ip_period = 1;
+ else if (mode_value == 2)
+ /* Hack mechanism before adding the parameter of B-frame number */
+ ip_period = 2;
+ else {
+ printf("mode_value = %d\n", mode_value);
+ show_help();
+ return -1;
+ }
+
+ break;
+
+ default:
show_help();
return -1;
}
- } else if (qp_value > 51) {
- qp_value = 51;
- } else if (qp_value < 0) {
- qp_value = 0;
}
} else
qp_value = 28; //default const QP mode
- if (argc == 7) {
- sscanf(argv[6], "mode=%d", &mode_value);
- if ( mode_value == 0 ) {
- i_frame_only = 1;
- i_p_frame_only = 0;
- ip_period = 0;
- }
- else if ( mode_value == 1) {
- i_frame_only = 0;
- i_p_frame_only = 1;
- ip_period = 1;
- }
- else if ( mode_value == 2 ) {
- i_frame_only = 0;
- i_p_frame_only = 0;
- /* Hack mechanism before adding the parameter of B-frame number */
- ip_period = 3;
- }
- else {
- printf("mode_value=%d\n",mode_value);
- show_help();
- return -1;
- }
- }
-
yuv_fp = fopen(argv[3],"rb");
if ( yuv_fp == NULL){
printf("Can't open input YUV file\n");
return -1;
}
- fseek(yuv_fp,0l, SEEK_END);
- file_size = ftell(yuv_fp);
+ fseeko(yuv_fp, (off_t)0, SEEK_END);
+ file_size = ftello(yuv_fp);
frame_size = picture_width * picture_height + ((picture_width * picture_height) >> 1) ;
if ( (file_size < frame_size) || (file_size % frame_size) ) {
return -1;
}
frame_number = file_size / frame_size;
- fseek(yuv_fp, 0l, SEEK_SET);
+ fseeko(yuv_fp, (off_t)0, SEEK_SET);
avc_fp = fopen(argv[4], "wb");
if ( avc_fp == NULL) {
alloc_encode_resource(yuv_fp);
enc_frame_number = 0;
- for ( f = 0; f < frame_number; ) { //picture level loop
- static int const frame_type_pattern[][2] = { {SLICE_TYPE_I,1},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,2} };
-
- if ( i_frame_only ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0, f+1);
- f++;
- enc_frame_number++;
- } else if ( i_p_frame_only ) {
- if ( (f % intra_period) == 0 ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0, f+1);
- f++;
- enc_frame_number++;
- } else {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_P, 0, f+1);
- f++;
- enc_frame_number++;
+ for ( f = 0; f < frame_number; f++) { //picture level loop
+ unsigned long long next_frame_display;
+ int next_frame_type;
+
+ enc_frame_number = f;
+
+ encoding2display_order(enc_frame_number, intra_period, ip_period,
+ ¤t_frame_display, ¤t_frame_type);
+
+ encoding2display_order(enc_frame_number + 1, intra_period, ip_period,
+ &next_frame_display, &next_frame_type);
+
+ if (current_frame_type == FRAME_IDR) {
+ numShortTerm = 0;
+ current_frame_num = 0;
+ memset(&use_slot, 0, sizeof(use_slot));
+ current_IDR_display = current_frame_display;
+ if (avcenc_context.rate_control_method == VA_RC_CBR) {
+ unsigned long long frame_interval;
+
+ frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
+
+ /* Based on the H264 spec the removal time of the IDR access
+ * unit is derived as the following:
+ * the removal time of previous IDR unit + Tc * cpb_removal_delay(n)
+ */
+ avcenc_context.current_cpb_removal = avcenc_context.prev_idr_cpb_removal +
+ frame_interval * 2;
+ avcenc_context.idr_frame_num = enc_frame_number;
+ avcenc_context.current_idr_cpb_removal = avcenc_context.current_cpb_removal;
+ if (ip_period)
+ avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
+ else
+ avcenc_context.current_dpb_removal_delta = 2;
}
- } else { // follow the i,p,b pattern
- static int fcurrent = 0;
- int fnext;
-
- fcurrent = fcurrent % (sizeof(frame_type_pattern)/sizeof(int[2]));
- fnext = (fcurrent+1) % (sizeof(frame_type_pattern)/sizeof(int[2]));
-
- if ( frame_type_pattern[fcurrent][0] == SLICE_TYPE_I ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0,
- f+frame_type_pattern[fnext][1]);
- f++;
- enc_frame_number++;
- } else {
- encode_pb_pictures(yuv_fp, avc_fp, f, frame_type_pattern[fcurrent][1]-1,
- f + frame_type_pattern[fcurrent][1] + frame_type_pattern[fnext][1] -1 );
- f += frame_type_pattern[fcurrent][1];
- enc_frame_number++;
+ } else {
+ if (avcenc_context.rate_control_method == VA_RC_CBR) {
+ unsigned long long frame_interval;
+
+ frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
+
+ /* Based on the H264 spec the removal time of the non-IDR access
+ * unit is derived as the following:
+ * the removal time of current IDR unit + Tc * cpb_removal_delay(n)
+ */
+ avcenc_context.current_cpb_removal = avcenc_context.current_idr_cpb_removal +
+ frame_interval * 2;
+ if (current_frame_type == SLICE_TYPE_I ||
+ current_frame_type == SLICE_TYPE_P) {
+ if (ip_period)
+ avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
+ else
+ avcenc_context.current_dpb_removal_delta = 2;
+ } else
+ avcenc_context.current_dpb_removal_delta = 2;
}
-
- fcurrent++;
}
- printf("\r %d/%d ...", f+1, frame_number);
+
+ /* use the simple mechanism to calc the POC */
+ current_poc = (current_frame_display - current_IDR_display) * 2;
+
+ encode_picture(yuv_fp, avc_fp, frame_number, current_frame_display,
+ (current_frame_type == FRAME_IDR) ? 1 : 0,
+ (current_frame_type == FRAME_IDR) ? SLICE_TYPE_I : current_frame_type,
+ (next_frame_type == SLICE_TYPE_B) ? 1 : 0,
+ next_frame_display);
+ if ((current_frame_type == FRAME_IDR) &&
+ (avcenc_context.rate_control_method == VA_RC_CBR)) {
+ /* after one IDR frame is written, it needs to update the
+ * prev_idr_cpb_removal for next IDR
+ */
+ avcenc_context.prev_idr_cpb_removal = avcenc_context.current_idr_cpb_removal;
+ }
+ printf("\r %d/%d ...", f, frame_number);
fflush(stdout);
}