[android-x86/hardware-intel-common-libva.git] test/encode/h264encode.c
/*
 * Copyright (c) 2007-2008 Intel Corporation. All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/*
 * This is a simple program showing how VA-API encoding works.
 * It does H.264 elementary-stream encoding on auto-generated YUV data.
 *
 * gcc -o h264encode h264encode.c -lva -lva-x11 -lX11
 * ./h264encode -w <width> -h <height> -n <frame_num>
 */
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <getopt.h>
#include <X11/Xlib.h>

#include <unistd.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <assert.h>

#include <va/va.h>
#include <va/va_x11.h>

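/* abort on any VA-API failure; "func" names the failing call */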
#define CHECK_VASTATUS(va_status,func)                                  \
if (va_status != VA_STATUS_SUCCESS) {                                   \
    fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \
    exit(1);                                                            \
}

#include "loadsurface.h"

#define SURFACE_NUM 18 /* 16 surfaces for src, 2 surfaces for reconstructed/reference */

static  Display *x11_display;
static  VADisplay va_dpy;
static  VAContextID context_id;
static  VASurfaceID surface_id[SURFACE_NUM];
static  Window display_win = 0;
static  int win_width;
static  int win_height;

static  int coded_fd;
static  char coded_file[256];

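/* pool of coded output buffers, created once in do_h264_encoding() and
 * reused in rotation via codedbuf_idx, one per submitted frame */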
#define CODEDBUF_NUM 5
static  VABufferID coded_buf[CODEDBUF_NUM];

static  int frame_display = 0; /* display the frame during encoding */
static  int frame_width=352, frame_height=288;
static  int frame_rate = 30;
static  int frame_count = 400;
static  int intra_count = 30;
static  int frame_bitrate = 8000000; /* 8M */
static  int initial_qp = 15;
static  int minimal_qp = 0;

static int upload_source_YUV_once_for_all()
{
    int box_width=8;
    int row_shift=0;
    int i;

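    /* upload_surface() (from loadsurface.h) fills each source surface with a
     * generated YUV test pattern; row_shift is advanced per surface so
     * successive frames differ and the encoder has motion to code */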
    for (i=0; i<SURFACE_NUM-2; i++) {
        printf("\rLoading data into surface %d.....", i);
        upload_surface(va_dpy, surface_id[i], box_width, row_shift, 0);

        row_shift++;
        if (row_shift==(2*box_width)) row_shift= 0;
    }
    printf("\n");

    return 0;
}


static int save_coded_buf(VABufferID coded_buf, int current_frame, int frame_skipped)
{
    VACodedBufferSegment *buf_list = NULL;
    VAStatus va_status;
    unsigned int coded_size = 0;

    va_status = vaMapBuffer(va_dpy,coded_buf,(void **)(&buf_list));
    CHECK_VASTATUS(va_status,"vaMapBuffer");
    while (buf_list != NULL) {
        printf("Write %d bytes", buf_list->size);
        coded_size += write(coded_fd, buf_list->buf, buf_list->size);
        buf_list = buf_list->next;
    }
    vaUnmapBuffer(va_dpy,coded_buf);

    printf("\r      "); /* return to the start of the line */
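    /* print a simple progress spinner followed by per-frame statistics */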
    switch (current_frame % 4) {
        case 0:
            printf("|");
            break;
        case 1:
            printf("/");
            break;
        case 2:
            printf("-");
            break;
        case 3:
            printf("\\");
            break;
    }
    printf("%08d", current_frame);
    if (current_frame % intra_count == 0)
        printf("(I)");
    else
        printf("(P)");

    printf("(%06d bytes coded)",coded_size);
    if (frame_skipped)
        printf("(Skipped)");
    printf("                                    ");

    return 0;
}


static int display_surface(int frame_id, int *exit_encode)
{
    Window win = display_win;
    XEvent event;
    VAStatus va_status;

    if (win == 0) { /* display reconstructed surface */
        win_width = frame_width;
        win_height = frame_height;

        win = XCreateSimpleWindow(x11_display, RootWindow(x11_display, 0), 0, 0,
                                  frame_width, frame_height, 0, 0, WhitePixel(x11_display, 0));
        XMapWindow(x11_display, win);
        XSync(x11_display, False);

        display_win = win;
    }

    va_status = vaPutSurface(va_dpy, surface_id[frame_id], win,
                             0,0, frame_width, frame_height,
                             0,0, win_width, win_height,
                             NULL,0,0);
    CHECK_VASTATUS(va_status,"vaPutSurface");

    *exit_encode = 0;
    while(XPending(x11_display)) {
        XNextEvent(x11_display, &event);

        /* bail on any focused key press */
        if(event.type == KeyPress) {
            *exit_encode = 1;
            break;
        }

        /* rescale the video to fit the window */
        if(event.type == ConfigureNotify) {
            win_width = event.xconfigure.width;
            win_height = event.xconfigure.height;
        }
    }

    return 0;
}

enum {
    SH_LEVEL_1=10,
    SH_LEVEL_1B=11,
    SH_LEVEL_2=20,
    SH_LEVEL_3=30,
    SH_LEVEL_31=31,
    SH_LEVEL_32=32,
    SH_LEVEL_4=40,
    SH_LEVEL_5=50
};

static int do_h264_encoding(void)
{
    VAEncPictureParameterBufferH264 pic_h264;
    VAEncSliceParameterBuffer slice_h264;
    VAStatus va_status;
    VABufferID seq_param_buf, pic_param_buf, slice_param_buf;
    int codedbuf_size;
    VASurfaceStatus surface_status;
    int src_surface, dst_surface, ref_surface;
    int codedbuf_idx = 0;
    int frame_skipped = 0;
    int i;

    /* upload RAW YUV data into all surfaces (they were already created in main()) */
    upload_source_YUV_once_for_all();

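    /* rough upper bound on the compressed size of one frame:
     * (width*height)/(16*16) macroblocks, budgeted at ~400 bytes each */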
    codedbuf_size = (frame_width * frame_height * 400) / (16*16);

    for (i = 0; i < CODEDBUF_NUM; i++) {
        /* create the coded buffers once, up front.
         * Other VA buffers are not needed again after vaRenderPicture,
         * so the app can simply vaCreateBuffer them for every frame,
         * but a coded buffer has to be mapped and read after
         * vaRenderPicture/vaEndPicture, so it is kept and reused here.
         */
        va_status = vaCreateBuffer(va_dpy,context_id,VAEncCodedBufferType,
                                   codedbuf_size, 1, NULL, &coded_buf[i]);
        CHECK_VASTATUS(va_status,"vaCreateBuffer");
    }

    src_surface = 0;
    /* the last two surfaces are used as the reconstructed/reference frames */
    dst_surface = SURFACE_NUM - 1;
    ref_surface = SURFACE_NUM - 2;

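    /* dst_surface receives the reconstructed picture of the current frame and
     * ref_surface holds the previous reconstructed picture used for P-frame
     * prediction; the two are swapped after every frame that is not skipped */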
    for (i = 0; i < frame_count; i++) {
        va_status = vaBeginPicture(va_dpy, context_id, surface_id[src_surface]);
        CHECK_VASTATUS(va_status,"vaBeginPicture");

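        /* sequence-level parameters only need to be sent once, before the
         * first frame; picture and slice parameters are sent per frame below */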
        if (i == 0) {
            VAEncSequenceParameterBufferH264 seq_h264 = {0};

            seq_h264.level_idc = SH_LEVEL_3;
            seq_h264.picture_width_in_mbs = frame_width / 16;
            seq_h264.picture_height_in_mbs = frame_height / 16;
            seq_h264.bits_per_second = frame_bitrate;
            seq_h264.frame_rate = frame_rate;
            seq_h264.initial_qp = initial_qp;
            seq_h264.min_qp = minimal_qp;
            seq_h264.basic_unit_size = 0;
            seq_h264.intra_period = intra_count;

            va_status = vaCreateBuffer(va_dpy, context_id,
                                       VAEncSequenceParameterBufferType,
                                       sizeof(seq_h264),1,&seq_h264,&seq_param_buf);
            CHECK_VASTATUS(va_status,"vaCreateBuffer");

            va_status = vaRenderPicture(va_dpy,context_id, &seq_param_buf, 1);
            CHECK_VASTATUS(va_status,"vaRenderPicture");
        }

        pic_h264.reference_picture = surface_id[ref_surface];
        pic_h264.reconstructed_picture = surface_id[dst_surface];
        pic_h264.coded_buf = coded_buf[codedbuf_idx];
        pic_h264.picture_width = frame_width;
        pic_h264.picture_height = frame_height;
        pic_h264.last_picture = (i == frame_count - 1);

        va_status = vaCreateBuffer(va_dpy, context_id,VAEncPictureParameterBufferType,
                                   sizeof(pic_h264),1,&pic_h264,&pic_param_buf);
        CHECK_VASTATUS(va_status,"vaCreateBuffer");

        va_status = vaRenderPicture(va_dpy,context_id, &pic_param_buf, 1);
        CHECK_VASTATUS(va_status,"vaRenderPicture");

        /* one frame, one slice */
        slice_h264.start_row_number = 0;
        slice_h264.slice_height = frame_height/16; /* Measured by MB */
        slice_h264.slice_flags.bits.is_intra = ((i % intra_count) == 0);
        slice_h264.slice_flags.bits.disable_deblocking_filter_idc = 0;
        va_status = vaCreateBuffer(va_dpy,context_id,VAEncSliceParameterBufferType,
                                   sizeof(slice_h264),1,&slice_h264,&slice_param_buf);
        CHECK_VASTATUS(va_status,"vaCreateBuffer");

        va_status = vaRenderPicture(va_dpy,context_id, &slice_param_buf, 1);
        CHECK_VASTATUS(va_status,"vaRenderPicture");

        va_status = vaEndPicture(va_dpy,context_id);
        CHECK_VASTATUS(va_status,"vaEndPicture");

        va_status = vaSyncSurface(va_dpy, surface_id[src_surface]);
        CHECK_VASTATUS(va_status,"vaSyncSurface");

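        /* the driver may have skipped this frame (e.g. for rate control);
         * if so, the reference/reconstructed surfaces are not swapped below */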
        surface_status = 0;
        va_status = vaQuerySurfaceStatus(va_dpy, surface_id[src_surface],&surface_status);
        frame_skipped = (surface_status & VASurfaceSkipped);

        save_coded_buf(coded_buf[codedbuf_idx], i, frame_skipped);

        /* should display the reconstructed frame, but just display the source frame */
        if (frame_display) {
            int exit_encode = 0;

            display_surface(src_surface, &exit_encode);
            if (exit_encode)
                frame_count = i;
        }

        /* use the next source surface */
        src_surface++;
        if (src_surface == (SURFACE_NUM - 2))
            src_surface = 0;

        /* use the next coded buffer */
        codedbuf_idx++;
        if (codedbuf_idx == CODEDBUF_NUM)
            codedbuf_idx = 0;

        /* if a frame is skipped, the next frame still uses the last reference frame */
        if (frame_skipped == 0) {
            /* swap ref/dst */
            int tmp = dst_surface;
            dst_surface = ref_surface;
            ref_surface = tmp;
        }
    }

    return 0;
}

int main(int argc,char **argv)
{
    VAEntrypoint entrypoints[5];
    int num_entrypoints,slice_entrypoint;
    VAConfigAttrib attrib[2];
    VAConfigID config_id;
    int major_ver, minor_ver;
    VAStatus va_status;
    int c;

    strcpy(coded_file, "/tmp/demo.264");
    while ((c = getopt(argc,argv,"w:h:n:p:f:r:q:s:o:d?")) != EOF) {
        switch (c) {
                case 'w':
                    frame_width = atoi(optarg);
                    break;
                case 'h':
                    frame_height = atoi(optarg);
                    break;
                case 'n':
                    frame_count = atoi(optarg);
                    break;
                case 'p':
                    intra_count = atoi(optarg);
                    break;
                case 'f':
                    frame_rate = atoi(optarg);
                    break;
                case 'r':
                    frame_bitrate = atoi(optarg);
                    break;
                case 'q':
                    initial_qp = atoi(optarg);
                    break;
                case 's':
                    minimal_qp = atoi(optarg);
                    break;
                case 'd':
                    frame_display = 1;
                    break;
                case 'o':
                    strcpy(coded_file, optarg);
                    break;
                case ':':
                case '?':
                    printf("./h264encode <options>\n");
                    printf("   -w -h: resolution\n");
                    printf("   -n frame number\n");
                    printf("   -d display the source frame\n");
                    printf("   -p P frame count between two I frames\n");
                    printf("   -f frame rate\n");
                    printf("   -r bit rate\n");
                    printf("   -q initial QP\n");
                    printf("   -s minimal QP\n");
                    printf("   -o coded file\n");
                    exit(0);
        }
    }

    x11_display = XOpenDisplay(":0.0");
    assert(x11_display);

    va_dpy = vaGetDisplay(x11_display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    CHECK_VASTATUS(va_status, "vaInitialize");

    vaQueryConfigEntrypoints(va_dpy, VAProfileH264Baseline, entrypoints,
                             &num_entrypoints);
    for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
        if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice)
            break;
    }
    if (slice_entrypoint == num_entrypoints) {
        /* the slice encoding entry point is not supported */
        assert(0);
    }

    /* find out the supported render target formats and rate control modes */
    attrib[0].type = VAConfigAttribRTFormat;
    attrib[1].type = VAConfigAttribRateControl;
    vaGetConfigAttributes(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice,
                          &attrib[0], 2);
    if ((attrib[0].value & VA_RT_FORMAT_YUV420) == 0) {
        /* the desired YUV420 RT format is not supported */
        assert(0);
    }
    if ((attrib[1].value & VA_RC_VBR) == 0) {
        /* the desired rate control mode is not supported */
        printf("VBR mode not found, exit\n");
        assert(0);
    }
    attrib[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */
    attrib[1].value = VA_RC_VBR; /* set to desired RC mode */

    va_status = vaCreateConfig(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice,
                              &attrib[0], 2,&config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig");

    va_status = vaCreateSurfaces(va_dpy,frame_width, frame_height,
                                 VA_RT_FORMAT_YUV420, SURFACE_NUM, &surface_id[0]);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");

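    /* the context height is rounded up to a whole number of 16x16 macroblocks */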
    /* Create a context for this encode pipe */
    va_status = vaCreateContext(va_dpy, config_id,
                                frame_width, ((frame_height+15)/16)*16,
                                VA_PROGRESSIVE,&surface_id[0],SURFACE_NUM,&context_id);
    CHECK_VASTATUS(va_status, "vaCreateContext");

    /* store the coded data into a file */
    coded_fd = open(coded_file, O_CREAT|O_TRUNC|O_RDWR, 0644);
    if (coded_fd == -1) {
        printf("Open file %s failed, exit\n", coded_file);
        exit(1);
    }

    printf("Coding %d frames, %dx%d, saving the coded stream into %s\n",
           frame_count, frame_width, frame_height, coded_file);
    do_h264_encoding();

    printf("\n\n");

    vaDestroyContext(va_dpy,context_id);
    vaDestroyConfig(va_dpy,config_id);
    vaDestroySurfaces(va_dpy,&surface_id[0],SURFACE_NUM);

    vaTerminate(va_dpy);

    XCloseDisplay(x11_display);

    return 0;
}