2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
36 #include <va/va_dricommon.h>
38 #include "intel_driver.h"
39 #include "intel_memman.h"
40 #include "intel_batchbuffer.h"
41 #include "i965_defines.h"
42 #include "i965_drv_video.h"
/* VA object IDs: each object class (config/context/surface/buffer/image/
 * subpicture) gets a disjoint numeric range so an ID's owning heap can be
 * recognized from its value alone. */
44 #define CONFIG_ID_OFFSET 0x01000000
45 #define CONTEXT_ID_OFFSET 0x02000000
46 #define SURFACE_ID_OFFSET 0x04000000
47 #define BUFFER_ID_OFFSET 0x08000000
48 #define IMAGE_ID_OFFSET 0x0a000000
49 #define SUBPIC_ID_OFFSET 0x10000000
/* Per-generation capability predicates keyed off the PCI device id.
 * NOTE(review): this listing appears truncated -- the closing lines of
 * HAS_H264/HAS_VC1/HAS_ENCODER/HAS_JPEG are missing; verify against upstream
 * before relying on these macro bodies. */
51 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
52 IS_IRONLAKE((ctx)->intel.device_id) || \
53 ((IS_GEN6((ctx)->intel.device_id) || \
54 IS_GEN7((ctx)->intel.device_id)) && \
55 (ctx)->intel.has_bsd))
57 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
58 IS_GEN6((ctx)->intel.device_id) || \
59 IS_IRONLAKE((ctx)->intel.device_id)) && \
62 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
63 IS_GEN6((ctx)->intel.device_id)) && \
66 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
67 IS_GEN6((ctx)->intel.device_id)) && \
68 (ctx)->render_state.interleaved_uv)
70 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
71 IS_GEN6((ctx)->intel.device_id)) && \
74 #define HAS_VPP(ctx) (IS_IRONLAKE((ctx)->intel.device_id) || \
75 IS_GEN6((ctx)->intel.device_id) || \
76 IS_GEN7((ctx)->intel.device_id))
78 #define HAS_JPEG(ctx) (IS_GEN7((ctx)->intel.device_id) && \
/* Surface type tags used by the image/subpicture format tables below.
 * NOTE(review): the enum opening (and, presumably, an I965_SURFACETYPE_YUV
 * entry) is not visible in this listing -- verify against upstream. */
83 I965_SURFACETYPE_RGBA = 1,
85 I965_SURFACETYPE_INDEXED
88 /* List of supported image formats */
91 VAImageFormat va_format;
92 } i965_image_format_map_t;
/* Table of VAImage formats this driver can create: YV12, I420 and NV12,
 * all 12 bits per pixel. Terminated by a zero fourcc entry. */
94 static const i965_image_format_map_t
95 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
96 { I965_SURFACETYPE_YUV,
97 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
98 { I965_SURFACETYPE_YUV,
99 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
100 { I965_SURFACETYPE_YUV,
101 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
104 /* List of supported subpicture formats */
108 VAImageFormat va_format;
109 unsigned int va_flags;
110 } i965_subpic_format_map_t;
/* Subpicture formats: two 4-bit palettized formats (IA44/AI44) and two
 * 32bpp RGBA channel orderings; all are positioned in screen coordinates. */
112 static const i965_subpic_format_map_t
113 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
114 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
115 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
116 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
117 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
118 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
119 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
120 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
121 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
122 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
123 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
124 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
125 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
126 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
127 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
/* Find the subpicture-format table entry matching @va_format.
 * Indexed formats match on fourcc alone; RGBA formats additionally require
 * byte order and all four channel masks to match.
 * NOTE(review): the function's opening brace, `int i;` and return
 * statements are not visible in this listing (upstream returns the matched
 * entry or NULL) -- verify against the complete source. */
130 static const i965_subpic_format_map_t *
131 get_subpic_format(const VAImageFormat *va_format)
134 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
135 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
136 if (m->va_format.fourcc == va_format->fourcc &&
137 (m->type == I965_SURFACETYPE_RGBA ?
138 (m->va_format.byte_order == va_format->byte_order &&
139 m->va_format.red_mask == va_format->red_mask &&
140 m->va_format.green_mask == va_format->green_mask &&
141 m->va_format.blue_mask == va_format->blue_mask &&
142 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation codec vtables: which decode/encode/video-processing
 * context constructors exist for each hardware family. A NULL slot means
 * the capability is absent on that generation. */
148 extern struct hw_context *i965_proc_context_init(VADriverContextP, VAProfile);
149 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
150 static struct hw_codec_info g4x_hw_codec_info = {
151 .dec_hw_context_init = g4x_dec_hw_context_init,
152 .enc_hw_context_init = NULL,
153 .proc_hw_context_init = NULL,
156 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
157 static struct hw_codec_info ironlake_hw_codec_info = {
158 .dec_hw_context_init = ironlake_dec_hw_context_init,
159 .enc_hw_context_init = NULL,
160 .proc_hw_context_init = i965_proc_context_init,
163 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
164 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
165 static struct hw_codec_info gen6_hw_codec_info = {
166 .dec_hw_context_init = gen6_dec_hw_context_init,
167 .enc_hw_context_init = gen6_enc_hw_context_init,
168 .proc_hw_context_init = i965_proc_context_init,
171 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
172 static struct hw_codec_info gen7_hw_codec_info = {
173 .dec_hw_context_init = gen7_dec_hw_context_init,
174 .enc_hw_context_init = gen6_enc_hw_context_init, /* gen7 reuses the gen6 encoder entry point */
175 .proc_hw_context_init = NULL,
/* vaQueryConfigProfiles: fill @profile_list with every profile the current
 * hardware generation supports (per the HAS_* predicates) and store the
 * count in *num_profiles. */
179 i965_QueryConfigProfiles(VADriverContextP ctx,
180 VAProfile *profile_list, /* out */
181 int *num_profiles) /* out */
183 struct i965_driver_data * const i965 = i965_driver_data(ctx);
186 if (HAS_MPEG2(i965)) {
187 profile_list[i++] = VAProfileMPEG2Simple;
188 profile_list[i++] = VAProfileMPEG2Main;
191 if (HAS_H264(i965)) {
192 profile_list[i++] = VAProfileH264Baseline;
193 profile_list[i++] = VAProfileH264Main;
194 profile_list[i++] = VAProfileH264High;
198 profile_list[i++] = VAProfileVC1Simple;
199 profile_list[i++] = VAProfileVC1Main;
200 profile_list[i++] = VAProfileVC1Advanced;
204 profile_list[i++] = VAProfileNone; /* VAProfileNone advertises the video-processing pipeline */
207 if (HAS_JPEG(i965)) {
208 profile_list[i++] = VAProfileJPEGBaseline;
211 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
212 assert(i <= I965_MAX_PROFILES);
215 return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints: list the entrypoints (VLD decode, EncSlice
 * encode, VideoProc) available for @profile on this hardware and return
 * the count; an unsupported profile yields zero entrypoints and an error. */
219 i965_QueryConfigEntrypoints(VADriverContextP ctx,
221 VAEntrypoint *entrypoint_list, /* out */
222 int *num_entrypoints) /* out */
224 struct i965_driver_data * const i965 = i965_driver_data(ctx);
228 case VAProfileMPEG2Simple:
229 case VAProfileMPEG2Main:
231 entrypoint_list[n++] = VAEntrypointVLD;
234 case VAProfileH264Baseline:
235 case VAProfileH264Main:
236 case VAProfileH264High:
238 entrypoint_list[n++] = VAEntrypointVLD;
240 if (HAS_ENCODER(i965))
241 entrypoint_list[n++] = VAEntrypointEncSlice;
245 case VAProfileVC1Simple:
246 case VAProfileVC1Main:
247 case VAProfileVC1Advanced:
249 entrypoint_list[n++] = VAEntrypointVLD;
254 entrypoint_list[n++] = VAEntrypointVideoProc;
257 case VAProfileJPEGBaseline:
259 entrypoint_list[n++] = VAEntrypointVLD;
266 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
267 assert(n <= I965_MAX_ENTRYPOINTS);
268 *num_entrypoints = n;
269 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes: report the supported value for each queried
 * attribute in-place; unrecognized attribute types are answered with
 * VA_ATTRIB_NOT_SUPPORTED rather than an error. */
273 i965_GetConfigAttributes(VADriverContextP ctx,
275 VAEntrypoint entrypoint,
276 VAConfigAttrib *attrib_list, /* in/out */
281 /* Other attributes don't seem to be defined */
282 /* What to do if we don't know the attribute? */
283 for (i = 0; i < num_attribs; i++) {
284 switch (attrib_list[i].type) {
285 case VAConfigAttribRTFormat:
286 attrib_list[i].value = VA_RT_FORMAT_YUV420; /* only YUV420 render targets are supported */
289 case VAConfigAttribRateControl:
290 attrib_list[i].value = VA_RC_VBR;
293 case VAConfigAttribEncHeaderPacking:
294 if (entrypoint == VAEntrypointEncSlice) {
295 attrib_list[i].value = VA_ENC_HEADER_PACKING_SLICE;
301 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
306 return VA_STATUS_SUCCESS;
/* Heap destructor for config objects: the config owns no external
 * resources, so only the heap slot is released. */
310 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
312 object_heap_free(heap, obj);
316 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
320 /* Check existing attrbiutes */
321 for (i = 0; obj_config->num_attribs < i; i++) {
322 if (obj_config->attrib_list[i].type == attrib->type) {
323 /* Update existing attribute */
324 obj_config->attrib_list[i].value = attrib->value;
325 return VA_STATUS_SUCCESS;
329 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
330 i = obj_config->num_attribs;
331 obj_config->attrib_list[i].type = attrib->type;
332 obj_config->attrib_list[i].value = attrib->value;
333 obj_config->num_attribs++;
334 return VA_STATUS_SUCCESS;
337 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig: validate the profile/entrypoint pair against the
 * hardware capability predicates, allocate a config object, seed it with
 * the implicit YUV420 RTFormat attribute, then merge the caller-supplied
 * attributes. The config is rolled back if attribute merging fails. */
341 i965_CreateConfig(VADriverContextP ctx,
343 VAEntrypoint entrypoint,
344 VAConfigAttrib *attrib_list,
346 VAConfigID *config_id) /* out */
348 struct i965_driver_data * const i965 = i965_driver_data(ctx);
349 struct object_config *obj_config;
354 /* Validate profile & entrypoint */
356 case VAProfileMPEG2Simple:
357 case VAProfileMPEG2Main:
358 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
359 vaStatus = VA_STATUS_SUCCESS;
361 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
365 case VAProfileH264Baseline:
366 case VAProfileH264Main:
367 case VAProfileH264High:
368 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
369 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
370 vaStatus = VA_STATUS_SUCCESS;
372 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
377 case VAProfileVC1Simple:
378 case VAProfileVC1Main:
379 case VAProfileVC1Advanced:
380 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
381 vaStatus = VA_STATUS_SUCCESS;
383 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
389 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
390 vaStatus = VA_STATUS_SUCCESS;
392 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
395 case VAProfileJPEGBaseline:
396 if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
397 vaStatus = VA_STATUS_SUCCESS;
399 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
404 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
408 if (VA_STATUS_SUCCESS != vaStatus) {
412 configID = NEW_CONFIG_ID();
413 obj_config = CONFIG(configID);
415 if (NULL == obj_config) {
416 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
420 obj_config->profile = profile;
421 obj_config->entrypoint = entrypoint;
422 obj_config->attrib_list[0].type = VAConfigAttribRTFormat; /* every config implicitly carries the YUV420 RT format */
423 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
424 obj_config->num_attribs = 1;
426 for(i = 0; i < num_attribs; i++) {
427 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
429 if (VA_STATUS_SUCCESS != vaStatus) {
435 if (VA_STATUS_SUCCESS != vaStatus) {
436 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config); /* roll back on attribute failure */
438 *config_id = configID;
/* vaDestroyConfig: release the config object, rejecting unknown IDs with
 * VA_STATUS_ERROR_INVALID_CONFIG. */
445 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
447 struct i965_driver_data *i965 = i965_driver_data(ctx);
448 struct object_config *obj_config = CONFIG(config_id);
451 if (NULL == obj_config) {
452 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
456 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
457 return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes: copy the profile, entrypoint and the stored
 * attribute list of an existing config back to the caller. */
460 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
461 VAConfigID config_id,
462 VAProfile *profile, /* out */
463 VAEntrypoint *entrypoint, /* out */
464 VAConfigAttrib *attrib_list, /* out */
465 int *num_attribs) /* out */
467 struct i965_driver_data *i965 = i965_driver_data(ctx);
468 struct object_config *obj_config = CONFIG(config_id);
469 VAStatus vaStatus = VA_STATUS_SUCCESS;
473 *profile = obj_config->profile;
474 *entrypoint = obj_config->entrypoint;
475 *num_attribs = obj_config->num_attribs;
477 for(i = 0; i < obj_config->num_attribs; i++) {
478 attrib_list[i] = obj_config->attrib_list[i];
/* Heap destructor for surfaces: drop the GEM buffer-object reference and
 * let the codec free any private data it attached, then release the slot. */
485 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
487 struct object_surface *obj_surface = (struct object_surface *)obj;
489 dri_bo_unreference(obj_surface->bo);
490 obj_surface->bo = NULL;
492 if (obj_surface->free_private_data != NULL) {
493 obj_surface->free_private_data(&obj_surface->private_data);
494 obj_surface->private_data = NULL;
497 object_heap_free(heap, obj);
/* vaCreateSurfaces: allocate @num_surfaces YUV420 surface objects.
 * Only bookkeeping happens here -- the backing bo is allocated lazily on
 * first use. On failure, every surface created so far is destroyed. */
501 i965_CreateSurfaces(VADriverContextP ctx,
506 VASurfaceID *surfaces) /* out */
508 struct i965_driver_data *i965 = i965_driver_data(ctx);
510 VAStatus vaStatus = VA_STATUS_SUCCESS;
512 /* We only support one format */
513 if (VA_RT_FORMAT_YUV420 != format) {
514 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
517 for (i = 0; i < num_surfaces; i++) {
518 int surfaceID = NEW_SURFACE_ID();
519 struct object_surface *obj_surface = SURFACE(surfaceID);
521 if (NULL == obj_surface) {
522 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
526 surfaces[i] = surfaceID;
527 obj_surface->status = VASurfaceReady;
528 obj_surface->subpic = VA_INVALID_ID;
529 obj_surface->orig_width = width;
530 obj_surface->orig_height = height;
532 if (IS_GEN6(i965->intel.device_id) ||
533 IS_GEN7(i965->intel.device_id)) {
534 obj_surface->width = ALIGN(obj_surface->orig_width, 128); /* SNB/IVB use wider alignment */
535 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
537 obj_surface->width = ALIGN(obj_surface->orig_width, 16);
538 obj_surface->height = ALIGN(obj_surface->orig_height, 16);
541 obj_surface->flags = SURFACE_REFERENCED;
542 obj_surface->fourcc = 0;
543 obj_surface->bo = NULL; /* backing store allocated lazily */
544 obj_surface->locked_image_id = VA_INVALID_ID;
545 obj_surface->private_data = NULL;
546 obj_surface->free_private_data = NULL;
550 if (VA_STATUS_SUCCESS != vaStatus) {
551 /* surfaces[i-1] was the last successful allocation */
553 struct object_surface *obj_surface = SURFACE(surfaces[i]);
555 surfaces[i] = VA_INVALID_SURFACE;
557 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaDestroySurfaces: destroy the listed surfaces, iterating in reverse. */
565 i965_DestroySurfaces(VADriverContextP ctx,
566 VASurfaceID *surface_list,
569 struct i965_driver_data *i965 = i965_driver_data(ctx);
572 for (i = num_surfaces; i--; ) {
573 struct object_surface *obj_surface = SURFACE(surface_list[i]);
576 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
579 return VA_STATUS_SUCCESS;
/* vaQueryImageFormats: copy the static image-format table (terminated by
 * a zero fourcc) to the caller. */
583 i965_QueryImageFormats(VADriverContextP ctx,
584 VAImageFormat *format_list, /* out */
585 int *num_formats) /* out */
589 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
590 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
592 format_list[n] = m->va_format;
598 return VA_STATUS_SUCCESS;
/* vaPutImage: upload a rectangle of @image into @surface. Validates the
 * surface/image handles and both rectangles, lazily allocates the surface
 * bo as NV12, then delegates the conversion/copy to
 * i965_image_processing(). */
602 i965_PutImage(VADriverContextP ctx,
607 unsigned int src_width,
608 unsigned int src_height,
611 unsigned int dest_width,
612 unsigned int dest_height)
614 struct i965_driver_data *i965 = i965_driver_data(ctx);
615 struct object_surface *obj_surface = SURFACE(surface);
616 struct object_image *obj_image = IMAGE(image);
617 struct i965_surface src_surface, dst_surface;
618 VAStatus va_status = VA_STATUS_SUCCESS;
619 VARectangle src_rect, dst_rect;
622 return VA_STATUS_ERROR_INVALID_SURFACE;
624 if (!obj_image || !obj_image->bo)
625 return VA_STATUS_ERROR_INVALID_IMAGE;
629 src_x + src_width > obj_image->image.width ||
630 src_y + src_height > obj_image->image.height)
631 return VA_STATUS_ERROR_INVALID_PARAMETER;
635 dest_x + dest_width > obj_surface->orig_width ||
636 dest_y + dest_height > obj_surface->orig_height)
637 return VA_STATUS_ERROR_INVALID_PARAMETER;
639 i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), VA_FOURCC('N', 'V', '1', '2'));
641 src_surface.id = image;
642 src_surface.flag = I965_SURFACE_IMAGE;
645 src_rect.width = src_width;
646 src_rect.height = src_height;
648 dst_surface.id = surface;
649 dst_surface.flag = I965_SURFACE_SURFACE;
652 dst_rect.width = dest_width;
653 dst_rect.height = dest_height;
655 va_status = i965_image_processing(ctx,
/* vaQuerySubpictureFormats: copy the static subpicture-format table and
 * its per-format flags to the caller. */
665 i965_QuerySubpictureFormats(VADriverContextP ctx,
666 VAImageFormat *format_list, /* out */
667 unsigned int *flags, /* out */
668 unsigned int *num_formats) /* out */
672 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
673 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
675 format_list[n] = m->va_format;
677 flags[n] = m->va_flags;
683 return VA_STATUS_SUCCESS;
/* Heap destructor for subpicture objects. The subpic does not own its
 * image's bo (see i965_CreateSubpicture), so only the slot is released. */
687 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
689 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
691 object_heap_free(heap, obj);
695 i965_CreateSubpicture(VADriverContextP ctx,
697 VASubpictureID *subpicture) /* out */
699 struct i965_driver_data *i965 = i965_driver_data(ctx);
700 VASubpictureID subpicID = NEW_SUBPIC_ID()
701 struct object_subpic *obj_subpic = SUBPIC(subpicID);
704 return VA_STATUS_ERROR_ALLOCATION_FAILED;
706 struct object_image *obj_image = IMAGE(image);
708 return VA_STATUS_ERROR_INVALID_IMAGE;
710 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
712 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
714 *subpicture = subpicID;
715 obj_subpic->image = image;
716 obj_subpic->format = m->format;
717 obj_subpic->width = obj_image->image.width;
718 obj_subpic->height = obj_image->image.height;
719 obj_subpic->pitch = obj_image->image.pitches[0];
720 obj_subpic->bo = obj_image->bo;
721 return VA_STATUS_SUCCESS;
/* vaDestroySubpicture: release the subpicture object. */
725 i965_DestroySubpicture(VADriverContextP ctx,
726 VASubpictureID subpicture)
728 struct i965_driver_data *i965 = i965_driver_data(ctx);
729 struct object_subpic *obj_subpic = SUBPIC(subpicture);
730 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
731 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage: not implemented by this driver. */
735 i965_SetSubpictureImage(VADriverContextP ctx,
736 VASubpictureID subpicture,
740 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey: not implemented by this driver. */
744 i965_SetSubpictureChromakey(VADriverContextP ctx,
745 VASubpictureID subpicture,
746 unsigned int chromakey_min,
747 unsigned int chromakey_max,
748 unsigned int chromakey_mask)
751 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha: not implemented by this driver. */
755 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
756 VASubpictureID subpicture,
760 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaAssociateSubpicture: record the source/destination rectangles and
 * blending flags on the subpicture, then attach it to each target surface.
 * NOTE(review): a surface stores only a single subpic id, so a later
 * association silently replaces an earlier one. */
764 i965_AssociateSubpicture(VADriverContextP ctx,
765 VASubpictureID subpicture,
766 VASurfaceID *target_surfaces,
768 short src_x, /* upper left offset in subpicture */
770 unsigned short src_width,
771 unsigned short src_height,
772 short dest_x, /* upper left offset in surface */
774 unsigned short dest_width,
775 unsigned short dest_height,
777 * whether to enable chroma-keying or global-alpha
778 * see VA_SUBPICTURE_XXX values
782 struct i965_driver_data *i965 = i965_driver_data(ctx);
783 struct object_subpic *obj_subpic = SUBPIC(subpicture);
786 obj_subpic->src_rect.x = src_x;
787 obj_subpic->src_rect.y = src_y;
788 obj_subpic->src_rect.width = src_width;
789 obj_subpic->src_rect.height = src_height;
790 obj_subpic->dst_rect.x = dest_x;
791 obj_subpic->dst_rect.y = dest_y;
792 obj_subpic->dst_rect.width = dest_width;
793 obj_subpic->dst_rect.height = dest_height;
794 obj_subpic->flags = flags;
796 for (i = 0; i < num_surfaces; i++) {
797 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
799 return VA_STATUS_ERROR_INVALID_SURFACE;
800 obj_surface->subpic = subpicture;
802 return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture: detach @subpicture from each target surface
 * that currently references it; other surfaces' associations are kept. */
807 i965_DeassociateSubpicture(VADriverContextP ctx,
808 VASubpictureID subpicture,
809 VASurfaceID *target_surfaces,
812 struct i965_driver_data *i965 = i965_driver_data(ctx);
815 for (i = 0; i < num_surfaces; i++) {
816 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
818 return VA_STATUS_ERROR_INVALID_SURFACE;
819 if (obj_surface->subpic == subpicture)
820 obj_surface->subpic = VA_INVALID_ID;
822 return VA_STATUS_SUCCESS;
/* Take a new reference on @buffer_store and stash it in *@ptr, which must
 * currently be empty (asserted). */
826 i965_reference_buffer_store(struct buffer_store **ptr,
827 struct buffer_store *buffer_store)
829 assert(*ptr == NULL);
832 buffer_store->ref_count++;
/* Drop a reference on *@ptr's buffer store (no-op if *ptr is NULL).
 * When the count reaches zero, the backing storage -- either a GEM bo or
 * a malloc'd buffer, exactly one of which exists -- is freed. */
838 i965_release_buffer_store(struct buffer_store **ptr)
840 struct buffer_store *buffer_store = *ptr;
842 if (buffer_store == NULL)
845 assert(buffer_store->bo || buffer_store->buffer);
846 assert(!(buffer_store->bo && buffer_store->buffer)); /* bo and malloc'd buffer are mutually exclusive */
847 buffer_store->ref_count--;
849 if (buffer_store->ref_count == 0) {
850 dri_bo_unreference(buffer_store->bo);
851 free(buffer_store->buffer);
852 buffer_store->bo = NULL;
853 buffer_store->buffer = NULL;
/* Heap destructor for contexts: tear down the hardware context first,
 * then release every buffer store held by the proc/encode/decode codec
 * state, and finally free the render-target array and the heap slot. */
861 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
863 struct object_context *obj_context = (struct object_context *)obj;
866 if (obj_context->hw_context) {
867 obj_context->hw_context->destroy(obj_context->hw_context);
868 obj_context->hw_context = NULL;
871 if (obj_context->codec_type == CODEC_PROC) {
872 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
873 i965_release_buffer_store(&obj_context->codec_state.proc.input_param);
875 for (i = 0; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++)
876 i965_release_buffer_store(&obj_context->codec_state.proc.filter_param[i]);
877 } else if (obj_context->codec_type == CODEC_ENC) {
878 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
879 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
880 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
882 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
883 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
885 free(obj_context->codec_state.encode.slice_params);
887 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
888 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
889 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
890 i965_release_buffer_store(&obj_context->codec_state.encode.dec_ref_pic_marking);
892 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
893 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
895 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
896 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
898 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
899 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
901 free(obj_context->codec_state.encode.slice_params_ext);
903 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
904 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
906 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
907 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
908 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
910 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
911 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
913 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
914 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
916 free(obj_context->codec_state.decode.slice_params);
917 free(obj_context->codec_state.decode.slice_datas);
920 free(obj_context->render_targets);
921 object_heap_free(heap, obj);
/* vaCreateContext: allocate a context object, configure render state for
 * the chosen profile, copy the render-target list, and create the
 * proc/encode/decode hardware context matching the config's entrypoint.
 * On any failure after allocation the partially built context is
 * destroyed via i965_destroy_context(). */
925 i965_CreateContext(VADriverContextP ctx,
926 VAConfigID config_id,
930 VASurfaceID *render_targets,
931 int num_render_targets,
932 VAContextID *context) /* out */
934 struct i965_driver_data *i965 = i965_driver_data(ctx);
935 struct i965_render_state *render_state = &i965->render_state;
936 struct object_config *obj_config = CONFIG(config_id);
937 struct object_context *obj_context = NULL;
938 VAStatus vaStatus = VA_STATUS_SUCCESS;
942 if (NULL == obj_config) {
943 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
948 /* Validate picture dimensions */
949 contextID = NEW_CONTEXT_ID();
950 obj_context = CONTEXT(contextID);
952 if (NULL == obj_context) {
953 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
957 render_state->inited = 1;
959 switch (obj_config->profile) {
960 case VAProfileH264Baseline:
961 case VAProfileH264Main:
962 case VAProfileH264High:
964 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE; /* NOTE(review): this early return appears to leak the obj_context heap slot allocated above -- verify */
965 render_state->interleaved_uv = 1; /* H.264 path renders interleaved (NV12) chroma */
968 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
972 *context = contextID;
973 obj_context->flags = flag;
974 obj_context->context_id = contextID;
975 obj_context->config_id = config_id;
976 obj_context->picture_width = picture_width;
977 obj_context->picture_height = picture_height;
978 obj_context->num_render_targets = num_render_targets;
979 obj_context->render_targets =
980 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
981 obj_context->hw_context = NULL;
983 for(i = 0; i < num_render_targets; i++) {
984 if (NULL == SURFACE(render_targets[i])) {
985 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
989 obj_context->render_targets[i] = render_targets[i];
992 if (VA_STATUS_SUCCESS == vaStatus) {
993 if (VAEntrypointVideoProc == obj_config->entrypoint) {
994 obj_context->codec_type = CODEC_PROC;
995 memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
996 obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
997 assert(i965->codec_info->proc_hw_context_init);
998 obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config->profile);
999 } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routin only*/
1000 obj_context->codec_type = CODEC_ENC;
1001 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
1002 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
1003 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
1004 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
1005 sizeof(*obj_context->codec_state.encode.slice_params));
1006 assert(i965->codec_info->enc_hw_context_init);
1007 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
1009 obj_context->codec_type = CODEC_DEC;
1010 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
1011 obj_context->codec_state.decode.current_render_target = -1;
1012 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
1013 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
1014 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
1015 sizeof(*obj_context->codec_state.decode.slice_params));
1016 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
1017 sizeof(*obj_context->codec_state.decode.slice_datas));
1019 assert(i965->codec_info->dec_hw_context_init);
1020 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
1024 /* Error recovery */
1025 if (VA_STATUS_SUCCESS != vaStatus) {
1026 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
/* vaDestroyContext: release the context object and everything it owns. */
1033 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
1035 struct i965_driver_data *i965 = i965_driver_data(ctx);
1036 struct object_context *obj_context = CONTEXT(context);
1038 assert(obj_context);
1039 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1041 return VA_STATUS_SUCCESS;
/* Heap destructor for buffer objects: drop the buffer-store reference
 * (freeing the backing storage when the count hits zero), then release
 * the heap slot. */
1045 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
1047 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
1049 assert(obj_buffer->buffer_store);
1050 i965_release_buffer_store(&obj_buffer->buffer_store);
1051 object_heap_free(heap, obj);
/* Common implementation behind vaCreateBuffer: validates the buffer
 * type, allocates the buffer object plus its refcounted buffer store,
 * and backs the store with either a caller-supplied bo, a freshly
 * allocated bo (slice-data / image / coded buffers), or malloc'd memory
 * (all parameter buffer types). */
1055 i965_create_buffer_internal(VADriverContextP ctx,
1056 VAContextID context,
1059 unsigned int num_elements,
1064 struct i965_driver_data *i965 = i965_driver_data(ctx);
1065 struct object_buffer *obj_buffer = NULL;
1066 struct buffer_store *buffer_store = NULL;
/* Whitelist of buffer types this driver understands; anything else is
 * rejected below. */
1071 case VAPictureParameterBufferType:
1072 case VAIQMatrixBufferType:
1073 case VABitPlaneBufferType:
1074 case VASliceGroupMapBufferType:
1075 case VASliceParameterBufferType:
1076 case VASliceDataBufferType:
1077 case VAMacroblockParameterBufferType:
1078 case VAResidualDataBufferType:
1079 case VADeblockingParameterBufferType:
1080 case VAImageBufferType:
1081 case VAEncCodedBufferType:
1082 case VAEncSequenceParameterBufferType:
1083 case VAEncPictureParameterBufferType:
1084 case VAEncSliceParameterBufferType:
1085 case VAEncSequenceParameterBufferExtType:
1086 case VAEncPictureParameterBufferExtType:
1087 case VAEncSliceParameterBufferExtType:
1088 case VAEncDecRefPicMarkingBufferH264Type:
1089 case VAEncPackedHeaderParameterBufferType:
1090 case VAEncPackedHeaderDataBufferType:
1091 case VAProcPipelineParameterBufferType:
1092 case VAProcInputParameterBufferType:
1093 case VAProcFilterBaseParameterBufferType:
1094 case VAProcFilterDeinterlacingParameterBufferType:
1095 case VAProcFilterProcAmpParameterBufferType:
1096 case VAHuffmanTableBufferType:
1101 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1104 bufferID = NEW_BUFFER_ID();
1105 obj_buffer = BUFFER(bufferID);
1107 if (NULL == obj_buffer) {
1108 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1111 if (type == VAEncCodedBufferType) {
1112 size += ALIGN(sizeof(VACodedBufferSegment), 64); /* room for the segment header preceding the bitstream */
1113 size += 0x1000; /* for upper bound check */
1116 obj_buffer->max_num_elements = num_elements;
1117 obj_buffer->num_elements = num_elements;
1118 obj_buffer->size_element = size;
1119 obj_buffer->type = type;
1120 obj_buffer->buffer_store = NULL;
1121 buffer_store = calloc(1, sizeof(struct buffer_store));
1122 assert(buffer_store);
1123 buffer_store->ref_count = 1;
1125 if (store_bo != NULL) {
1126 buffer_store->bo = store_bo;
1127 dri_bo_reference(buffer_store->bo); /* wrapping a caller-owned bo: take our own reference */
1130 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1131 } else if (type == VASliceDataBufferType ||
1132 type == VAImageBufferType ||
1133 type == VAEncCodedBufferType) {
1134 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1136 size * num_elements, 64);
1137 assert(buffer_store->bo);
1139 if (type == VAEncCodedBufferType) {
1140 VACodedBufferSegment *coded_buffer_segment;
/* Initialize the VACodedBufferSegment header at the front of the bo. */
1141 dri_bo_map(buffer_store->bo, 1);
1142 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1143 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
1144 coded_buffer_segment->bit_offset = 0;
1145 coded_buffer_segment->status = 0;
1146 coded_buffer_segment->buf = NULL;
1147 coded_buffer_segment->next = NULL;
1148 dri_bo_unmap(buffer_store->bo);
1150 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1153 } else if (type == VAEncPackedHeaderParameterBufferType) {
1154 VAEncPackedHeaderParameterBuffer *param;
1158 assert(num_elements == 1);
1159 assert(size == sizeof(*param));
/* Deep-copy the param struct followed by its two int arrays
 * (length_in_bits, offset_in_bytes), then repoint the copied struct's
 * array pointers into the copied storage. */
1161 param = (VAEncPackedHeaderParameterBuffer *)data;
1162 msize = ALIGN(size, 32) + param->num_headers * sizeof(int) * 2;
1163 buffer_store->buffer = malloc(msize);
1164 assert(buffer_store->buffer);
1166 memcpy(buffer_store->buffer,
1169 memcpy((unsigned char *)buffer_store->buffer + ALIGN(size, 32),
1170 param->length_in_bits,
1171 param->num_headers * sizeof(int));
1172 memcpy((unsigned char *)buffer_store->buffer + ALIGN(size, 32) + param->num_headers * sizeof(int),
1173 param->offset_in_bytes,
1174 param->num_headers * sizeof(int));
1176 param = (VAEncPackedHeaderParameterBuffer *)buffer_store->buffer;
1177 param->length_in_bits = (unsigned int *)((unsigned char *)buffer_store->buffer + ALIGN(size, 32));
1178 param->offset_in_bytes = (unsigned int *)((unsigned char *)buffer_store->buffer + ALIGN(size, 32) + param->num_headers * sizeof(int));
1182 if (type == VAEncPackedHeaderDataBufferType) {
1183 msize = ALIGN(size, 4);
1186 buffer_store->buffer = malloc(msize * num_elements);
1187 assert(buffer_store->buffer);
1190 memcpy(buffer_store->buffer, data, size * num_elements);
1193 buffer_store->num_elements = obj_buffer->num_elements;
1194 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1195 i965_release_buffer_store(&buffer_store); /* obj_buffer now holds the only reference */
1198 return VA_STATUS_SUCCESS;
1202 i965_CreateBuffer(VADriverContextP ctx,
1203 VAContextID context, /* in */
1204 VABufferType type, /* in */
1205 unsigned int size, /* in */
1206 unsigned int num_elements, /* in */
1207 void *data, /* in */
1208 VABufferID *buf_id) /* out */
1210 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1215 i965_BufferSetNumElements(VADriverContextP ctx,
1216 VABufferID buf_id, /* in */
1217 unsigned int num_elements) /* in */
1219 struct i965_driver_data *i965 = i965_driver_data(ctx);
1220 struct object_buffer *obj_buffer = BUFFER(buf_id);
1221 VAStatus vaStatus = VA_STATUS_SUCCESS;
1225 if ((num_elements < 0) ||
1226 (num_elements > obj_buffer->max_num_elements)) {
1227 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1229 obj_buffer->num_elements = num_elements;
1230 if (obj_buffer->buffer_store != NULL) {
1231 obj_buffer->buffer_store->num_elements = num_elements;
1239 i965_MapBuffer(VADriverContextP ctx,
1240 VABufferID buf_id, /* in */
1241 void **pbuf) /* out */
1243 struct i965_driver_data *i965 = i965_driver_data(ctx);
1244 struct object_buffer *obj_buffer = BUFFER(buf_id);
1245 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1247 assert(obj_buffer && obj_buffer->buffer_store);
1248 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1249 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1251 if (NULL != obj_buffer->buffer_store->bo) {
1252 unsigned int tiling, swizzle;
1254 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1256 if (tiling != I915_TILING_NONE)
1257 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1259 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1261 assert(obj_buffer->buffer_store->bo->virtual);
1262 *pbuf = obj_buffer->buffer_store->bo->virtual;
1264 if (obj_buffer->type == VAEncCodedBufferType) {
1266 unsigned char *buffer = NULL;
1267 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1269 coded_buffer_segment->buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
1271 for (i = 0; i < obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000; i++) {
1279 if (i == obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000) {
1280 coded_buffer_segment->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
1283 coded_buffer_segment->size = i;
1286 vaStatus = VA_STATUS_SUCCESS;
1287 } else if (NULL != obj_buffer->buffer_store->buffer) {
1288 *pbuf = obj_buffer->buffer_store->buffer;
1289 vaStatus = VA_STATUS_SUCCESS;
1296 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1298 struct i965_driver_data *i965 = i965_driver_data(ctx);
1299 struct object_buffer *obj_buffer = BUFFER(buf_id);
1300 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1302 assert(obj_buffer && obj_buffer->buffer_store);
1303 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1304 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1306 if (NULL != obj_buffer->buffer_store->bo) {
1307 unsigned int tiling, swizzle;
1309 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1311 if (tiling != I915_TILING_NONE)
1312 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1314 dri_bo_unmap(obj_buffer->buffer_store->bo);
1316 vaStatus = VA_STATUS_SUCCESS;
1317 } else if (NULL != obj_buffer->buffer_store->buffer) {
1319 vaStatus = VA_STATUS_SUCCESS;
1326 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1328 struct i965_driver_data *i965 = i965_driver_data(ctx);
1329 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1332 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1334 return VA_STATUS_SUCCESS;
1338 i965_BeginPicture(VADriverContextP ctx,
1339 VAContextID context,
1340 VASurfaceID render_target)
1342 struct i965_driver_data *i965 = i965_driver_data(ctx);
1343 struct object_context *obj_context = CONTEXT(context);
1344 struct object_surface *obj_surface = SURFACE(render_target);
1345 struct object_config *obj_config;
1350 assert(obj_context);
1351 assert(obj_surface);
1353 config = obj_context->config_id;
1354 obj_config = CONFIG(config);
1357 switch (obj_config->profile) {
1358 case VAProfileMPEG2Simple:
1359 case VAProfileMPEG2Main:
1360 vaStatus = VA_STATUS_SUCCESS;
1363 case VAProfileH264Baseline:
1364 case VAProfileH264Main:
1365 case VAProfileH264High:
1366 vaStatus = VA_STATUS_SUCCESS;
1369 case VAProfileVC1Simple:
1370 case VAProfileVC1Main:
1371 case VAProfileVC1Advanced:
1372 vaStatus = VA_STATUS_SUCCESS;
1376 vaStatus = VA_STATUS_SUCCESS;
1381 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1385 if (obj_context->codec_type == CODEC_PROC) {
1386 obj_context->codec_state.proc.current_render_target = render_target;
1387 } else if (obj_context->codec_type == CODEC_ENC) {
1388 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1389 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1391 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1392 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1395 obj_context->codec_state.encode.num_slice_params = 0;
1398 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1399 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
1400 i965_release_buffer_store(&obj_context->codec_state.encode.dec_ref_pic_marking);
1403 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1404 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1406 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1407 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1409 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1410 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1412 obj_context->codec_state.encode.num_slice_params_ext = 0;
1413 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
1414 obj_context->codec_state.encode.last_packed_header_type = 0;
1416 obj_context->codec_state.decode.current_render_target = render_target;
1417 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1418 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1419 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1421 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1422 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1423 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1426 obj_context->codec_state.decode.num_slice_params = 0;
1427 obj_context->codec_state.decode.num_slice_datas = 0;
/* Expands to a call of the generated per-category/per-name renderer below. */
#define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)

/*
 * Generates a renderer for a buffer type that appears at most once per
 * frame: the previously held store (if any) is released and replaced with
 * a new reference to this buffer's store.  Such buffers are always backed
 * by malloc'ed memory, never by a bo.
 */
#define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member)           \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        assert(obj_buffer->buffer_store->bo == NULL);                   \
        assert(obj_buffer->buffer_store->buffer);                       \
        i965_release_buffer_store(&category->member);                   \
        i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
        return VA_STATUS_SUCCESS;                                       \
    }
/*
 * Generates a renderer for a buffer type that accumulates per frame
 * (e.g. one per slice).  The backing array grows in NUM_SLICES increments;
 * freshly grown slots are zeroed so release/reference see valid pointers.
 *
 * Fix: the original assigned realloc()'s result straight back to the
 * array pointer without checking it.  On allocation failure realloc
 * returns NULL while leaving the old block live, so the old array leaked
 * and the subsequent memset dereferenced NULL.  Now the failure is
 * reported as VA_STATUS_ERROR_ALLOCATION_FAILED and the existing array is
 * kept intact.
 */
#define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        if (category->num_##member == category->max_##member) {         \
            void *new_ptr = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
            if (!new_ptr)                                               \
                return VA_STATUS_ERROR_ALLOCATION_FAILED;               \
            category->member = new_ptr;                                 \
            memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
            category->max_##member += NUM_SLICES;                       \
        }                                                               \
        i965_release_buffer_store(&category->member[category->num_##member]); \
        i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
        category->num_##member++;                                       \
        return VA_STATUS_SUCCESS;                                       \
    }
/* Shorthand for dispatching to a generated decode renderer. */
#define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)

/* Single-instance decode buffers: at most one of each per frame. */
#define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)

/* Multi-instance decode buffers: accumulated per slice. */
#define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
1480 i965_decoder_render_picture(VADriverContextP ctx,
1481 VAContextID context,
1482 VABufferID *buffers,
1485 struct i965_driver_data *i965 = i965_driver_data(ctx);
1486 struct object_context *obj_context = CONTEXT(context);
1490 for (i = 0; i < num_buffers; i++) {
1491 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1494 switch (obj_buffer->type) {
1495 case VAPictureParameterBufferType:
1496 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1499 case VAIQMatrixBufferType:
1500 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1503 case VABitPlaneBufferType:
1504 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1507 case VASliceParameterBufferType:
1508 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1511 case VASliceDataBufferType:
1512 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
1515 case VAHuffmanTableBufferType:
1516 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1520 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Shorthand for dispatching to a generated encode renderer. */
#define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)

/* Single-instance encode buffers: at most one of each per frame. */
#define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
/* extended buffer */
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)
DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(dec_ref_pic_marking, dec_ref_pic_marking)

/* Multi-instance encode buffers: accumulated per slice. */
#define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
1546 i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
1547 struct object_context *obj_context,
1548 struct object_buffer *obj_buffer,
1549 VAEncPackedHeaderType type)
1551 struct encode_state *encode = &obj_context->codec_state.encode;
1553 assert(obj_buffer->buffer_store->bo == NULL);
1554 assert(obj_buffer->buffer_store->buffer);
1555 i965_release_buffer_store(&encode->packed_header_param[type]);
1556 i965_reference_buffer_store(&encode->packed_header_param[type], obj_buffer->buffer_store);
1558 return VA_STATUS_SUCCESS;
1562 i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
1563 struct object_context *obj_context,
1564 struct object_buffer *obj_buffer,
1565 VAEncPackedHeaderType type)
1567 struct encode_state *encode = &obj_context->codec_state.encode;
1569 assert(obj_buffer->buffer_store->bo == NULL);
1570 assert(obj_buffer->buffer_store->buffer);
1571 i965_release_buffer_store(&encode->packed_header_data[type]);
1572 i965_reference_buffer_store(&encode->packed_header_data[type], obj_buffer->buffer_store);
1574 return VA_STATUS_SUCCESS;
1578 i965_encoder_render_picture(VADriverContextP ctx,
1579 VAContextID context,
1580 VABufferID *buffers,
1583 struct i965_driver_data *i965 = i965_driver_data(ctx);
1584 struct object_context *obj_context = CONTEXT(context);
1588 for (i = 0; i < num_buffers; i++) {
1589 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1592 switch (obj_buffer->type) {
1593 case VAEncSequenceParameterBufferType:
1594 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter);
1597 case VAEncPictureParameterBufferType:
1598 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter);
1601 case VAEncSliceParameterBufferType:
1602 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter);
1605 case VAPictureParameterBufferType:
1606 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_control);
1609 case VAQMatrixBufferType:
1610 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1613 case VAIQMatrixBufferType:
1614 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1617 case VAEncSequenceParameterBufferExtType:
1618 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);
1621 case VAEncPictureParameterBufferExtType:
1622 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);
1625 case VAEncSliceParameterBufferExtType:
1626 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);
1629 case VAEncDecRefPicMarkingBufferH264Type:
1630 vaStatus = I965_RENDER_ENCODE_BUFFER(dec_ref_pic_marking);
1633 case VAEncPackedHeaderParameterBufferType:
1635 struct encode_state *encode = &obj_context->codec_state.encode;
1636 VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
1637 encode->last_packed_header_type = param->type;
1639 vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
1642 encode->last_packed_header_type);
1646 case VAEncPackedHeaderDataBufferType:
1648 struct encode_state *encode = &obj_context->codec_state.encode;
1650 assert(encode->last_packed_header_type == VAEncPackedHeaderSPS ||
1651 encode->last_packed_header_type == VAEncPackedHeaderPPS ||
1652 encode->last_packed_header_type == VAEncPackedHeaderSlice);
1653 vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
1656 encode->last_packed_header_type);
1661 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Shorthand for dispatching to a generated video-processing renderer. */
#define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)

/* Single-instance video-processing buffers: at most one of each per frame. */
#define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(input_parameter, input_param)
1676 i965_render_proc_filter_parameter_buffer(VADriverContextP ctx,
1677 struct object_context *obj_context,
1678 struct object_buffer *obj_buffer,
1679 VAProcFilterType type)
1681 struct proc_state *proc = &obj_context->codec_state.proc;
1683 assert(obj_buffer->buffer_store->bo == NULL);
1684 assert(obj_buffer->buffer_store->buffer);
1685 i965_release_buffer_store(&proc->filter_param[type]);
1686 i965_reference_buffer_store(&proc->filter_param[type], obj_buffer->buffer_store);
1688 return VA_STATUS_SUCCESS;
1692 i965_proc_render_picture(VADriverContextP ctx,
1693 VAContextID context,
1694 VABufferID *buffers,
1697 struct i965_driver_data *i965 = i965_driver_data(ctx);
1698 struct object_context *obj_context = CONTEXT(context);
1702 for (i = 0; i < num_buffers; i++) {
1703 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1706 switch (obj_buffer->type) {
1707 case VAProcPipelineParameterBufferType:
1708 vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);
1711 case VAProcInputParameterBufferType:
1712 vaStatus = I965_RENDER_PROC_BUFFER(input_parameter);
1715 case VAProcFilterBaseParameterBufferType:
1717 VAProcFilterBaseParameterBuffer *param = (VAProcFilterBaseParameterBuffer *)obj_buffer->buffer_store->buffer;
1718 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, param->filter);
1722 case VAProcFilterDeinterlacingParameterBufferType:
1723 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, VAProcFilterDeinterlacing);
1726 case VAProcFilterProcAmpParameterBufferType:
1727 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, VAProcFilterProcAmp);
1731 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1740 i965_RenderPicture(VADriverContextP ctx,
1741 VAContextID context,
1742 VABufferID *buffers,
1745 struct i965_driver_data *i965 = i965_driver_data(ctx);
1746 struct object_context *obj_context;
1747 struct object_config *obj_config;
1749 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1751 obj_context = CONTEXT(context);
1752 assert(obj_context);
1754 config = obj_context->config_id;
1755 obj_config = CONFIG(config);
1758 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1759 vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
1760 } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
1761 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1763 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
1770 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1772 struct i965_driver_data *i965 = i965_driver_data(ctx);
1773 struct object_context *obj_context = CONTEXT(context);
1774 struct object_config *obj_config;
1777 assert(obj_context);
1778 config = obj_context->config_id;
1779 obj_config = CONFIG(config);
1782 if (obj_context->codec_type == CODEC_PROC) {
1783 assert(VAEntrypointVideoProc == obj_config->entrypoint);
1784 } else if (obj_context->codec_type == CODEC_ENC) {
1785 assert(VAEntrypointEncSlice == obj_config->entrypoint);
1787 assert(obj_context->codec_state.encode.pic_param ||
1788 obj_context->codec_state.encode.pic_param_ext);
1789 assert(obj_context->codec_state.encode.seq_param ||
1790 obj_context->codec_state.encode.seq_param_ext);
1791 assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
1792 obj_context->codec_state.encode.num_slice_params_ext >= 1);
1794 assert(obj_context->codec_state.decode.pic_param);
1795 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1796 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1797 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
1800 assert(obj_context->hw_context->run);
1801 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1803 return VA_STATUS_SUCCESS;
1807 i965_SyncSurface(VADriverContextP ctx,
1808 VASurfaceID render_target)
1810 struct i965_driver_data *i965 = i965_driver_data(ctx);
1811 struct object_surface *obj_surface = SURFACE(render_target);
1813 assert(obj_surface);
1815 return VA_STATUS_SUCCESS;
1819 i965_QuerySurfaceStatus(VADriverContextP ctx,
1820 VASurfaceID render_target,
1821 VASurfaceStatus *status) /* out */
1823 struct i965_driver_data *i965 = i965_driver_data(ctx);
1824 struct object_surface *obj_surface = SURFACE(render_target);
1826 assert(obj_surface);
1828 /* Usually GEM will handle synchronization with the graphics hardware */
1830 if (obj_surface->bo) {
1831 dri_bo_map(obj_surface->bo, 0);
1832 dri_bo_unmap(obj_surface->bo);
1836 *status = obj_surface->status;
1838 return VA_STATUS_SUCCESS;
1843 * Query display attributes
1844 * The caller must provide a "attr_list" array that can hold at
1845 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1846 * returned in "attr_list" is returned in "num_attributes".
1849 i965_QueryDisplayAttributes(VADriverContextP ctx,
1850 VADisplayAttribute *attr_list, /* out */
1851 int *num_attributes) /* out */
1854 *num_attributes = 0;
1856 return VA_STATUS_SUCCESS;
1860 * Get display attributes
1861 * This function returns the current attribute values in "attr_list".
1862 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1863 * from vaQueryDisplayAttributes() can have their values retrieved.
1866 i965_GetDisplayAttributes(VADriverContextP ctx,
1867 VADisplayAttribute *attr_list, /* in/out */
1871 return VA_STATUS_ERROR_UNIMPLEMENTED;
1875 * Set display attributes
1876 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1877 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1878 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
1881 i965_SetDisplayAttributes(VADriverContextP ctx,
1882 VADisplayAttribute *attr_list,
1886 return VA_STATUS_ERROR_UNIMPLEMENTED;
1890 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1891 VASurfaceID surface,
1892 void **buffer, /* out */
1893 unsigned int *stride) /* out */
1896 return VA_STATUS_ERROR_UNIMPLEMENTED;
1900 i965_Init(VADriverContextP ctx)
1902 struct i965_driver_data *i965 = i965_driver_data(ctx);
1904 if (intel_driver_init(ctx) == False)
1905 return VA_STATUS_ERROR_UNKNOWN;
1907 if (IS_G4X(i965->intel.device_id))
1908 i965->codec_info = &g4x_hw_codec_info;
1909 else if (IS_IRONLAKE(i965->intel.device_id))
1910 i965->codec_info = &ironlake_hw_codec_info;
1911 else if (IS_GEN6(i965->intel.device_id))
1912 i965->codec_info = &gen6_hw_codec_info;
1913 else if (IS_GEN7(i965->intel.device_id))
1914 i965->codec_info = &gen7_hw_codec_info;
1916 return VA_STATUS_ERROR_UNKNOWN;
1918 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1920 if (i965_post_processing_init(ctx) == False)
1921 return VA_STATUS_ERROR_UNKNOWN;
1923 if (i965_render_init(ctx) == False)
1924 return VA_STATUS_ERROR_UNKNOWN;
1926 _i965InitMutex(&i965->render_mutex);
1928 return VA_STATUS_SUCCESS;
1932 i965_destroy_heap(struct object_heap *heap,
1933 void (*func)(struct object_heap *heap, struct object_base *object))
1935 struct object_base *object;
1936 object_heap_iterator iter;
1938 object = object_heap_first(heap, &iter);
1944 object = object_heap_next(heap, &iter);
1947 object_heap_destroy(heap);
1952 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
1955 i965_CreateImage(VADriverContextP ctx,
1956 VAImageFormat *format,
1959 VAImage *out_image) /* out */
1961 struct i965_driver_data *i965 = i965_driver_data(ctx);
1962 struct object_image *obj_image;
1963 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1965 unsigned int width2, height2, size2, size;
1967 out_image->image_id = VA_INVALID_ID;
1968 out_image->buf = VA_INVALID_ID;
1970 image_id = NEW_IMAGE_ID();
1971 if (image_id == VA_INVALID_ID)
1972 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1974 obj_image = IMAGE(image_id);
1976 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1977 obj_image->bo = NULL;
1978 obj_image->palette = NULL;
1979 obj_image->derived_surface = VA_INVALID_ID;
1981 VAImage * const image = &obj_image->image;
1982 image->image_id = image_id;
1983 image->buf = VA_INVALID_ID;
1985 size = width * height;
1986 width2 = (width + 1) / 2;
1987 height2 = (height + 1) / 2;
1988 size2 = width2 * height2;
1990 image->num_palette_entries = 0;
1991 image->entry_bytes = 0;
1992 memset(image->component_order, 0, sizeof(image->component_order));
1994 switch (format->fourcc) {
1995 case VA_FOURCC('I','A','4','4'):
1996 case VA_FOURCC('A','I','4','4'):
1997 image->num_planes = 1;
1998 image->pitches[0] = width;
1999 image->offsets[0] = 0;
2000 image->data_size = image->offsets[0] + image->pitches[0] * height;
2001 image->num_palette_entries = 16;
2002 image->entry_bytes = 3;
2003 image->component_order[0] = 'R';
2004 image->component_order[1] = 'G';
2005 image->component_order[2] = 'B';
2007 case VA_FOURCC('A','R','G','B'):
2008 case VA_FOURCC('A','B','G','R'):
2009 case VA_FOURCC('B','G','R','A'):
2010 case VA_FOURCC('R','G','B','A'):
2011 image->num_planes = 1;
2012 image->pitches[0] = width * 4;
2013 image->offsets[0] = 0;
2014 image->data_size = image->offsets[0] + image->pitches[0] * height;
2016 case VA_FOURCC('Y','V','1','2'):
2017 image->num_planes = 3;
2018 image->pitches[0] = width;
2019 image->offsets[0] = 0;
2020 image->pitches[1] = width2;
2021 image->offsets[1] = size + size2;
2022 image->pitches[2] = width2;
2023 image->offsets[2] = size;
2024 image->data_size = size + 2 * size2;
2026 case VA_FOURCC('I','4','2','0'):
2027 image->num_planes = 3;
2028 image->pitches[0] = width;
2029 image->offsets[0] = 0;
2030 image->pitches[1] = width2;
2031 image->offsets[1] = size;
2032 image->pitches[2] = width2;
2033 image->offsets[2] = size + size2;
2034 image->data_size = size + 2 * size2;
2036 case VA_FOURCC('N','V','1','2'):
2037 image->num_planes = 2;
2038 image->pitches[0] = width;
2039 image->offsets[0] = 0;
2040 image->pitches[1] = width;
2041 image->offsets[1] = size;
2042 image->data_size = size + 2 * size2;
2048 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
2049 image->data_size, 1, NULL, &image->buf);
2050 if (va_status != VA_STATUS_SUCCESS)
2053 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2054 dri_bo_reference(obj_image->bo);
2056 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2057 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2058 if (!obj_image->palette)
2062 image->image_id = image_id;
2063 image->format = *format;
2064 image->width = width;
2065 image->height = height;
2067 *out_image = *image;
2068 return VA_STATUS_SUCCESS;
2071 i965_DestroyImage(ctx, image_id);
2076 i965_check_alloc_surface_bo(VADriverContextP ctx,
2077 struct object_surface *obj_surface,
2079 unsigned int fourcc)
2081 struct i965_driver_data *i965 = i965_driver_data(ctx);
2083 if (obj_surface->bo) {
2084 assert(obj_surface->fourcc);
2085 assert(obj_surface->fourcc == fourcc);
2089 if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2090 fourcc == VA_FOURCC('I', 'M', 'C', '3'))
2091 obj_surface->size = ALIGN(obj_surface->width * obj_surface->height * 2, 0x1000);
2093 obj_surface->size = ALIGN(obj_surface->width * obj_surface->height * 3 / 2, 0x1000);
2096 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2097 unsigned long pitch;
2098 unsigned long height;
2100 if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2101 fourcc == VA_FOURCC('I', 'M', 'C', '3'))
2102 height = ALIGN(obj_surface->height, 32) + ALIGN(obj_surface->height / 2, 32) * 2;
2104 height = ALIGN(obj_surface->height, 32) + ALIGN(obj_surface->height / 2, 32);
2106 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
2114 assert(tiling_mode == I915_TILING_Y);
2115 assert(pitch == obj_surface->width);
2117 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
2123 obj_surface->fourcc = fourcc;
2124 assert(obj_surface->bo);
2127 VAStatus i965_DeriveImage(VADriverContextP ctx,
2128 VASurfaceID surface,
2129 VAImage *out_image) /* out */
2131 struct i965_driver_data *i965 = i965_driver_data(ctx);
2132 struct i965_render_state *render_state = &i965->render_state;
2133 struct object_image *obj_image;
2134 struct object_surface *obj_surface;
2136 unsigned int w_pitch, h_pitch;
2137 unsigned int data_size;
2138 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2140 out_image->image_id = VA_INVALID_ID;
2141 obj_surface = SURFACE(surface);
2144 return VA_STATUS_ERROR_INVALID_SURFACE;
2146 i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), VA_FOURCC('N', 'V', '1', '2'));
2148 w_pitch = obj_surface->width;
2149 h_pitch = obj_surface->height;
2150 data_size = obj_surface->orig_width * obj_surface->orig_height +
2151 2 * (((obj_surface->orig_width + 1) / 2) * ((obj_surface->orig_height + 1) / 2));
2153 image_id = NEW_IMAGE_ID();
2155 if (image_id == VA_INVALID_ID)
2156 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2158 obj_image = IMAGE(image_id);
2161 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2163 obj_image->bo = NULL;
2164 obj_image->palette = NULL;
2165 obj_image->derived_surface = VA_INVALID_ID;
2167 VAImage * const image = &obj_image->image;
2169 memset(image, 0, sizeof(*image));
2170 image->image_id = image_id;
2171 image->buf = VA_INVALID_ID;
2172 image->num_palette_entries = 0;
2173 image->entry_bytes = 0;
2174 image->width = obj_surface->orig_width;
2175 image->height = obj_surface->orig_height;
2176 image->data_size = data_size;
2178 image->format.fourcc = obj_surface->fourcc;
2179 image->format.byte_order = VA_LSB_FIRST;
2180 image->format.bits_per_pixel = 12;
2182 switch (image->format.fourcc) {
2183 case VA_FOURCC('Y', 'V', '1', '2'):
2184 image->num_planes = 3;
2185 image->pitches[0] = w_pitch; /* Y */
2186 image->offsets[0] = 0;
2187 image->pitches[1] = w_pitch / 2; /* V */
2188 image->offsets[1] = w_pitch * h_pitch;
2189 image->pitches[2] = w_pitch / 2; /* U */
2190 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
2193 case VA_FOURCC('N', 'V', '1', '2'):
2194 image->num_planes = 2;
2195 image->pitches[0] = w_pitch; /* Y */
2196 image->offsets[0] = 0;
2197 image->pitches[1] = w_pitch; /* UV */
2198 image->offsets[1] = w_pitch * h_pitch;
2201 case VA_FOURCC('I', '4', '2', '0'):
2202 image->num_planes = 3;
2203 image->pitches[0] = w_pitch; /* Y */
2204 image->offsets[0] = 0;
2205 image->pitches[1] = w_pitch / 2; /* U */
2206 image->offsets[1] = w_pitch * h_pitch;
2207 image->pitches[2] = w_pitch / 2; /* V */
2208 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
2215 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2216 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2217 if (va_status != VA_STATUS_SUCCESS)
2220 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2221 dri_bo_reference(obj_image->bo);
2223 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2224 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2225 if (!obj_image->palette) {
2226 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
2231 *out_image = *image;
2232 obj_surface->flags |= SURFACE_DERIVED;
2233 obj_image->derived_surface = surface;
2235 return VA_STATUS_SUCCESS;
2238 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: returns the slot to the heap. */
static void 
i965_destroy_image(struct object_heap *heap, struct object_base *obj)
{
    object_heap_free(heap, obj);
}
2250 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2252 struct i965_driver_data *i965 = i965_driver_data(ctx);
2253 struct object_image *obj_image = IMAGE(image);
2254 struct object_surface *obj_surface;
2257 return VA_STATUS_SUCCESS;
2259 dri_bo_unreference(obj_image->bo);
2260 obj_image->bo = NULL;
2262 if (obj_image->image.buf != VA_INVALID_ID) {
2263 i965_DestroyBuffer(ctx, obj_image->image.buf);
2264 obj_image->image.buf = VA_INVALID_ID;
2267 if (obj_image->palette) {
2268 free(obj_image->palette);
2269 obj_image->palette = NULL;
2272 obj_surface = SURFACE(obj_image->derived_surface);
2275 obj_surface->flags &= ~SURFACE_DERIVED;
2278 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2280 return VA_STATUS_SUCCESS;
2284 * pointer to an array holding the palette data. The size of the array is
2285 * num_palette_entries * entry_bytes in size. The order of the components
2286 * in the palette is described by the component_order in VASubpicture struct
/* vaSetImagePalette entry point: load a client-supplied palette into a
 * paletted image.  Each source entry is 3 bytes; they are packed into one
 * 32-bit word per entry as (byte0 << 16) | (byte1 << 8) | byte2.
 * NOTE(review): the listing drops lines (the VAImageID parameter, the
 * NULL check before 2298, and the declaration of `i`). */
2289 i965_SetImagePalette(VADriverContextP ctx,
2291 unsigned char *palette)
2293 struct i965_driver_data *i965 = i965_driver_data(ctx);
2296 struct object_image *obj_image = IMAGE(image);
2298 return VA_STATUS_ERROR_INVALID_IMAGE;
/* No palette array was allocated at image creation — the image format
 * is not paletted (or allocation failed then). */
2300 if (!obj_image->palette)
2301 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
2303 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2304 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2305 ((unsigned int)palette[3*i + 1] << 8) |
2306 (unsigned int)palette[3*i + 2]);
2307 return VA_STATUS_SUCCESS;
/* Copy a rectangular picture region row by row: `height` rows of `len`
 * bytes each, honoring independent source and destination strides.
 * NOTE(review): the per-row pointer advances (dst += dst_stride;
 * src += src_stride;) fall in a listing gap after 2318 — confirm in the
 * full file. */
2311 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2312 const uint8_t *src, unsigned int src_stride,
2313 unsigned int len, unsigned int height)
2317 for (i = 0; i < height; i++) {
2318 memcpy(dst, src, len);
/* Copy the `rect` region of a planar I420 surface into a user VAImage
 * that is either I420 or YV12.  When the image fourcc differs from the
 * surface fourcc the U and V destination plane indices are swapped
 * (I420 <-> YV12 differ only in U/V plane order).  Chroma coordinates
 * and sizes are halved relative to luma (4:2:0 subsampling).
 * NOTE(review): the `Y` plane-index constant and some error-path lines
 * (e.g. after the !bo and !virtual checks) fall in listing gaps. */
2325 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2326 struct object_surface *obj_surface,
2327 const VARectangle *rect)
2329 uint8_t *dst[3], *src[3];
/* Same fourcc: planes map 1:1.  Different fourcc: swap U and V. */
2331 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2332 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2333 unsigned int tiling, swizzle;
2335 if (!obj_surface->bo)
2338 assert(obj_surface->fourcc);
2339 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled buffers must be mapped through the GTT so the CPU sees a
 * linear (de-tiled) view; untiled buffers use a plain CPU map. */
2341 if (tiling != I915_TILING_NONE)
2342 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2344 dri_bo_map(obj_surface->bo, 0);
2346 if (!obj_surface->bo->virtual)
2349 /* Dest VA image has either I420 or YV12 format.
2350 Source VA surface alway has I420 format */
2351 dst[Y] = image_data + obj_image->image.offsets[Y];
2352 src[0] = (uint8_t *)obj_surface->bo->virtual;
2353 dst[U] = image_data + obj_image->image.offsets[U];
/* Surface planes are contiguous: U follows the full Y plane, V follows
 * the quarter-size U plane. */
2354 src[1] = src[0] + obj_surface->width * obj_surface->height;
2355 dst[V] = image_data + obj_image->image.offsets[V];
2356 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: offset both pointers to the rect origin, then blit. */
2359 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2360 src[0] += rect->y * obj_surface->width + rect->x;
2361 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2362 src[0], obj_surface->width,
2363 rect->width, rect->height);
/* U plane (half resolution in both dimensions). */
2366 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2367 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2368 memcpy_pic(dst[U], obj_image->image.pitches[U],
2369 src[1], obj_surface->width / 2,
2370 rect->width / 2, rect->height / 2);
/* V plane (half resolution in both dimensions). */
2373 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2374 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2375 memcpy_pic(dst[V], obj_image->image.pitches[V],
2376 src[2], obj_surface->width / 2,
2377 rect->width / 2, rect->height / 2);
/* Unmap with the API matching whichever map call was used above. */
2379 if (tiling != I915_TILING_NONE)
2380 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2382 dri_bo_unmap(obj_surface->bo);
/* Copy the `rect` region of an NV12 surface into an NV12 VAImage.
 * NV12 has two planes: full-resolution Y, then interleaved UV at half
 * vertical resolution.  The UV x offset is rounded down to an even
 * value (rect->x & -2) so a copy never starts mid-UV-pair.
 * NOTE(review): error-path lines after the !bo / !virtual checks fall
 * in listing gaps. */
2386 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2387 struct object_surface *obj_surface,
2388 const VARectangle *rect)
2390 uint8_t *dst[2], *src[2];
2391 unsigned int tiling, swizzle;
2393 if (!obj_surface->bo)
2396 assert(obj_surface->fourcc);
2397 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled buffers need a GTT map for a linear CPU view. */
2399 if (tiling != I915_TILING_NONE)
2400 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2402 dri_bo_map(obj_surface->bo, 0);
2404 if (!obj_surface->bo->virtual)
2407 /* Both dest VA image and source surface have NV12 format */
2408 dst[0] = image_data + obj_image->image.offsets[0];
2409 src[0] = (uint8_t *)obj_surface->bo->virtual;
2410 dst[1] = image_data + obj_image->image.offsets[1];
/* UV plane immediately follows the Y plane in the surface bo. */
2411 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane blit of the requested rectangle. */
2414 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2415 src[0] += rect->y * obj_surface->width + rect->x;
2416 memcpy_pic(dst[0], obj_image->image.pitches[0],
2417 src[0], obj_surface->width,
2418 rect->width, rect->height);
/* UV plane: same pitch as Y, half the rows; x snapped to even. */
2421 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2422 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2423 memcpy_pic(dst[1], obj_image->image.pitches[1],
2424 src[1], obj_surface->width,
2425 rect->width, rect->height / 2);
/* Unmap with the API matching the map call used above. */
2427 if (tiling != I915_TILING_NONE)
2428 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2430 dri_bo_unmap(obj_surface->bo);
/* vaGetImage entry point: read a rectangular region of a decoded
 * surface into a client VAImage.  Validates the rectangle against both
 * surface and image dimensions, maps the image buffer, then dispatches
 * on the image fourcc.  Planar I420/YV12 reads are only valid when the
 * render state keeps separate U/V planes; NV12 only when it keeps
 * interleaved UV — a mismatch is reported as OPERATION_FAILED.
 * NOTE(review): the listing drops lines (the `y` parameter, the rect
 * x/y/width assignments before 2473, `break` statements after each
 * case, and the function epilogue). */
2434 i965_GetImage(VADriverContextP ctx,
2435 VASurfaceID surface,
2436 int x, /* coordinates of the upper left source pixel */
2438 unsigned int width, /* width and height of the region */
2439 unsigned int height,
2442 struct i965_driver_data *i965 = i965_driver_data(ctx);
2443 struct i965_render_state *render_state = &i965->render_state;
2445 struct object_surface *obj_surface = SURFACE(surface);
2447 return VA_STATUS_ERROR_INVALID_SURFACE;
2449 struct object_image *obj_image = IMAGE(image);
2451 return VA_STATUS_ERROR_INVALID_IMAGE;
2454 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* Region must lie inside the source surface... */
2455 if (x + width > obj_surface->orig_width ||
2456 y + height > obj_surface->orig_height)
2457 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* ...and inside the destination image. */
2458 if (x + width > obj_image->image.width ||
2459 y + height > obj_image->image.height)
2460 return VA_STATUS_ERROR_INVALID_PARAMETER;
2463 void *image_data = NULL;
2465 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2466 if (va_status != VA_STATUS_SUCCESS)
2473 rect.height = height;
2475 switch (obj_image->image.format.fourcc) {
2476 case VA_FOURCC('Y','V','1','2'):
2477 case VA_FOURCC('I','4','2','0'):
2478 /* I420 is native format for MPEG-2 decoded surfaces */
2479 if (render_state->interleaved_uv)
2480 goto operation_failed;
2481 get_image_i420(obj_image, image_data, obj_surface, &rect);
2483 case VA_FOURCC('N','V','1','2'):
2484 /* NV12 is native format for H.264 decoded surfaces */
2485 if (!render_state->interleaved_uv)
2486 goto operation_failed;
2487 get_image_nv12(obj_image, image_data, obj_surface, &rect);
/* Unsupported fourcc (and format/layout mismatches) end up here. */
2491 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2495 i965_UnmapBuffer(ctx, obj_image->image.buf);
/* vaPutSurface entry point: render a decoded surface into an X
 * drawable via DRI2.  Resolves the DRI2 back buffer, (re)creates the
 * cached destination intel_region when the buffer changed, runs the
 * render/post-processing path, composites any subpicture, swaps, and
 * finally releases surface resources that are no longer referenced.
 * Only DRI2 is supported; DRI1 connections fail with UNKNOWN.
 * NOTE(review): the listing drops lines throughout (srcx/srcy/destx/
 * desty parameters, `unsigned int name`/`pp_flag`/`ret` declarations,
 * src_rect.x/y and dst_rect.x/y assignments, and several closing
 * braces) — keep that in mind when reading the control flow. */
2500 i965_PutSurface(VADriverContextP ctx,
2501 VASurfaceID surface,
2502 void *draw, /* X Drawable */
2505 unsigned short srcw,
2506 unsigned short srch,
2509 unsigned short destw,
2510 unsigned short desth,
2511 VARectangle *cliprects, /* client supplied clip list */
2512 unsigned int number_cliprects, /* number of clip rects in the clip list */
2513 unsigned int flags) /* de-interlacing flags */
2515 struct i965_driver_data *i965 = i965_driver_data(ctx);
2516 struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
2517 struct i965_render_state *render_state = &i965->render_state;
2518 struct dri_drawable *dri_drawable;
2519 union dri_buffer *buffer;
2520 struct intel_region *dest_region;
2521 struct object_surface *obj_surface;
2522 VARectangle src_rect, dst_rect;
2525 Bool new_region = False;
2528 /* Currently don't support DRI1 */
2529 if (dri_state->driConnectedFlag != VA_DRI2)
2530 return VA_STATUS_ERROR_UNKNOWN;
2532 /* Some broken sources such as H.264 conformance case FM2_SVA_C
/* A missing surface or bo is silently accepted (returns SUCCESS) to
 * tolerate the broken streams mentioned in the comment above. */
2535 obj_surface = SURFACE(surface);
2536 if (!obj_surface || !obj_surface->bo)
2537 return VA_STATUS_SUCCESS;
/* Rendering state is shared; serialize the whole operation. */
2539 _i965LockMutex(&i965->render_mutex);
2541 dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
2542 assert(dri_drawable);
2544 buffer = dri_get_rendering_buffer(ctx, dri_drawable);
2547 dest_region = render_state->draw_region;
/* Compare the cached region's flink name against the current DRI2
 * buffer; a mismatch means the backing buffer changed. */
2550 assert(dest_region->bo);
2551 dri_bo_flink(dest_region->bo, &name);
2553 if (buffer->dri2.name != name) {
2555 dri_bo_unreference(dest_region->bo);
/* No (valid) cached region: allocate a fresh one and cache it. */
2558 dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
2559 assert(dest_region);
2560 render_state->draw_region = dest_region;
/* Refresh region geometry from the drawable and DRI2 buffer. */
2565 dest_region->x = dri_drawable->x;
2566 dest_region->y = dri_drawable->y;
2567 dest_region->width = dri_drawable->width;
2568 dest_region->height = dri_drawable->height;
2569 dest_region->cpp = buffer->dri2.cpp;
2570 dest_region->pitch = buffer->dri2.pitch;
/* Open the DRI2 buffer by its global (flink) name. */
2572 dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
2573 assert(dest_region->bo);
2575 ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
/* Translate VA flags into the driver's post-processing flags. */
2579 if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
2580 pp_flag |= I965_PP_FLAG_AVS;
2582 if (flags & (VA_BOTTOM_FIELD | VA_TOP_FIELD))
2583 pp_flag |= I965_PP_FLAG_DEINTERLACING;
2587 src_rect.width = srcw;
2588 src_rect.height = srch;
2592 dst_rect.width = destw;
2593 dst_rect.height = desth;
2595 intel_render_put_surface(ctx, surface, &src_rect, &dst_rect, pp_flag);
2597 if(obj_surface->subpic != VA_INVALID_ID) {
2598 intel_render_put_subpicture(ctx, surface, &src_rect, &dst_rect);
2601 dri_swap_buffer(ctx, dri_drawable);
2602 obj_surface->flags |= SURFACE_DISPLAYED;
/* Once displayed and no longer referenced/bound, the surface's bo and
 * private decoder data can be released early. */
2604 if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
2605 dri_bo_unreference(obj_surface->bo);
2606 obj_surface->bo = NULL;
2607 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
2609 if (obj_surface->free_private_data)
2610 obj_surface->free_private_data(&obj_surface->private_data);
2613 _i965UnlockMutex(&i965->render_mutex);
2615 return VA_STATUS_SUCCESS;
/* vaTerminate entry point: tear down the whole driver instance —
 * batchbuffer, render mutex, render and post-processing state, the
 * intel driver core, all six object heaps, and finally the driver data
 * itself.  Any sub-terminate failure aborts with UNKNOWN (leaving later
 * resources unfreed). */
2619 i965_Terminate(VADriverContextP ctx)
2621 struct i965_driver_data *i965 = i965_driver_data(ctx);
2624 intel_batchbuffer_free(i965->batch);
2626 _i965DestroyMutex(&i965->render_mutex);
2628 if (i965_render_terminate(ctx) == False)
2629 return VA_STATUS_ERROR_UNKNOWN;
2631 if (i965_post_processing_terminate(ctx) == False)
2632 return VA_STATUS_ERROR_UNKNOWN;
2634 if (intel_driver_terminate(ctx) == False)
2635 return VA_STATUS_ERROR_UNKNOWN;
/* Free heaps with their per-type destructors; buffers/images/subpics
 * first, then the surfaces/contexts/configs they may reference. */
2637 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2638 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2639 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2640 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2641 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2642 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2644 free(ctx->pDriverData);
2645 ctx->pDriverData = NULL;
2647 return VA_STATUS_SUCCESS;
/* vaBufferInfo entry point: report a buffer's type, element size, and
 * element count through the out parameters.
 * NOTE(review): the function name line and the validity check on
 * obj_buffer (expected between 2663 and 2665) fall in listing gaps —
 * confirm NULL handling against the full file. */
2652 VADriverContextP ctx, /* in */
2653 VABufferID buf_id, /* in */
2654 VABufferType *type, /* out */
2655 unsigned int *size, /* out */
2656 unsigned int *num_elements /* out */
2659 struct i965_driver_data *i965 = NULL;
2660 struct object_buffer *obj_buffer = NULL;
2662 i965 = i965_driver_data(ctx);
2663 obj_buffer = BUFFER(buf_id);
2665 *type = obj_buffer->type;
2666 *size = obj_buffer->size_element;
2667 *num_elements = obj_buffer->num_elements;
2669 return VA_STATUS_SUCCESS;
/* vaLockSurface entry point: give the client direct CPU access to a
 * surface.  Implemented by deriving a temporary VAImage from the
 * surface, mapping its buffer, and reporting the image's fourcc,
 * per-plane strides/offsets, and buffer id.  The derived image id is
 * remembered in obj_surface->locked_image_id so i965_UnlockSurface can
 * undo everything; a second lock on the same surface is rejected.
 * NOTE(review): the function name line, the `tmpImage` declaration, the
 * full argument lists of the DeriveImage/MapBuffer calls, and the
 * error/cleanup epilogue after 2746 fall in listing gaps. */
2674 VADriverContextP ctx, /* in */
2675 VASurfaceID surface, /* in */
2676 unsigned int *fourcc, /* out */
2677 unsigned int *luma_stride, /* out */
2678 unsigned int *chroma_u_stride, /* out */
2679 unsigned int *chroma_v_stride, /* out */
2680 unsigned int *luma_offset, /* out */
2681 unsigned int *chroma_u_offset, /* out */
2682 unsigned int *chroma_v_offset, /* out */
2683 unsigned int *buffer_name, /* out */
2684 void **buffer /* out */
2687 VAStatus vaStatus = VA_STATUS_SUCCESS;
2688 struct i965_driver_data *i965 = i965_driver_data(ctx);
2689 struct object_surface *obj_surface = NULL;
/* All out parameters are mandatory. */
2693 assert(luma_stride);
2694 assert(chroma_u_stride);
2695 assert(chroma_v_stride);
2696 assert(luma_offset);
2697 assert(chroma_u_offset);
2698 assert(chroma_v_offset);
2699 assert(buffer_name);
2702 tmpImage.image_id = VA_INVALID_ID;
2704 obj_surface = SURFACE(surface);
2705 if (obj_surface == NULL) {
2706 // Surface is absent.
2707 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2711 // Lock functionality is absent now.
2712 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2713 // Surface is locked already.
2714 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a VAImage aliasing the surface's memory. */
2718 vaStatus = i965_DeriveImage(
2722 if (vaStatus != VA_STATUS_SUCCESS) {
/* Record the derived image so Unlock can find and destroy it. */
2726 obj_surface->locked_image_id = tmpImage.image_id;
/* Map the image buffer to obtain the CPU pointer for the client. */
2728 vaStatus = i965_MapBuffer(
2732 if (vaStatus != VA_STATUS_SUCCESS) {
/* Publish the layout of the mapped surface to the caller. */
2736 *fourcc = tmpImage.format.fourcc;
2737 *luma_offset = tmpImage.offsets[0];
2738 *luma_stride = tmpImage.pitches[0];
2739 *chroma_u_offset = tmpImage.offsets[1];
2740 *chroma_u_stride = tmpImage.pitches[1];
2741 *chroma_v_offset = tmpImage.offsets[2];
2742 *chroma_v_stride = tmpImage.pitches[2];
2743 *buffer_name = tmpImage.buf;
/* Error path: presumably unwinds the derive/map done above — TODO
 * confirm against the lines missing after 2746. */
2746 if (vaStatus != VA_STATUS_SUCCESS) {
/* vaUnlockSurface entry point: undo i965_LockSurface — unmap the
 * derived image's buffer, destroy the derived image, and clear the
 * surface's locked_image_id marker.  Rejects surfaces that are absent,
 * not locked, or whose work image vanished in the meantime.
 * NOTE(review): the function name line, `error:` label plumbing, and
 * the final return fall in listing gaps. */
2755 VADriverContextP ctx, /* in */
2756 VASurfaceID surface /* in */
2759 VAStatus vaStatus = VA_STATUS_SUCCESS;
2760 struct i965_driver_data *i965 = i965_driver_data(ctx);
2761 struct object_image *locked_img = NULL;
2762 struct object_surface *obj_surface = NULL;
2764 obj_surface = SURFACE(surface);
2766 if (obj_surface == NULL) {
2767 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
2770 if (obj_surface->locked_image_id == VA_INVALID_ID) {
2771 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
/* Look up the image created at lock time. */
2775 locked_img = IMAGE(obj_surface->locked_image_id);
2776 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
2777 // Work image was deallocated before i965_UnlockSurface()
2778 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2782 vaStatus = i965_UnmapBuffer(
2784 locked_img->image.buf);
2785 if (vaStatus != VA_STATUS_SUCCESS) {
2789 vaStatus = i965_DestroyImage(
2791 locked_img->image.image_id);
2792 if (vaStatus != VA_STATUS_SUCCESS) {
/* Mark the image slot invalid; the surface is unlocked elsewhere
 * (locked_image_id reset falls in a listing gap — TODO confirm). */
2796 locked_img->image.image_id = VA_INVALID_ID;
2803 * Query video processing pipeline
/* Report the video-processing pipeline: on hardware with VPP support,
 * advertise noise reduction and deinterlacing (both bypassable); pad
 * the remaining pipeline slots with VAProcFilterNone.
 * NOTE(review): the declaration of `i` (and its initialization to 0)
 * falls in a listing gap between 2811 and 2814. */
2805 VAStatus i965_QueryVideoProcPipelineCap(
2806 VADriverContextP ctx,
2807 VAContextID context,
2808 VAProcPipelineCap *pipeline_cap /* out */
2811 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2814 if (HAS_VPP(i965)) {
2815 pipeline_cap->filter_pipeline[i] = VAProcFilterNoiseReduction;
2816 pipeline_cap->bypass[i++] = 1;
2817 pipeline_cap->filter_pipeline[i] = VAProcFilterDeinterlacing;
2818 pipeline_cap->bypass[i++] = 1;
/* Fill unused slots with the "no filter" sentinel. */
2821 for (; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++) {
2822 pipeline_cap->filter_pipeline[i] = VAProcFilterNone;
2823 pipeline_cap->bypass[i] = 1;
2826 return VA_STATUS_SUCCESS;
/* Report capabilities for a single video-processing filter.
 * NOTE(review): the parameter list tail and the body (if any) before
 * the return fall in a listing gap (2833–2835); as visible, this
 * simply reports success. */
2829 VAStatus i965_QueryVideoProcFilterCap(
2830 VADriverContextP ctx,
2831 VAContextID context,
2832 VAProcFilterType filter,
2836 return VA_STATUS_SUCCESS;
/* Report reference-frame requirements of the video-processing engine:
 * this driver needs no forward or backward reference frames. */
2839 VAStatus i965_QueryVideoProcReferenceFramesCap(
2840 VADriverContextP ctx,
2841 VAContextID context,
2842 unsigned int *num_forward_reference, /* out */
2843 unsigned int *num_backward_reference /* out */
2847 *num_forward_reference = 0;
2848 *num_backward_reference = 0;
2850 return VA_STATUS_SUCCESS;
2854 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
2857 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2859 struct VADriverVTable * const vtable = ctx->vtable;
2860 struct i965_driver_data *i965;
2863 ctx->version_major = VA_MAJOR_VERSION;
2864 ctx->version_minor = VA_MINOR_VERSION;
2865 ctx->max_profiles = I965_MAX_PROFILES;
2866 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2867 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2868 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2869 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2870 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
2872 vtable->vaTerminate = i965_Terminate;
2873 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2874 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
2875 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2876 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2877 vtable->vaCreateConfig = i965_CreateConfig;
2878 vtable->vaDestroyConfig = i965_DestroyConfig;
2879 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2880 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2881 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2882 vtable->vaCreateContext = i965_CreateContext;
2883 vtable->vaDestroyContext = i965_DestroyContext;
2884 vtable->vaCreateBuffer = i965_CreateBuffer;
2885 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2886 vtable->vaMapBuffer = i965_MapBuffer;
2887 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2888 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2889 vtable->vaBeginPicture = i965_BeginPicture;
2890 vtable->vaRenderPicture = i965_RenderPicture;
2891 vtable->vaEndPicture = i965_EndPicture;
2892 vtable->vaSyncSurface = i965_SyncSurface;
2893 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2894 vtable->vaPutSurface = i965_PutSurface;
2895 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2896 vtable->vaCreateImage = i965_CreateImage;
2897 vtable->vaDeriveImage = i965_DeriveImage;
2898 vtable->vaDestroyImage = i965_DestroyImage;
2899 vtable->vaSetImagePalette = i965_SetImagePalette;
2900 vtable->vaGetImage = i965_GetImage;
2901 vtable->vaPutImage = i965_PutImage;
2902 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2903 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2904 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2905 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2906 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2907 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2908 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2909 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2910 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2911 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2912 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2913 vtable->vaBufferInfo = i965_BufferInfo;
2914 vtable->vaLockSurface = i965_LockSurface;
2915 vtable->vaUnlockSurface = i965_UnlockSurface;
2916 vtable->vaQueryVideoProcPipelineCap = i965_QueryVideoProcPipelineCap;
2917 vtable->vaQueryVideoProcFilterCap = i965_QueryVideoProcFilterCap;
2918 vtable->vaQueryVideoProcReferenceFramesCap = i965_QueryVideoProcReferenceFramesCap;
2920 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
2922 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2924 ctx->pDriverData = (void *)i965;
2926 result = object_heap_init(&i965->config_heap,
2927 sizeof(struct object_config),
2929 assert(result == 0);
2931 result = object_heap_init(&i965->context_heap,
2932 sizeof(struct object_context),
2934 assert(result == 0);
2936 result = object_heap_init(&i965->surface_heap,
2937 sizeof(struct object_surface),
2939 assert(result == 0);
2941 result = object_heap_init(&i965->buffer_heap,
2942 sizeof(struct object_buffer),
2944 assert(result == 0);
2946 result = object_heap_init(&i965->image_heap,
2947 sizeof(struct object_image),
2949 assert(result == 0);
2951 result = object_heap_init(&i965->subpic_heap,
2952 sizeof(struct object_subpic),
2954 assert(result == 0);
2956 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
2957 INTEL_STR_DRIVER_VENDOR,
2958 INTEL_STR_DRIVER_NAME,
2959 INTEL_DRIVER_MAJOR_VERSION,
2960 INTEL_DRIVER_MINOR_VERSION,
2961 INTEL_DRIVER_MICRO_VERSION);
2963 if (INTEL_DRIVER_PRE_VERSION > 0) {
2964 const int len = strlen(i965->va_vendor);
2965 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
2967 ctx->str_vendor = i965->va_vendor;
2969 return i965_Init(ctx);