2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
33 # include "i965_output_dri.h"
36 #ifdef HAVE_VA_WAYLAND
37 # include "i965_output_wayland.h"
40 #include "intel_driver.h"
41 #include "intel_memman.h"
42 #include "intel_batchbuffer.h"
43 #include "i965_defines.h"
44 #include "i965_drv_video.h"
45 #include "i965_decoder.h"
46 #include "i965_encoder.h"
48 #define CONFIG_ID_OFFSET 0x01000000
49 #define CONTEXT_ID_OFFSET 0x02000000
50 #define SURFACE_ID_OFFSET 0x04000000
51 #define BUFFER_ID_OFFSET 0x08000000
52 #define IMAGE_ID_OFFSET 0x0a000000
53 #define SUBPIC_ID_OFFSET 0x10000000
55 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
56 IS_IRONLAKE((ctx)->intel.device_id) || \
57 ((IS_GEN6((ctx)->intel.device_id) || \
58 IS_GEN7((ctx)->intel.device_id)) && \
59 (ctx)->intel.has_bsd))
61 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
62 IS_GEN6((ctx)->intel.device_id) || \
63 IS_IRONLAKE((ctx)->intel.device_id)) && \
66 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
67 IS_GEN6((ctx)->intel.device_id)) && \
70 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
71 IS_GEN6((ctx)->intel.device_id)) && \
72 (ctx)->render_state.interleaved_uv)
74 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
75 IS_GEN6((ctx)->intel.device_id)) && \
78 #define HAS_JPEG(ctx) (IS_GEN7((ctx)->intel.device_id) && \
81 #define HAS_ACCELERATED_GETIMAGE(ctx) (IS_GEN6((ctx)->intel.device_id) || \
82 IS_GEN7((ctx)->intel.device_id))
84 #define HAS_ACCELERATED_PUTIMAGE(ctx) HAS_VPP(ctx)
86 #if VA_CHECK_VERSION(0,33,0)
87 /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
88 #define IS_VA_X11(ctx) \
89 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)
91 /* Check whether we are rendering to Wayland */
92 #define IS_VA_WAYLAND(ctx) \
93 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
95 /* Previous VA-API versions only supported VA/X11 (and VA/GLX) API */
96 #define IS_VA_X11(ctx) 1
97 #define IS_VA_WAYLAND(ctx) 0
101 I965_SURFACETYPE_RGBA = 1,
102 I965_SURFACETYPE_YUV,
103 I965_SURFACETYPE_INDEXED
106 /* List of supported display attributes */
107 static const VADisplayAttribute i965_display_attributes[] = {
109 VADisplayAttribRotation,
110 0, 3, VA_ROTATION_NONE,
111 VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE
115 /* List of supported image formats */
118 VAImageFormat va_format;
119 } i965_image_format_map_t;
121 static const i965_image_format_map_t
122 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
123 { I965_SURFACETYPE_YUV,
124 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
125 { I965_SURFACETYPE_YUV,
126 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
127 { I965_SURFACETYPE_YUV,
128 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
131 /* List of supported subpicture formats */
135 VAImageFormat va_format;
136 unsigned int va_flags;
137 } i965_subpic_format_map_t;
139 #define COMMON_SUBPICTURE_FLAGS \
140 (VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD| \
141 VA_SUBPICTURE_GLOBAL_ALPHA)
143 static const i965_subpic_format_map_t
144 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
145 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
146 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
147 COMMON_SUBPICTURE_FLAGS },
148 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
149 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
150 COMMON_SUBPICTURE_FLAGS },
151 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P8A8_UNORM,
152 { VA_FOURCC('I','A','8','8'), VA_MSB_FIRST, 16, },
153 COMMON_SUBPICTURE_FLAGS },
154 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A8P8_UNORM,
155 { VA_FOURCC('A','I','8','8'), VA_MSB_FIRST, 16, },
156 COMMON_SUBPICTURE_FLAGS },
157 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
158 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
159 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
160 COMMON_SUBPICTURE_FLAGS },
161 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
162 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
163 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
164 COMMON_SUBPICTURE_FLAGS },
/* Find the subpicture-format table entry matching va_format, or fall
 * through when none matches.  For RGBA entries the byte order and all
 * four channel masks must match too; for indexed entries the fourcc
 * alone is decisive.
 * NOTE(review): the loop-variable declaration and the return
 * statements are not visible in this extract — verify against
 * upstream before relying on the exact control flow. */
static const i965_subpic_format_map_t *
get_subpic_format(const VAImageFormat *va_format)
    /* Table is terminated by an entry with type == 0 */
    for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
        const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
        if (m->va_format.fourcc == va_format->fourcc &&
            (m->type == I965_SURFACETYPE_RGBA ?
             (m->va_format.byte_order == va_format->byte_order &&
              m->va_format.red_mask == va_format->red_mask &&
              m->va_format.green_mask == va_format->green_mask &&
              m->va_format.blue_mask == va_format->blue_mask &&
              m->va_format.alpha_mask == va_format->alpha_mask) : 1))
185 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
186 static struct hw_codec_info g4x_hw_codec_info = {
187 .dec_hw_context_init = g4x_dec_hw_context_init,
188 .enc_hw_context_init = NULL,
193 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
194 static struct hw_codec_info ironlake_hw_codec_info = {
195 .dec_hw_context_init = ironlake_dec_hw_context_init,
196 .enc_hw_context_init = NULL,
201 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
202 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
203 static struct hw_codec_info gen6_hw_codec_info = {
204 .dec_hw_context_init = gen6_dec_hw_context_init,
205 .enc_hw_context_init = gen6_enc_hw_context_init,
210 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
211 static struct hw_codec_info gen7_hw_codec_info = {
212 .dec_hw_context_init = gen7_dec_hw_context_init,
213 .enc_hw_context_init = gen6_enc_hw_context_init,
218 static struct hw_codec_info gen75_hw_codec_info = {
219 .dec_hw_context_init = gen75_dec_hw_context_init,
220 .enc_hw_context_init = gen75_enc_hw_context_init,
/* vaQueryConfigProfiles entry point: append every profile this device
 * supports (gated by the HAS_* capability macros) to profile_list and
 * assert the count fits I965_MAX_PROFILES.
 * NOTE(review): the counter initialisation, closing braces and the
 * *num_profiles store are not visible in this extract. */
i965_QueryConfigProfiles(VADriverContextP ctx,
                         VAProfile *profile_list,       /* out */
                         int *num_profiles)             /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    if (HAS_MPEG2(i965)) {
        profile_list[i++] = VAProfileMPEG2Simple;
        profile_list[i++] = VAProfileMPEG2Main;

    if (HAS_H264(i965)) {
        profile_list[i++] = VAProfileH264Baseline;
        profile_list[i++] = VAProfileH264Main;
        profile_list[i++] = VAProfileH264High;

        /* VC-1 profiles (presumably guarded by HAS_VC1 in a line not
         * visible here — TODO confirm) */
        profile_list[i++] = VAProfileVC1Simple;
        profile_list[i++] = VAProfileVC1Main;
        profile_list[i++] = VAProfileVC1Advanced;

#ifdef HAVE_VA_JPEG_DECODE
    if (HAS_JPEG(i965)) {
        profile_list[i++] = VAProfileJPEGBaseline;

    /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
    assert(i <= I965_MAX_PROFILES);

    return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints entry point: list the entrypoints available
 * for one profile — VLD for all decodable profiles, plus EncSlice for
 * H.264 when the device has an encoder.
 * NOTE(review): the profile parameter, switch header, break statements
 * and capability guards around some cases are not visible in this
 * extract. */
i965_QueryConfigEntrypoints(VADriverContextP ctx,
                            VAEntrypoint *entrypoint_list, /* out */
                            int *num_entrypoints)          /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);

    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        entrypoint_list[n++] = VAEntrypointVLD;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        entrypoint_list[n++] = VAEntrypointVLD;

        if (HAS_ENCODER(i965))
            entrypoint_list[n++] = VAEntrypointEncSlice;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        entrypoint_list[n++] = VAEntrypointVLD;

    case VAProfileJPEGBaseline:
        entrypoint_list[n++] = VAEntrypointVLD;

    /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
    assert(n <= I965_MAX_ENTRYPOINTS);
    *num_entrypoints = n;
    /* An empty list means the profile itself is unsupported */
    return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes entry point: fill in the value for each
 * attribute the caller queried.  Only RTFormat (YUV 4:2:0) and
 * RateControl (VBR) are known; anything else is marked unsupported.
 * NOTE(review): break statements and the default label are not
 * visible in this extract. */
i965_GetConfigAttributes(VADriverContextP ctx,
                         VAEntrypoint entrypoint,
                         VAConfigAttrib *attrib_list,  /* in/out */
    /* Other attributes don't seem to be defined */
    /* What to do if we don't know the attribute? */
    for (i = 0; i < num_attribs; i++) {
        switch (attrib_list[i].type) {
        case VAConfigAttribRTFormat:
            attrib_list[i].value = VA_RT_FORMAT_YUV420;

        case VAConfigAttribRateControl:
            attrib_list[i].value = VA_RC_VBR;

            /* Unknown attribute type */
            attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;

    return VA_STATUS_SUCCESS;
/* Release a config object back to the driver's config heap.
 * (No per-config resources beyond the heap slot itself.) */
i965_destroy_config(struct object_heap *heap, struct object_base *obj)
    object_heap_free(heap, obj);
350 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
354 /* Check existing attrbiutes */
355 for (i = 0; obj_config->num_attribs < i; i++) {
356 if (obj_config->attrib_list[i].type == attrib->type) {
357 /* Update existing attribute */
358 obj_config->attrib_list[i].value = attrib->value;
359 return VA_STATUS_SUCCESS;
363 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
364 i = obj_config->num_attribs;
365 obj_config->attrib_list[i].type = attrib->type;
366 obj_config->attrib_list[i].value = attrib->value;
367 obj_config->num_attribs++;
368 return VA_STATUS_SUCCESS;
371 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig entry point: validate the (profile, entrypoint) pair
 * against device capabilities, allocate a config object, seed it with
 * the mandatory RTFormat attribute, then merge the caller's attributes.
 * On any failure after allocation the object is destroyed again.
 * NOTE(review): the switch header, break statements, else branches and
 * goto-style error exits are not visible in this extract. */
i965_CreateConfig(VADriverContextP ctx,
                  VAEntrypoint entrypoint,
                  VAConfigAttrib *attrib_list,
                  VAConfigID *config_id)        /* out */
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    struct object_config *obj_config;

    /* Validate profile & entrypoint */
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        /* H.264 additionally allows the EncSlice entrypoint on
         * encoder-capable devices */
        if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
            (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

    case VAProfileJPEGBaseline:
        if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
            vaStatus = VA_STATUS_SUCCESS;
            vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

    if (VA_STATUS_SUCCESS != vaStatus) {

    configID = NEW_CONFIG_ID();
    obj_config = CONFIG(configID);

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

    obj_config->profile = profile;
    obj_config->entrypoint = entrypoint;
    /* RTFormat is always present and always YUV 4:2:0 */
    obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
    obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
    obj_config->num_attribs = 1;

    for(i = 0; i < num_attribs; i++) {
        vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));

        if (VA_STATUS_SUCCESS != vaStatus) {

    /* Error recovery: drop the half-built config object */
    if (VA_STATUS_SUCCESS != vaStatus) {
        i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
    *config_id = configID;
/* vaDestroyConfig entry point: look up the config by ID and free it.
 * NOTE(review): the return of the invalid-config status is not visible
 * in this extract. */
i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_config *obj_config = CONFIG(config_id);

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;

    i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
    return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes entry point: copy a config's profile,
 * entrypoint and full attribute list back to the caller.
 * NOTE(review): no NULL check on obj_config is visible in this
 * extract — verify against upstream. */
VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
                                    VAConfigID config_id,
                                    VAProfile *profile,                 /* out */
                                    VAEntrypoint *entrypoint,           /* out */
                                    VAConfigAttrib *attrib_list,        /* out */
                                    int *num_attribs)                   /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_config *obj_config = CONFIG(config_id);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    *profile = obj_config->profile;
    *entrypoint = obj_config->entrypoint;
    *num_attribs = obj_config->num_attribs;

    for(i = 0; i < obj_config->num_attribs; i++) {
        attrib_list[i] = obj_config->attrib_list[i];
/* Tear down a surface object: drop its GEM buffer reference, run the
 * codec-private destructor if one was installed, then free the heap
 * slot.  dri_bo_unreference(NULL) is a safe no-op. */
i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
    struct object_surface *obj_surface = (struct object_surface *)obj;

    dri_bo_unreference(obj_surface->bo);
    obj_surface->bo = NULL;

    if (obj_surface->free_private_data != NULL) {
        obj_surface->free_private_data(&obj_surface->private_data);
        obj_surface->private_data = NULL;

    object_heap_free(heap, obj);
/* vaCreateSurfaces entry point: allocate num_surfaces YUV 4:2:0
 * surface objects.  Width/height are rounded up to the hardware's
 * alignment (16x16 on G4X/Ironlake, 128x32 otherwise); the backing bo
 * is allocated lazily (bo == NULL here).  On allocation failure the
 * already-created surfaces are destroyed again.
 * NOTE(review): the width/height/format parameters, else branch of the
 * alignment choice, and the cleanup loop header are not visible in
 * this extract. */
i965_CreateSurfaces(VADriverContextP ctx,
                    VASurfaceID *surfaces)      /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    /* We only support one format */
    if (VA_RT_FORMAT_YUV420 != format) {
        return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;

    for (i = 0; i < num_surfaces; i++) {
        int surfaceID = NEW_SURFACE_ID();
        struct object_surface *obj_surface = SURFACE(surfaceID);

        if (NULL == obj_surface) {
            vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

        surfaces[i] = surfaceID;
        obj_surface->status = VASurfaceReady;
        obj_surface->orig_width = width;
        obj_surface->orig_height = height;

        /* Older generations use 16x16 alignment; newer ones need
         * 128x32 (the second pair presumably sits in an else branch
         * not visible here — TODO confirm) */
        if (IS_G4X(i965->intel.device_id) || IS_IRONLAKE(i965->intel.device_id)) {
            obj_surface->width = ALIGN(width, 16);
            obj_surface->height = ALIGN(height, 16);
            obj_surface->width = ALIGN(width, 128);
            obj_surface->height = ALIGN(height, 32);

        /* No subpictures attached yet */
        obj_surface->subpic_render_idx = 0;
        for(j = 0; j < I965_MAX_SUBPIC_SUM; j++){
            obj_surface->subpic[j] = VA_INVALID_ID;

        obj_surface->flags = SURFACE_REFERENCED;
        obj_surface->fourcc = 0;
        obj_surface->bo = NULL;
        obj_surface->locked_image_id = VA_INVALID_ID;
        obj_surface->private_data = NULL;
        obj_surface->free_private_data = NULL;
        obj_surface->subsampling = SUBSAMPLE_YUV420;

    if (VA_STATUS_SUCCESS != vaStatus) {
        /* surfaces[i-1] was the last successful allocation */
            struct object_surface *obj_surface = SURFACE(surfaces[i]);

            surfaces[i] = VA_INVALID_SURFACE;

            i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaDestroySurfaces entry point: destroy each listed surface,
 * iterating in reverse order. */
i965_DestroySurfaces(VADriverContextP ctx,
                     VASurfaceID *surface_list,
    struct i965_driver_data *i965 = i965_driver_data(ctx);

    for (i = num_surfaces; i--; ) {
        struct object_surface *obj_surface = SURFACE(surface_list[i]);

        i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);

    return VA_STATUS_SUCCESS;
/* vaQueryImageFormats entry point: copy the static image-format table
 * (terminated by a zero fourcc) into the caller's list.
 * NOTE(review): the NULL guard on format_list and the *num_formats
 * store are not visible in this extract. */
i965_QueryImageFormats(VADriverContextP ctx,
                       VAImageFormat *format_list,      /* out */
                       int *num_formats)                /* out */
    for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
        const i965_image_format_map_t * const m = &i965_image_formats_map[n];

        format_list[n] = m->va_format;

    return VA_STATUS_SUCCESS;
/* vaPutImage entry point.
 * NOTE(review): this appears to be an unimplemented stub that reports
 * success without transferring any pixels — no copy path is visible
 * in this extract.  Verify against upstream before relying on it. */
i965_PutImage(VADriverContextP ctx,
              unsigned int src_width,
              unsigned int src_height,
              unsigned int dest_width,
              unsigned int dest_height)
    return VA_STATUS_SUCCESS;
/* vaQuerySubpictureFormats entry point: copy the static subpicture
 * format table (and its per-format flags) into the caller's arrays.
 * NOTE(review): NULL guards on format_list/flags and the *num_formats
 * store are not visible in this extract. */
i965_QuerySubpictureFormats(VADriverContextP ctx,
                            VAImageFormat *format_list, /* out */
                            unsigned int *flags,        /* out */
                            unsigned int *num_formats)  /* out */
    for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
        const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];

        format_list[n] = m->va_format;

        flags[n] = m->va_flags;

    return VA_STATUS_SUCCESS;
/* Release a subpicture object back to the subpic heap.  The object
 * holds no resource of its own (its bo belongs to the source image). */
i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
//    struct object_subpic *obj_subpic = (struct object_subpic *)obj;

    object_heap_free(heap, obj);
/* vaCreateSubpicture entry point: allocate a subpicture object bound
 * to an existing image.  The subpicture borrows the image's dimensions,
 * pitch and bo; global alpha defaults to fully opaque.
 * NOTE(review): the image parameter and the if-guards around the three
 * early returns are not visible in this extract.  Also note there is
 * no ';' after NEW_SUBPIC_ID() — presumably the macro expansion
 * supplies it; verify against the macro definition. */
i965_CreateSubpicture(VADriverContextP ctx,
                      VASubpictureID *subpicture)       /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VASubpictureID subpicID = NEW_SUBPIC_ID()
    struct object_subpic *obj_subpic = SUBPIC(subpicID);

        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    struct object_image *obj_image = IMAGE(image);

        return VA_STATUS_ERROR_INVALID_IMAGE;

    const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);

        return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */

    *subpicture = subpicID;
    obj_subpic->image = image;
    obj_subpic->format = m->format;
    obj_subpic->width = obj_image->image.width;
    obj_subpic->height = obj_image->image.height;
    obj_subpic->pitch = obj_image->image.pitches[0];
    obj_subpic->bo = obj_image->bo;
    obj_subpic->global_alpha = 1.0;

    return VA_STATUS_SUCCESS;
/* vaDestroySubpicture entry point: free the subpicture object.
 * NOTE(review): no NULL check on obj_subpic is visible here. */
i965_DestroySubpicture(VADriverContextP ctx,
                       VASubpictureID subpicture)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_subpic *obj_subpic = SUBPIC(subpicture);
    i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
    return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage entry point — not implemented by this driver. */
i965_SetSubpictureImage(VADriverContextP ctx,
                        VASubpictureID subpicture,
    return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey entry point — not implemented by this
 * driver (chroma-keying is not advertised in the format flags). */
i965_SetSubpictureChromakey(VADriverContextP ctx,
                            VASubpictureID subpicture,
                            unsigned int chromakey_min,
                            unsigned int chromakey_max,
                            unsigned int chromakey_mask)
    return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha entry point: store a global alpha factor
 * in [0.0, 1.0] on the subpicture; out-of-range values are rejected.
 * NOTE(review): the global_alpha parameter declaration and a NULL
 * check on obj_subpic are not visible in this extract. */
i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
                              VASubpictureID subpicture,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_subpic *obj_subpic = SUBPIC(subpicture);

    if(global_alpha > 1.0 || global_alpha < 0.0){
        return VA_STATUS_ERROR_INVALID_PARAMETER;
    obj_subpic->global_alpha = global_alpha;

    return VA_STATUS_SUCCESS;
/* vaAssociateSubpicture entry point: record the source/destination
 * rectangles and flags on the subpicture, then attach it to the first
 * free subpic slot of every target surface.  Fails when a surface is
 * invalid or all of its slots are taken.
 * NOTE(review): a NULL check on obj_subpic, the break after a slot is
 * claimed, and several closing braces are not visible in this
 * extract. */
i965_AssociateSubpicture(VADriverContextP ctx,
                         VASubpictureID subpicture,
                         VASurfaceID *target_surfaces,
                         short src_x, /* upper left offset in subpicture */
                         unsigned short src_width,
                         unsigned short src_height,
                         short dest_x, /* upper left offset in surface */
                         unsigned short dest_width,
                         unsigned short dest_height,
                          * whether to enable chroma-keying or global-alpha
                          * see VA_SUBPICTURE_XXX values
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_subpic *obj_subpic = SUBPIC(subpicture);

    obj_subpic->src_rect.x = src_x;
    obj_subpic->src_rect.y = src_y;
    obj_subpic->src_rect.width = src_width;
    obj_subpic->src_rect.height = src_height;
    obj_subpic->dst_rect.x = dest_x;
    obj_subpic->dst_rect.y = dest_y;
    obj_subpic->dst_rect.width = dest_width;
    obj_subpic->dst_rect.height = dest_height;
    obj_subpic->flags = flags;

    for (i = 0; i < num_surfaces; i++) {
        struct object_surface *obj_surface = SURFACE(target_surfaces[i]);

            return VA_STATUS_ERROR_INVALID_SURFACE;

        /* Find a free subpicture slot on this surface */
        for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
            if(obj_surface->subpic[j] == VA_INVALID_ID){
                obj_surface->subpic[j] = subpicture;

        /* No free slot found on this surface */
        if(j == I965_MAX_SUBPIC_SUM){
            return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;

    return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture entry point: detach the subpicture from each
 * target surface by clearing the matching slot.
 * NOTE(review): mirrors i965_AssociateSubpicture; the break after the
 * match and several braces are not visible in this extract.  Returning
 * MAX_NUM_EXCEEDED when the subpicture was never attached looks like a
 * questionable status choice — verify against upstream. */
i965_DeassociateSubpicture(VADriverContextP ctx,
                           VASubpictureID subpicture,
                           VASurfaceID *target_surfaces,
    struct i965_driver_data *i965 = i965_driver_data(ctx);

    for (i = 0; i < num_surfaces; i++) {
        struct object_surface *obj_surface = SURFACE(target_surfaces[i]);

            return VA_STATUS_ERROR_INVALID_SURFACE;

        for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
            if(obj_surface->subpic[j] == subpicture){
                obj_surface->subpic[j] = VA_INVALID_ID;

        if(j == I965_MAX_SUBPIC_SUM){
            return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;

    return VA_STATUS_SUCCESS;
/* Take a reference on buffer_store and park it in *ptr, which must be
 * empty (asserted).
 * NOTE(review): the *ptr assignment line is not visible in this
 * extract. */
i965_reference_buffer_store(struct buffer_store **ptr,
                            struct buffer_store *buffer_store)
    assert(*ptr == NULL);

        buffer_store->ref_count++;
/* Drop one reference on *ptr's buffer store; when the count hits zero,
 * release the backing storage (exactly one of bo/buffer is set —
 * asserted) and free the store itself.
 * NOTE(review): the free(buffer_store) and *ptr = NULL lines are not
 * visible in this extract. */
i965_release_buffer_store(struct buffer_store **ptr)
    struct buffer_store *buffer_store = *ptr;

    if (buffer_store == NULL)

    /* Exactly one backing kind: GEM bo or malloc'ed buffer */
    assert(buffer_store->bo || buffer_store->buffer);
    assert(!(buffer_store->bo && buffer_store->buffer));
    buffer_store->ref_count--;

    if (buffer_store->ref_count == 0) {
        dri_bo_unreference(buffer_store->bo);
        free(buffer_store->buffer);
        buffer_store->bo = NULL;
        buffer_store->buffer = NULL;
/* Tear down a context object: destroy the codec hw_context, release
 * every buffered parameter (encode or decode state depending on
 * codec_type), free the slice arrays and render-target list, then
 * free the heap slot.
 * NOTE(review): the else between the CODEC_ENC and decode branches
 * and several closing braces are not visible in this extract. */
i965_destroy_context(struct object_heap *heap, struct object_base *obj)
    struct object_context *obj_context = (struct object_context *)obj;

    if (obj_context->hw_context) {
        obj_context->hw_context->destroy(obj_context->hw_context);
        obj_context->hw_context = NULL;

    if (obj_context->codec_type == CODEC_ENC) {
        assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
        i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);

        for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
            i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);

        free(obj_context->codec_state.encode.slice_params);

        /* Decode path (presumably the else branch — line not visible) */
        assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
        assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);

        i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
        i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
        i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);

        for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);

        for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
            i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);

        free(obj_context->codec_state.decode.slice_params);
        free(obj_context->codec_state.decode.slice_datas);

    free(obj_context->render_targets);
    object_heap_free(heap, obj);
/* vaCreateContext entry point: validate the config and picture size,
 * allocate a context object, set up the render state, record the
 * render-target list, then initialise either the encoder or decoder
 * hardware context depending on the config's entrypoint.  On any
 * failure the half-built context is destroyed again.
 * NOTE(review): the picture_width/height/flag parameters, several
 * else branches, break statements and closing braces are not visible
 * in this extract. */
i965_CreateContext(VADriverContextP ctx,
                   VAConfigID config_id,
                   VASurfaceID *render_targets,
                   int num_render_targets,
                   VAContextID *context)                /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_render_state *render_state = &i965->render_state;
    struct object_config *obj_config = CONFIG(config_id);
    struct object_context *obj_context = NULL;
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    if (NULL == obj_config) {
        vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;

    /* Reject dimensions beyond what this generation's codecs handle */
    if (picture_width > i965->codec_info->max_width ||
        picture_height > i965->codec_info->max_height) {
        vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;

    /* Validate picture dimensions */
    contextID = NEW_CONTEXT_ID();
    obj_context = CONTEXT(contextID);

    if (NULL == obj_context) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;

    render_state->inited = 1;

    switch (obj_config->profile) {
    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
            return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
        /* H.264 requires NV12 (interleaved UV) render targets */
        render_state->interleaved_uv = 1;
        /* Other profiles: NV12 only on GEN6/GEN7 (presumably the
         * default switch branch — lines not visible here) */
        render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));

    *context = contextID;
    obj_context->flags = flag;
    obj_context->context_id = contextID;
    obj_context->config_id = config_id;
    obj_context->picture_width = picture_width;
    obj_context->picture_height = picture_height;
    obj_context->num_render_targets = num_render_targets;
    obj_context->render_targets =
        (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
    obj_context->hw_context = NULL;

    for(i = 0; i < num_render_targets; i++) {
        if (NULL == SURFACE(render_targets[i])) {
            vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;

        obj_context->render_targets[i] = render_targets[i];

    if (VA_STATUS_SUCCESS == vaStatus) {
        if (VAEntrypointEncSlice == obj_config->entrypoint ) { /*encode routin only*/
            obj_context->codec_type = CODEC_ENC;
            memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
            obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
            obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
            obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
                                                                  sizeof(*obj_context->codec_state.encode.slice_params));
            assert(i965->codec_info->enc_hw_context_init);
            obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
            /* Decode path (presumably the else branch — line not visible) */
            obj_context->codec_type = CODEC_DEC;
            memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
            obj_context->codec_state.decode.current_render_target = -1;
            obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
            obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
            obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
                                                                  sizeof(*obj_context->codec_state.decode.slice_params));
            obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
                                                                 sizeof(*obj_context->codec_state.decode.slice_datas));

            assert(i965->codec_info->dec_hw_context_init);
            obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);

    /* Error recovery */
    if (VA_STATUS_SUCCESS != vaStatus) {
        i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
/* vaDestroyContext entry point: look up the context by ID (asserted
 * valid) and destroy it. */
i965_DestroyContext(VADriverContextP ctx, VAContextID context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_context *obj_context = CONTEXT(context);

    assert(obj_context);
    i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);

    return VA_STATUS_SUCCESS;
/* Tear down a buffer object: drop its buffer-store reference and free
 * the heap slot. */
i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
    struct object_buffer *obj_buffer = (struct object_buffer *)obj;

    assert(obj_buffer->buffer_store);
    i965_release_buffer_store(&obj_buffer->buffer_store);
    object_heap_free(heap, obj);
/* Common worker for vaCreateBuffer (and internal callers that already
 * hold a bo): validate the buffer type, allocate a buffer object and a
 * reference-counted buffer store.  Slice-data/image/coded buffers are
 * backed by a GEM bo; everything else by a malloc'ed buffer.  Coded
 * buffers reserve an aligned VACodedBufferSegment header at the start
 * and initialise it in place.
 * NOTE(review): the type/size/data/store_bo parameters, the switch
 * header, the else-branch headers and the bufferID store are not
 * visible in this extract. */
i965_create_buffer_internal(VADriverContextP ctx,
                            VAContextID context,
                            unsigned int num_elements,
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = NULL;
    struct buffer_store *buffer_store = NULL;

    /* Only these buffer types are accepted */
    case VAPictureParameterBufferType:
    case VAIQMatrixBufferType:
    case VABitPlaneBufferType:
    case VASliceGroupMapBufferType:
    case VASliceParameterBufferType:
    case VASliceDataBufferType:
    case VAMacroblockParameterBufferType:
    case VAResidualDataBufferType:
    case VADeblockingParameterBufferType:
    case VAImageBufferType:
    case VAEncCodedBufferType:
    case VAEncSequenceParameterBufferType:
    case VAEncPictureParameterBufferType:
    case VAEncSliceParameterBufferType:
#ifdef HAVE_VA_JPEG_DECODE
    case VAHuffmanTableBufferType:
        return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;

    bufferID = NEW_BUFFER_ID();
    obj_buffer = BUFFER(bufferID);

    if (NULL == obj_buffer) {
        return VA_STATUS_ERROR_ALLOCATION_FAILED;

    /* Coded buffers carry a VACodedBufferSegment header before the
     * actual bitstream data */
    if (type == VAEncCodedBufferType) {
        size += ALIGN(sizeof(VACodedBufferSegment), 64);

    obj_buffer->max_num_elements = num_elements;
    obj_buffer->num_elements = num_elements;
    obj_buffer->size_element = size;
    obj_buffer->type = type;
    obj_buffer->buffer_store = NULL;
    buffer_store = calloc(1, sizeof(struct buffer_store));
    assert(buffer_store);
    buffer_store->ref_count = 1;

    if (store_bo != NULL) {
        /* Caller-supplied bo: take a reference, optionally seed data */
        buffer_store->bo = store_bo;
        dri_bo_reference(buffer_store->bo);

            dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
    } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
        /* Large data buffers live in a GEM bo */
        buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
                                        size * num_elements, 64);
        assert(buffer_store->bo);

        if (type == VAEncCodedBufferType) {
            /* Initialise the segment header at the front of the bo */
            VACodedBufferSegment *coded_buffer_segment;
            dri_bo_map(buffer_store->bo, 1);
            coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
            coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
            coded_buffer_segment->bit_offset = 0;
            coded_buffer_segment->status = 0;
            coded_buffer_segment->buf = NULL;
            coded_buffer_segment->next = NULL;
            dri_bo_unmap(buffer_store->bo);
            dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);

        /* Small parameter buffers live in plain heap memory
         * (presumably the final else branch — line not visible) */
        buffer_store->buffer = malloc(size * num_elements);
        assert(buffer_store->buffer);

            memcpy(buffer_store->buffer, data, size * num_elements);

    buffer_store->num_elements = obj_buffer->num_elements;
    i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
    /* Drop the local reference; the object now owns the store */
    i965_release_buffer_store(&buffer_store);

    return VA_STATUS_SUCCESS;
/* vaCreateBuffer entry point: thin wrapper that forwards to
 * i965_create_buffer_internal with no pre-existing bo. */
i965_CreateBuffer(VADriverContextP ctx,
                  VAContextID context,          /* in */
                  VABufferType type,            /* in */
                  unsigned int size,            /* in */
                  unsigned int num_elements,    /* in */
                  void *data,                   /* in */
                  VABufferID *buf_id)           /* out */
    return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1163 i965_BufferSetNumElements(VADriverContextP ctx,
1164 VABufferID buf_id, /* in */
1165 unsigned int num_elements) /* in */
1167 struct i965_driver_data *i965 = i965_driver_data(ctx);
1168 struct object_buffer *obj_buffer = BUFFER(buf_id);
1169 VAStatus vaStatus = VA_STATUS_SUCCESS;
1173 if ((num_elements < 0) ||
1174 (num_elements > obj_buffer->max_num_elements)) {
1175 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1177 obj_buffer->num_elements = num_elements;
1178 if (obj_buffer->buffer_store != NULL) {
1179 obj_buffer->buffer_store->num_elements = num_elements;
/* vaMapBuffer entry point: expose the buffer's storage to the caller.
 * GEM-backed buffers are mapped (via the GTT when tiled, to get the
 * CPU-linear view); malloc-backed buffers return the pointer directly.
 * For coded buffers the segment header's buf pointer is fixed up to
 * point past the aligned header.
 * NOTE(review): the else between the GTT and regular map calls, and
 * the final return, are not visible in this extract. */
i965_MapBuffer(VADriverContextP ctx,
               VABufferID buf_id,       /* in */
               void **pbuf)             /* out */
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_buffer *obj_buffer = BUFFER(buf_id);
    VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;

    assert(obj_buffer && obj_buffer->buffer_store);
    assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
    assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));

    if (NULL != obj_buffer->buffer_store->bo) {
        unsigned int tiling, swizzle;

        dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);

        /* Tiled bos must be mapped through the GTT for linear access
         * (dri_bo_map below presumably sits in the else — line not
         * visible) */
        if (tiling != I915_TILING_NONE)
            drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
            dri_bo_map(obj_buffer->buffer_store->bo, 1);

        assert(obj_buffer->buffer_store->bo->virtual);
        *pbuf = obj_buffer->buffer_store->bo->virtual;

        if (obj_buffer->type == VAEncCodedBufferType) {
            /* Point the segment at the bitstream past the header */
            VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
            coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);

        vaStatus = VA_STATUS_SUCCESS;
    } else if (NULL != obj_buffer->buffer_store->buffer) {
        *pbuf = obj_buffer->buffer_store->buffer;
        vaStatus = VA_STATUS_SUCCESS;
1227 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1229 struct i965_driver_data *i965 = i965_driver_data(ctx);
1230 struct object_buffer *obj_buffer = BUFFER(buf_id);
1231 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1233 assert(obj_buffer && obj_buffer->buffer_store);
1234 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1235 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1237 if (NULL != obj_buffer->buffer_store->bo) {
1238 unsigned int tiling, swizzle;
1240 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1242 if (tiling != I915_TILING_NONE)
1243 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1245 dri_bo_unmap(obj_buffer->buffer_store->bo);
1247 vaStatus = VA_STATUS_SUCCESS;
1248 } else if (NULL != obj_buffer->buffer_store->buffer) {
1250 vaStatus = VA_STATUS_SUCCESS;
/* vaDestroyBuffer: look up the buffer object and return it to the
 * driver's buffer heap (which drops its backing store). */
1257 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1259 struct i965_driver_data *i965 = i965_driver_data(ctx);
1260 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1263 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1265 return VA_STATUS_SUCCESS;
/* vaBeginPicture: start a new frame on `context`, targeting
 * `render_target`.  Validates that the context's configured profile is
 * one this driver can decode/encode, then clears out any parameter
 * buffers left over from the previous frame so the render calls that
 * follow start from a clean codec state. */
1269 i965_BeginPicture(VADriverContextP ctx,
1270 VAContextID context,
1271 VASurfaceID render_target)
1273 struct i965_driver_data *i965 = i965_driver_data(ctx);
1274 struct object_context *obj_context = CONTEXT(context);
1275 struct object_surface *obj_surface = SURFACE(render_target);
1276 struct object_config *obj_config;
1281 assert(obj_context);
1282 assert(obj_surface);
1284 config = obj_context->config_id;
1285 obj_config = CONFIG(config);
/* Accept only the profiles the hardware paths below implement. */
1288 switch (obj_config->profile) {
1289 case VAProfileMPEG2Simple:
1290 case VAProfileMPEG2Main:
1291 vaStatus = VA_STATUS_SUCCESS;
1294 case VAProfileH264Baseline:
1295 case VAProfileH264Main:
1296 case VAProfileH264High:
1297 vaStatus = VA_STATUS_SUCCESS;
1300 case VAProfileVC1Simple:
1301 case VAProfileVC1Main:
1302 case VAProfileVC1Advanced:
1303 vaStatus = VA_STATUS_SUCCESS;
1306 case VAProfileJPEGBaseline:
1307 vaStatus = VA_STATUS_SUCCESS;
1312 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Release stale per-frame state: encode contexts keep pic/seq params
 * plus a slice-parameter array; decode contexts also hold iq-matrix,
 * bit-plane, huffman-table and per-slice data stores. */
1316 if (obj_context->codec_type == CODEC_ENC) {
1317 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1318 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1320 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1321 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1324 obj_context->codec_state.encode.num_slice_params = 0;
1325 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
1327 obj_context->codec_state.decode.current_render_target = render_target;
1328 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1329 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1330 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1331 i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);
1333 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1334 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1335 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1338 obj_context->codec_state.decode.num_slice_params = 0;
1339 obj_context->codec_state.decode.num_slice_datas = 0;
/* Macro family generating the per-buffer-type "render" helpers that copy
 * a client buffer's backing store into the context's codec_state.
 * SINGLE variants hold one buffer per slot, releasing any previous
 * occupant; MULTI variants append to a growable array enlarged in
 * NUM_SLICES-sized steps. */
1345 #define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)
1347 #define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member) \
1349 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1350 struct object_context *obj_context, \
1351 struct object_buffer *obj_buffer) \
1353 struct category##_state *category = &obj_context->codec_state.category; \
1354 assert(obj_buffer->buffer_store->bo == NULL); \
1355 assert(obj_buffer->buffer_store->buffer); \
1356 i965_release_buffer_store(&category->member); \
1357 i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
1358 return VA_STATUS_SUCCESS; \
1361 #define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member) \
1363 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1364 struct object_context *obj_context, \
1365 struct object_buffer *obj_buffer) \
1367 struct category##_state *category = &obj_context->codec_state.category; \
1368 if (category->num_##member == category->max_##member) { \
/* NOTE(review): realloc's result is assigned straight back to the
 * member pointer — on allocation failure the old array leaks and the
 * subsequent memset dereferences NULL; consider a temporary. */ \
1369 category->member = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
1370 memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
1371 category->max_##member += NUM_SLICES; \
1373 i965_release_buffer_store(&category->member[category->num_##member]); \
1374 i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
1375 category->num_##member++; \
1376 return VA_STATUS_SUCCESS; \
/* Decode-side instantiations: single-slot stores... */
1379 #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
1381 #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
1382 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1383 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
1384 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
1385 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
/* ...and per-slice growable arrays. */
1387 #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
1388 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1389 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/* Dispatch each submitted decode buffer to the matching codec_state
 * slot.  Stops at the first buffer whose type fails to store. */
1392 i965_decoder_render_picture(VADriverContextP ctx,
1393 VAContextID context,
1394 VABufferID *buffers,
1397 struct i965_driver_data *i965 = i965_driver_data(ctx);
1398 struct object_context *obj_context = CONTEXT(context);
1399 VAStatus vaStatus = VA_STATUS_SUCCESS;
/* Bail out of the loop as soon as any buffer is rejected. */
1402 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1403 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1406 switch (obj_buffer->type) {
1407 case VAPictureParameterBufferType:
1408 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1411 case VAIQMatrixBufferType:
1412 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1415 case VABitPlaneBufferType:
1416 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1419 case VASliceParameterBufferType:
1420 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1423 case VASliceDataBufferType:
1424 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
/* Huffman tables are only consumed by the JPEG decode path. */
1427 #ifdef HAVE_VA_JPEG_DECODE
1428 case VAHuffmanTableBufferType:
1429 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1434 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Encode-side instantiations of the render-buffer helpers: single-slot
 * sequence/picture/control/matrix stores plus a growable slice-parameter
 * array. */
1442 #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
1444 #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
1445 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
1446 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1447 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
1448 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
1449 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
1451 #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
1452 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
/* Dispatch each submitted encode buffer to the matching codec_state
 * slot; mirrors i965_decoder_render_picture for the encode path. */
1455 i965_encoder_render_picture(VADriverContextP ctx,
1456 VAContextID context,
1457 VABufferID *buffers,
1460 struct i965_driver_data *i965 = i965_driver_data(ctx);
1461 struct object_context *obj_context = CONTEXT(context);
1462 VAStatus vaStatus = VA_STATUS_SUCCESS;
1465 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1466 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1469 switch (obj_buffer->type) {
1470 case VAEncSequenceParameterBufferType:
1471 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter);
1474 case VAEncPictureParameterBufferType:
1475 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter);
1478 case VAEncSliceParameterBufferType:
1479 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter);
/* Decode-style picture parameters double as "picture control" data
 * on the encode path. */
1482 case VAPictureParameterBufferType:
1483 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_control);
1486 case VAQMatrixBufferType:
1487 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1490 case VAIQMatrixBufferType:
1491 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1495 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* vaRenderPicture: route the submitted buffers to the encoder or
 * decoder handler based on the context's configured entrypoint. */
1504 i965_RenderPicture(VADriverContextP ctx,
1505 VAContextID context,
1506 VABufferID *buffers,
1509 struct i965_driver_data *i965 = i965_driver_data(ctx);
1510 struct object_context *obj_context;
1511 struct object_config *obj_config;
1513 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1515 obj_context = CONTEXT(context);
1516 assert(obj_context);
1518 config = obj_context->config_id;
1519 obj_config = CONFIG(config);
/* Only the slice-level encode entrypoint goes to the encoder path;
 * everything else is treated as decode. */
1522 if (VAEntrypointEncSlice == obj_config->entrypoint ){
1523 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1525 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/* vaEndPicture: verify the per-frame codec state accumulated by
 * RenderPicture is complete, then hand it to the hardware backend's
 * run() hook to kick off the actual decode/encode. */
1532 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1534 struct i965_driver_data *i965 = i965_driver_data(ctx);
1535 struct object_context *obj_context = CONTEXT(context);
1536 struct object_config *obj_config;
1539 assert(obj_context);
1540 config = obj_context->config_id;
1541 obj_config = CONFIG(config);
/* Encode needs pic/seq params and at least one slice; decode needs a
 * pic param plus matched slice-param/slice-data pairs. */
1544 if (obj_context->codec_type == CODEC_ENC) {
1545 assert(VAEntrypointEncSlice == obj_config->entrypoint);
1547 assert(obj_context->codec_state.encode.pic_param);
1548 assert(obj_context->codec_state.encode.seq_param);
1549 assert(obj_context->codec_state.encode.num_slice_params >= 1);
1551 assert(obj_context->codec_state.decode.pic_param);
1552 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1553 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1554 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
1557 assert(obj_context->hw_context->run);
1558 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1560 return VA_STATUS_SUCCESS;
/* vaSyncSurface: block until all GPU rendering targeting the surface's
 * buffer object has completed. */
1564 i965_SyncSurface(VADriverContextP ctx,
1565 VASurfaceID render_target)
1567 struct i965_driver_data *i965 = i965_driver_data(ctx);
1568 struct object_surface *obj_surface = SURFACE(render_target);
1570 assert(obj_surface);
1573 drm_intel_bo_wait_rendering(obj_surface->bo);
1575 return VA_STATUS_SUCCESS;
/* vaQuerySurfaceStatus: non-blocking check — the surface is
 * VASurfaceRendering while the GPU still holds its BO busy, otherwise
 * VASurfaceReady.  A surface with no BO yet is trivially ready. */
1579 i965_QuerySurfaceStatus(VADriverContextP ctx,
1580 VASurfaceID render_target,
1581 VASurfaceStatus *status) /* out */
1583 struct i965_driver_data *i965 = i965_driver_data(ctx);
1584 struct object_surface *obj_surface = SURFACE(render_target);
1586 assert(obj_surface);
1588 if (obj_surface->bo) {
1589 if (drm_intel_bo_busy(obj_surface->bo)){
1590 *status = VASurfaceRendering;
1593 *status = VASurfaceReady;
1596 *status = VASurfaceReady;
1599 return VA_STATUS_SUCCESS;
/* Linear scan of the driver's display-attribute table for the entry of
 * the given type; returns NULL if the table is absent or the type is
 * not present. */
1602 static VADisplayAttribute *
1603 get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
1605 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1608 if (!i965->display_attributes)
1611 for (i = 0; i < i965->num_display_attributes; i++) {
1612 if (i965->display_attributes[i].type == type)
1613 return &i965->display_attributes[i];
/* Allocate the driver's mutable copy of the static display-attribute
 * template table and cache a pointer to the rotation attribute.
 * Returns a failure value (via the branches below) when allocation or
 * the rotation lookup fails. */
1619 i965_display_attributes_init(VADriverContextP ctx)
1621 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1623 i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
1624 i965->display_attributes = malloc(
1625 i965->num_display_attributes * sizeof(i965->display_attributes[0]));
1626 if (!i965->display_attributes)
/* Seed the mutable table from the static defaults. */
1630 i965->display_attributes,
1631 i965_display_attributes,
1632 sizeof(i965_display_attributes)
1635 i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
1636 if (!i965->rotation_attrib)
/* Free the mutable display-attribute table and reset the bookkeeping
 * so a later re-init starts clean. */
1642 i965_display_attributes_terminate(VADriverContextP ctx)
1644 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1646 if (i965->display_attributes) {
1647 free(i965->display_attributes);
1648 i965->display_attributes = NULL;
1649 i965->num_display_attributes = 0;
1654 * Query display attributes
1655 * The caller must provide a "attr_list" array that can hold at
1656 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1657 * returned in "attr_list" is returned in "num_attributes".
1660 i965_QueryDisplayAttributes(
1661 VADriverContextP ctx,
1662 VADisplayAttribute *attribs, /* out */
1663 int *num_attribs_ptr /* out */
1666 const int num_attribs = ARRAY_ELEMS(i965_display_attributes);
/* Copy the static template table wholesale; both output pointers are
 * optional. */
1668 if (attribs && num_attribs > 0)
1669 memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));
1671 if (num_attribs_ptr)
1672 *num_attribs_ptr = num_attribs;
1674 return VA_STATUS_SUCCESS;
1678 * Get display attributes
1679 * This function returns the current attribute values in "attr_list".
1680 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1681 * from vaQueryDisplayAttributes() can have their values retrieved.
1684 i965_GetDisplayAttributes(
1685 VADriverContextP ctx,
1686 VADisplayAttribute *attribs, /* inout */
1687 int num_attribs /* in */
1692 for (i = 0; i < num_attribs; i++) {
1693 VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];
1695 src_attrib = get_display_attribute(ctx, dst_attrib->type);
/* Copy range and current value for gettable attributes; anything
 * else is flagged unsupported in the caller's array. */
1696 if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
1697 dst_attrib->min_value = src_attrib->min_value;
1698 dst_attrib->max_value = src_attrib->max_value;
1699 dst_attrib->value = src_attrib->value;
1702 dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;
1704 return VA_STATUS_SUCCESS;
1708 * Set display attributes
1709 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1710 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1711 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
1714 i965_SetDisplayAttributes(
1715 VADriverContextP ctx,
1716 VADisplayAttribute *attribs, /* in */
1717 int num_attribs /* in */
1722 for (i = 0; i < num_attribs; i++) {
1723 VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];
1725 dst_attrib = get_display_attribute(ctx, src_attrib->type);
/* Unknown attribute type: fail the whole call. */
1727 return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;
1729 if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))
/* Range-check before committing the new value. */
1732 if (src_attrib->value < dst_attrib->min_value ||
1733 src_attrib->value > dst_attrib->max_value)
1734 return VA_STATUS_ERROR_INVALID_PARAMETER;
1736 dst_attrib->value = src_attrib->value;
1737 /* XXX: track modified attributes through timestamps */
1739 return VA_STATUS_SUCCESS;
/* Debug hook from the VA driver vtable; intentionally unimplemented in
 * this driver. */
1743 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1744 VASurfaceID surface,
1745 void **buffer, /* out */
1746 unsigned int *stride) /* out */
1749 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* Driver bring-up: open the DRM device, select the per-generation codec
 * table from the PCI device id, create the batch buffer, then initialize
 * display attributes, post-processing, the 3D render path, and the
 * window-system output backend (Wayland or DRI/X11).  Any failure aborts
 * with VA_STATUS_ERROR_UNKNOWN. */
1753 i965_Init(VADriverContextP ctx)
1755 struct i965_driver_data *i965 = i965_driver_data(ctx);
1757 if (intel_driver_init(ctx) == False)
1758 return VA_STATUS_ERROR_UNKNOWN;
/* NOTE: Haswell must be tested before the generic GEN7 check, since it
 * would otherwise match IS_GEN7 as well. */
1760 if (IS_HASWELL(i965->intel.device_id))
1761 i965->codec_info = &gen75_hw_codec_info;
1762 else if (IS_G4X(i965->intel.device_id))
1763 i965->codec_info = &g4x_hw_codec_info;
1764 else if (IS_IRONLAKE(i965->intel.device_id))
1765 i965->codec_info = &ironlake_hw_codec_info;
1766 else if (IS_GEN6(i965->intel.device_id))
1767 i965->codec_info = &gen6_hw_codec_info;
1768 else if (IS_GEN7(i965->intel.device_id))
1769 i965->codec_info = &gen7_hw_codec_info;
1771 return VA_STATUS_ERROR_UNKNOWN;
1773 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER, 0);
1775 if (!i965_display_attributes_init(ctx))
1776 return VA_STATUS_ERROR_UNKNOWN;
1778 if (i965_post_processing_init(ctx) == False)
1779 return VA_STATUS_ERROR_UNKNOWN;
1781 if (i965_render_init(ctx) == False)
1782 return VA_STATUS_ERROR_UNKNOWN;
1784 #ifdef HAVE_VA_WAYLAND
1785 if (IS_VA_WAYLAND(ctx) && !i965_output_wayland_init(ctx))
1786 return VA_STATUS_ERROR_UNKNOWN;
1790 if (IS_VA_X11(ctx) && !i965_output_dri_init(ctx))
1791 return VA_STATUS_ERROR_UNKNOWN;
1794 _i965InitMutex(&i965->render_mutex);
1796 return VA_STATUS_SUCCESS;
/* Walk an object heap, invoking `func` on every live object, then tear
 * the heap itself down. */
1800 i965_destroy_heap(struct object_heap *heap,
1801 void (*func)(struct object_heap *heap, struct object_base *object))
1803 struct object_base *object;
1804 object_heap_iterator iter;
1806 object = object_heap_first(heap, &iter);
1812 object = object_heap_next(heap, &iter);
1815 object_heap_destroy(heap);
/* Forward declaration: CreateImage's error path destroys the half-built
 * image. */
1820 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/* vaCreateImage: allocate an image object of the requested fourcc and
 * dimensions, compute per-plane pitches/offsets, create the backing
 * VAImageBufferType buffer, and (for paletted formats) allocate the
 * palette array. */
1823 i965_CreateImage(VADriverContextP ctx,
1824 VAImageFormat *format,
1827 VAImage *out_image) /* out */
1829 struct i965_driver_data *i965 = i965_driver_data(ctx);
1830 struct object_image *obj_image;
1831 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1833 unsigned int width2, height2, size2, size;
1835 out_image->image_id = VA_INVALID_ID;
1836 out_image->buf = VA_INVALID_ID;
1838 image_id = NEW_IMAGE_ID();
1839 if (image_id == VA_INVALID_ID)
1840 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1842 obj_image = IMAGE(image_id);
1844 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1845 obj_image->bo = NULL;
1846 obj_image->palette = NULL;
1847 obj_image->derived_surface = VA_INVALID_ID;
1849 VAImage * const image = &obj_image->image;
1850 image->image_id = image_id;
1851 image->buf = VA_INVALID_ID;
/* Luma plane size plus half-resolution chroma plane size, used by the
 * planar YUV layouts below. */
1853 size = width * height;
1854 width2 = (width + 1) / 2;
1855 height2 = (height + 1) / 2;
1856 size2 = width2 * height2;
1858 image->num_palette_entries = 0;
1859 image->entry_bytes = 0;
1860 memset(image->component_order, 0, sizeof(image->component_order));
/* Per-fourcc plane layout. */
1862 switch (format->fourcc) {
1863 case VA_FOURCC('I','A','4','4'):
1864 case VA_FOURCC('A','I','4','4'):
1865 image->num_planes = 1;
1866 image->pitches[0] = width;
1867 image->offsets[0] = 0;
1868 image->data_size = image->offsets[0] + image->pitches[0] * height;
1869 image->num_palette_entries = 16;
1870 image->entry_bytes = 3;
1871 image->component_order[0] = 'R';
1872 image->component_order[1] = 'G';
1873 image->component_order[2] = 'B';
1875 case VA_FOURCC('I','A','8','8'):
1876 case VA_FOURCC('A','I','8','8'):
1877 image->num_planes = 1;
1878 image->pitches[0] = width * 2;
1879 image->offsets[0] = 0;
1880 image->data_size = image->offsets[0] + image->pitches[0] * height;
1881 image->num_palette_entries = 256;
1882 image->entry_bytes = 3;
1883 image->component_order[0] = 'R';
1884 image->component_order[1] = 'G';
1885 image->component_order[2] = 'B';
1887 case VA_FOURCC('A','R','G','B'):
1888 case VA_FOURCC('A','B','G','R'):
1889 case VA_FOURCC('B','G','R','A'):
1890 case VA_FOURCC('R','G','B','A'):
1891 image->num_planes = 1;
1892 image->pitches[0] = width * 4;
1893 image->offsets[0] = 0;
1894 image->data_size = image->offsets[0] + image->pitches[0] * height;
/* YV12: Y plane, then Cr, then Cb (V before U). */
1896 case VA_FOURCC('Y','V','1','2'):
1897 image->num_planes = 3;
1898 image->pitches[0] = width;
1899 image->offsets[0] = 0;
1900 image->pitches[1] = width2;
1901 image->offsets[1] = size + size2;
1902 image->pitches[2] = width2;
1903 image->offsets[2] = size;
1904 image->data_size = size + 2 * size2;
/* I420: Y plane, then Cb, then Cr (U before V). */
1906 case VA_FOURCC('I','4','2','0'):
1907 image->num_planes = 3;
1908 image->pitches[0] = width;
1909 image->offsets[0] = 0;
1910 image->pitches[1] = width2;
1911 image->offsets[1] = size;
1912 image->pitches[2] = width2;
1913 image->offsets[2] = size + size2;
1914 image->data_size = size + 2 * size2;
/* NV12: Y plane followed by one interleaved CbCr plane. */
1916 case VA_FOURCC('N','V','1','2'):
1917 image->num_planes = 2;
1918 image->pitches[0] = width;
1919 image->offsets[0] = 0;
1920 image->pitches[1] = width;
1921 image->offsets[1] = size;
1922 image->data_size = size + 2 * size2;
1928 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
1929 image->data_size, 1, NULL, &image->buf);
1930 if (va_status != VA_STATUS_SUCCESS)
/* Keep a reference on the buffer's BO so the image owns it too. */
1933 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1934 dri_bo_reference(obj_image->bo);
1936 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
1937 obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette));
1938 if (!obj_image->palette)
1942 image->image_id = image_id;
1943 image->format = *format;
1944 image->width = width;
1945 image->height = height;
1947 *out_image = *image;
1948 return VA_STATUS_SUCCESS;
/* Error path: release the partially constructed image. */
1951 i965_DestroyImage(ctx, image_id);
/* Lazily allocate the GEM buffer object behind a surface, computing the
 * plane layout (pitches, Y/Cb/Cr offsets, total size) for the requested
 * fourcc and chroma subsampling.  If the surface already has a BO it
 * must have been allocated with the same fourcc/subsampling.  Tiled
 * surfaces (NV12/IMC1/IMC3) use Y-tiling with 128x32-aligned
 * dimensions; linear surfaces get a plain BO. */
1956 i965_check_alloc_surface_bo(VADriverContextP ctx,
1957 struct object_surface *obj_surface,
1959 unsigned int fourcc,
1960 unsigned int subsampling)
1962 struct i965_driver_data *i965 = i965_driver_data(ctx);
1963 int region_width, region_height;
1965 if (obj_surface->bo) {
1966 assert(obj_surface->fourcc);
1967 assert(obj_surface->fourcc == fourcc);
1968 assert(obj_surface->subsampling == subsampling);
1972 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
1973 obj_surface->x_cr_offset = 0;
/* --- Tiled path: only NV12/IMC1/IMC3 are supported. --- */
1976 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
1977 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
1978 fourcc == VA_FOURCC('I', 'M', 'C', '3'));
/* Y-tiling requires 128-pixel-wide, 32-row-high alignment. */
1980 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
1981 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
1982 obj_surface->cb_cr_pitch = obj_surface->width;
1983 region_width = obj_surface->width;
1984 region_height = obj_surface->height;
1986 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
1987 assert(subsampling == SUBSAMPLE_YUV420);
1988 obj_surface->y_cb_offset = obj_surface->height;
1989 obj_surface->y_cr_offset = obj_surface->height;
1990 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
1991 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
1992 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
/* IMC1/IMC3 carry separate Cb and Cr planes whose dimensions depend
 * on the JPEG-style subsampling mode. */
1993 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
1994 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
1995 switch (subsampling) {
1996 case SUBSAMPLE_YUV400:
1997 obj_surface->cb_cr_width = 0;
1998 obj_surface->cb_cr_height = 0;
2001 case SUBSAMPLE_YUV420:
2002 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2003 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2006 case SUBSAMPLE_YUV422H:
2007 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2008 obj_surface->cb_cr_height = obj_surface->orig_height;
2011 case SUBSAMPLE_YUV422V:
2012 obj_surface->cb_cr_width = obj_surface->orig_width;
2013 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2016 case SUBSAMPLE_YUV444:
2017 obj_surface->cb_cr_width = obj_surface->orig_width;
2018 obj_surface->cb_cr_height = obj_surface->orig_height;
2021 case SUBSAMPLE_YUV411:
2022 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
2023 obj_surface->cb_cr_height = obj_surface->orig_height;
2031 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores Cr before Cb; IMC3 is the reverse. */
2033 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
2034 obj_surface->y_cr_offset = obj_surface->height;
2035 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
2037 obj_surface->y_cb_offset = obj_surface->height;
2038 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
/* --- Linear path: 4:2:0 only (NV12, YV12, I420). --- */
2042 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
2043 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
2044 assert(subsampling == SUBSAMPLE_YUV420);
2046 region_width = obj_surface->width;
2047 region_height = obj_surface->height;
2050 case VA_FOURCC('N', 'V', '1', '2'):
2051 obj_surface->y_cb_offset = obj_surface->height;
2052 obj_surface->y_cr_offset = obj_surface->height;
2053 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2054 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2055 obj_surface->cb_cr_pitch = obj_surface->width;
2056 region_height = obj_surface->height + obj_surface->height / 2;
2059 case VA_FOURCC('Y', 'V', '1', '2'):
2060 case VA_FOURCC('I', '4', '2', '0'):
/* YV12 orders Cr before Cb; I420 the reverse. */
2061 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2062 obj_surface->y_cr_offset = obj_surface->height;
2063 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2065 obj_surface->y_cb_offset = obj_surface->height;
2066 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2069 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2070 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2071 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2072 region_height = obj_surface->height + obj_surface->height / 2;
/* Round the total allocation up to a 4KiB page. */
2081 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2084 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2085 unsigned long pitch;
2087 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
2095 assert(tiling_mode == I915_TILING_Y);
2096 assert(pitch == obj_surface->width);
2098 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
2104 obj_surface->fourcc = fourcc;
2105 obj_surface->subsampling = subsampling;
2106 assert(obj_surface->bo);
/* vaDeriveImage: create a VAImage that aliases the surface's own BO
 * (zero-copy) instead of allocating new storage.  The image fourcc is
 * chosen to match how the surface will be laid out: YV12 before the
 * render state is initialized, otherwise NV12 or I420 depending on
 * whether the hardware path uses interleaved chroma. */
2109 VAStatus i965_DeriveImage(VADriverContextP ctx,
2110 VASurfaceID surface,
2111 VAImage *out_image) /* out */
2113 struct i965_driver_data *i965 = i965_driver_data(ctx);
2114 struct i965_render_state *render_state = &i965->render_state;
2115 struct object_image *obj_image;
2116 struct object_surface *obj_surface;
2118 unsigned int w_pitch, h_pitch;
2121 out_image->image_id = VA_INVALID_ID;
2122 obj_surface = SURFACE(surface);
2125 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Aligned (allocated) dimensions, not the client-visible ones. */
2127 w_pitch = obj_surface->width;
2128 h_pitch = obj_surface->height;
2130 image_id = NEW_IMAGE_ID();
2132 if (image_id == VA_INVALID_ID)
2133 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2135 obj_image = IMAGE(image_id);
2138 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2140 obj_image->bo = NULL;
2141 obj_image->palette = NULL;
2142 obj_image->derived_surface = VA_INVALID_ID;
2144 VAImage * const image = &obj_image->image;
2146 memset(image, 0, sizeof(*image));
2147 image->image_id = image_id;
2148 image->buf = VA_INVALID_ID;
2149 image->num_palette_entries = 0;
2150 image->entry_bytes = 0;
2151 image->width = obj_surface->orig_width;
2152 image->height = obj_surface->orig_height;
2153 image->data_size = obj_surface->size;
/* Render state not yet initialized: describe the surface as YV12. */
2155 if (!render_state->inited) {
2156 image->format.fourcc = VA_FOURCC('Y','V','1','2');
2157 image->format.byte_order = VA_LSB_FIRST;
2158 image->format.bits_per_pixel = 12;
2159 image->num_planes = 3;
2160 image->pitches[0] = w_pitch;
2161 image->offsets[0] = 0;
2162 image->pitches[1] = w_pitch / 2;
2163 image->offsets[1] = w_pitch * h_pitch;
2164 image->pitches[2] = w_pitch / 2;
2165 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
2167 if (render_state->interleaved_uv) {
2168 image->format.fourcc = VA_FOURCC('N','V','1','2');
2169 image->format.byte_order = VA_LSB_FIRST;
2170 image->format.bits_per_pixel = 12;
2171 image->num_planes = 2;
2172 image->pitches[0] = w_pitch;
2173 image->offsets[0] = 0;
2174 image->pitches[1] = w_pitch;
2175 image->offsets[1] = w_pitch * h_pitch;
2177 image->format.fourcc = VA_FOURCC('I','4','2','0');
2178 image->format.byte_order = VA_LSB_FIRST;
2179 image->format.bits_per_pixel = 12;
2180 image->num_planes = 3;
2181 image->pitches[0] = w_pitch;
2182 image->offsets[0] = 0;
2183 image->pitches[1] = w_pitch / 2;
2184 image->offsets[1] = w_pitch * h_pitch;
2185 image->pitches[2] = w_pitch / 2;
2186 image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Ensure the surface's BO exists, then wrap it (not a copy) in the
 * image's buffer object. */
2190 i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), image->format.fourcc, SUBSAMPLE_YUV420);
2191 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2192 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2193 if (va_status != VA_STATUS_SUCCESS)
2196 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2197 dri_bo_reference(obj_image->bo);
/* NOTE(review): this malloc uses sizeof(obj_image->palette) (pointer
 * size) where i965_CreateImage uses sizeof(*obj_image->palette)
 * (element size) — likely unintended, though benign on LP64 where
 * both are >= the entry size; verify against palette element type. */
2199 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2200 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2201 if (!obj_image->palette) {
2202 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Success: publish the image and mark the surface as derived so its
 * BO is not re-laid-out underneath us. */
2207 *out_image = *image;
2208 obj_surface->flags |= SURFACE_DERIVED;
2209 obj_image->derived_surface = surface;
2211 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
2214 i965_DestroyImage(ctx, image_id);
/* Heap callback: return an image object to the image heap. */
2219 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2221 object_heap_free(heap, obj);
/* vaDestroyImage: drop the image's BO reference, destroy its backing
 * buffer and palette, clear the derived flag on any surface the image
 * aliased, and return the object to the heap. */
2226 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2228 struct i965_driver_data *i965 = i965_driver_data(ctx);
2229 struct object_image *obj_image = IMAGE(image);
2230 struct object_surface *obj_surface;
2233 return VA_STATUS_SUCCESS;
2235 dri_bo_unreference(obj_image->bo);
2236 obj_image->bo = NULL;
2238 if (obj_image->image.buf != VA_INVALID_ID) {
2239 i965_DestroyBuffer(ctx, obj_image->image.buf);
2240 obj_image->image.buf = VA_INVALID_ID;
2243 if (obj_image->palette) {
2244 free(obj_image->palette);
2245 obj_image->palette = NULL;
/* If this image was derived from a surface, allow that surface's BO
 * to be reallocated again. */
2248 obj_surface = SURFACE(obj_image->derived_surface);
2251 obj_surface->flags &= ~SURFACE_DERIVED;
2254 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2256 return VA_STATUS_SUCCESS;
2260 * pointer to an array holding the palette data. The size of the array is
2261 * num_palette_entries * entry_bytes in size. The order of the components
2262 * in the palette is described by the component_order in VASubpicture struct
2265 i965_SetImagePalette(VADriverContextP ctx,
2267 unsigned char *palette)
2269 struct i965_driver_data *i965 = i965_driver_data(ctx);
2272 struct object_image *obj_image = IMAGE(image);
2274 return VA_STATUS_ERROR_INVALID_IMAGE;
2276 if (!obj_image->palette)
2277 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Pack each 3-byte R,G,B triple into one 0x00RRGGBB word. */
2279 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2280 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2281 ((unsigned int)palette[3*i + 1] << 8) |
2282 (unsigned int)palette[3*i + 2]);
2283 return VA_STATUS_SUCCESS;
/*
 * memcpy_pic: copy a 2D pixel region row by row between two buffers
 * that may have different strides.
 *
 * dst/dst_stride: destination base pointer and its row stride in bytes.
 * src/src_stride: source base pointer and its row stride in bytes.
 * len:            number of bytes to copy per row (len <= both strides).
 * height:         number of rows.
 *
 * Fix vs. original: the row counter is declared unsigned to match the
 * unsigned `height` parameter, avoiding a signed/unsigned comparison
 * in the loop condition.  Source and destination regions must not
 * overlap (memcpy semantics).
 */
static void
memcpy_pic(uint8_t *dst, unsigned int dst_stride,
           const uint8_t *src, unsigned int src_stride,
           unsigned int len, unsigned int height)
{
    unsigned int i;

    for (i = 0; i < height; i++) {
        memcpy(dst, src, len);
        dst += dst_stride;   /* advance one destination row */
        src += src_stride;   /* advance one source row */
    }
}
/* Copy the rectangle `rect` from an I420-laid-out surface BO into a
 * client image that is either I420 or YV12.  U/V indices are swapped
 * when the image fourcc differs from the surface fourcc (I420 vs YV12
 * differ only in chroma plane order). */
2301 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2302 struct object_surface *obj_surface,
2303 const VARectangle *rect)
2305 uint8_t *dst[3], *src[3];
/* Same fourcc: planes map 1:1; otherwise swap the chroma planes. */
2307 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2308 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2309 unsigned int tiling, swizzle;
2311 if (!obj_surface->bo)
2314 assert(obj_surface->fourcc);
2315 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs need a GTT map for a linear CPU view. */
2317 if (tiling != I915_TILING_NONE)
2318 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2320 dri_bo_map(obj_surface->bo, 0);
2322 if (!obj_surface->bo->virtual)
2325 /* Dest VA image has either I420 or YV12 format.
2326 Source VA surface alway has I420 format */
2327 dst[Y] = image_data + obj_image->image.offsets[Y];
2328 src[0] = (uint8_t *)obj_surface->bo->virtual;
2329 dst[U] = image_data + obj_image->image.offsets[U];
2330 src[1] = src[0] + obj_surface->width * obj_surface->height;
2331 dst[V] = image_data + obj_image->image.offsets[V];
2332 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Luma: full-resolution copy of the requested rectangle. */
2335 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2336 src[0] += rect->y * obj_surface->width + rect->x;
2337 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2338 src[0], obj_surface->width,
2339 rect->width, rect->height);
/* Chroma planes: half resolution in both dimensions. */
2342 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2343 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2344 memcpy_pic(dst[U], obj_image->image.pitches[U],
2345 src[1], obj_surface->width / 2,
2346 rect->width / 2, rect->height / 2);
2349 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2350 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2351 memcpy_pic(dst[V], obj_image->image.pitches[V],
2352 src[2], obj_surface->width / 2,
2353 rect->width / 2, rect->height / 2);
2355 if (tiling != I915_TILING_NONE)
2356 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2358 dri_bo_unmap(obj_surface->bo);
2362 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2363 struct object_surface *obj_surface,
2364 const VARectangle *rect)
2366 uint8_t *dst[2], *src[2];
2367 unsigned int tiling, swizzle;
2369 if (!obj_surface->bo)
2372 assert(obj_surface->fourcc);
2373 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
2375 if (tiling != I915_TILING_NONE)
2376 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2378 dri_bo_map(obj_surface->bo, 0);
2380 if (!obj_surface->bo->virtual)
2383 /* Both dest VA image and source surface have NV12 format */
2384 dst[0] = image_data + obj_image->image.offsets[0];
2385 src[0] = (uint8_t *)obj_surface->bo->virtual;
2386 dst[1] = image_data + obj_image->image.offsets[1];
2387 src[1] = src[0] + obj_surface->width * obj_surface->height;
2390 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2391 src[0] += rect->y * obj_surface->width + rect->x;
2392 memcpy_pic(dst[0], obj_image->image.pitches[0],
2393 src[0], obj_surface->width,
2394 rect->width, rect->height);
2397 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2398 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2399 memcpy_pic(dst[1], obj_image->image.pitches[1],
2400 src[1], obj_surface->width,
2401 rect->width, rect->height / 2);
2403 if (tiling != I915_TILING_NONE)
2404 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2406 dri_bo_unmap(obj_surface->bo);
2410 i965_GetImage(VADriverContextP ctx,
2411 VASurfaceID surface,
2412 int x, /* coordinates of the upper left source pixel */
2414 unsigned int width, /* width and height of the region */
2415 unsigned int height,
2418 struct i965_driver_data *i965 = i965_driver_data(ctx);
2419 struct i965_render_state *render_state = &i965->render_state;
2421 struct object_surface *obj_surface = SURFACE(surface);
2423 return VA_STATUS_ERROR_INVALID_SURFACE;
2425 struct object_image *obj_image = IMAGE(image);
2427 return VA_STATUS_ERROR_INVALID_IMAGE;
2430 return VA_STATUS_ERROR_INVALID_PARAMETER;
2431 if (x + width > obj_surface->orig_width ||
2432 y + height > obj_surface->orig_height)
2433 return VA_STATUS_ERROR_INVALID_PARAMETER;
2434 if (x + width > obj_image->image.width ||
2435 y + height > obj_image->image.height)
2436 return VA_STATUS_ERROR_INVALID_PARAMETER;
2439 void *image_data = NULL;
2441 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2442 if (va_status != VA_STATUS_SUCCESS)
2449 rect.height = height;
2451 switch (obj_image->image.format.fourcc) {
2452 case VA_FOURCC('Y','V','1','2'):
2453 case VA_FOURCC('I','4','2','0'):
2454 /* I420 is native format for MPEG-2 decoded surfaces */
2455 if (render_state->interleaved_uv)
2456 goto operation_failed;
2457 get_image_i420(obj_image, image_data, obj_surface, &rect);
2459 case VA_FOURCC('N','V','1','2'):
2460 /* NV12 is native format for H.264 decoded surfaces */
2461 if (!render_state->interleaved_uv)
2462 goto operation_failed;
2463 get_image_nv12(obj_image, image_data, obj_surface, &rect);
2467 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2471 i965_UnmapBuffer(ctx, obj_image->image.buf);
2476 i965_PutSurface(VADriverContextP ctx,
2477 VASurfaceID surface,
2478 void *draw, /* X Drawable */
2481 unsigned short srcw,
2482 unsigned short srch,
2485 unsigned short destw,
2486 unsigned short desth,
2487 VARectangle *cliprects, /* client supplied clip list */
2488 unsigned int number_cliprects, /* number of clip rects in the clip list */
2489 unsigned int flags) /* de-interlacing flags */
2492 if (IS_VA_X11(ctx)) {
2493 VARectangle src_rect, dst_rect;
2497 src_rect.width = srcw;
2498 src_rect.height = srch;
2502 dst_rect.width = destw;
2503 dst_rect.height = desth;
2505 return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
2506 cliprects, number_cliprects, flags);
2509 return VA_STATUS_ERROR_UNIMPLEMENTED;
2513 i965_Terminate(VADriverContextP ctx)
2515 struct i965_driver_data *i965 = i965_driver_data(ctx);
2518 intel_batchbuffer_free(i965->batch);
2520 _i965DestroyMutex(&i965->render_mutex);
2524 i965_output_dri_terminate(ctx);
2527 #ifdef HAVE_VA_WAYLAND
2528 if (IS_VA_WAYLAND(ctx))
2529 i965_output_wayland_terminate(ctx);
2532 if (i965_render_terminate(ctx) == False)
2533 return VA_STATUS_ERROR_UNKNOWN;
2535 if (i965_post_processing_terminate(ctx) == False)
2536 return VA_STATUS_ERROR_UNKNOWN;
2538 i965_display_attributes_terminate(ctx);
2540 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2541 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2542 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2543 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2544 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2545 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2547 if (intel_driver_terminate(ctx) == False)
2548 return VA_STATUS_ERROR_UNKNOWN;
2550 free(ctx->pDriverData);
2551 ctx->pDriverData = NULL;
2553 return VA_STATUS_SUCCESS;
2558 VADriverContextP ctx, /* in */
2559 VABufferID buf_id, /* in */
2560 VABufferType *type, /* out */
2561 unsigned int *size, /* out */
2562 unsigned int *num_elements /* out */
2565 struct i965_driver_data *i965 = NULL;
2566 struct object_buffer *obj_buffer = NULL;
2568 i965 = i965_driver_data(ctx);
2569 obj_buffer = BUFFER(buf_id);
2571 *type = obj_buffer->type;
2572 *size = obj_buffer->size_element;
2573 *num_elements = obj_buffer->num_elements;
2575 return VA_STATUS_SUCCESS;
/*
 * vaLockSurface entry point: "lock" a surface by deriving a VAImage from
 * it, mapping the image buffer, and returning the fourcc, per-plane
 * strides/offsets, the image buffer id and the mapped pointer.
 * Undone by i965_UnlockSurface().
 *
 * NOTE(review): this chunk is an elided extraction — the embedded original
 * line numbers below jump, so the function header, the VAImage temporary's
 * declaration, the error-path gotos/label and the final return are not
 * visible here; the surrounding structure is inferred, not shown.
 */
2580 VADriverContextP ctx, /* in */
2581 VASurfaceID surface, /* in */
2582 unsigned int *fourcc, /* out */
2583 unsigned int *luma_stride, /* out */
2584 unsigned int *chroma_u_stride, /* out */
2585 unsigned int *chroma_v_stride, /* out */
2586 unsigned int *luma_offset, /* out */
2587 unsigned int *chroma_u_offset, /* out */
2588 unsigned int *chroma_v_offset, /* out */
2589 unsigned int *buffer_name, /* out */
2590 void **buffer /* out */
2593 VAStatus vaStatus = VA_STATUS_SUCCESS;
2594 struct i965_driver_data *i965 = i965_driver_data(ctx);
2595 struct object_surface *obj_surface = NULL;
/* Out-parameters are required; assert rather than report an error.
 * (The asserts for fourcc and buffer are in elided lines.) */
2599 assert(luma_stride);
2600 assert(chroma_u_stride);
2601 assert(chroma_v_stride);
2602 assert(luma_offset);
2603 assert(chroma_u_offset);
2604 assert(chroma_v_offset);
2605 assert(buffer_name);
/* Mark the temporary image invalid so the error path can tell whether
 * a derived image needs cleanup. */
2608 tmpImage.image_id = VA_INVALID_ID;
2610 obj_surface = SURFACE(surface);
2611 if (obj_surface == NULL) {
2612 // Surface is absent.
2613 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2617 // Lock functionality is absent now.
2618 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2619 // Surface is locked already.
2620 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a VAImage aliasing the surface's backing store. */
2624 vaStatus = i965_DeriveImage(
2628 if (vaStatus != VA_STATUS_SUCCESS) {
/* Remember the derived image id: this is what marks the surface locked. */
2632 obj_surface->locked_image_id = tmpImage.image_id;
/* Map the image buffer so the caller gets a CPU pointer in *buffer. */
2634 vaStatus = i965_MapBuffer(
2638 if (vaStatus != VA_STATUS_SUCCESS) {
/* Report the derived image's layout back to the caller. */
2642 *fourcc = tmpImage.format.fourcc;
2643 *luma_offset = tmpImage.offsets[0];
2644 *luma_stride = tmpImage.pitches[0];
2645 *chroma_u_offset = tmpImage.offsets[1];
2646 *chroma_u_stride = tmpImage.pitches[1];
2647 *chroma_v_offset = tmpImage.offsets[2];
2648 *chroma_v_stride = tmpImage.pitches[2];
2649 *buffer_name = tmpImage.buf;
/* Error-recovery tail (label and cleanup statements are elided). */
2652 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * vaUnlockSurface entry point: undo i965_LockSurface() by unmapping and
 * destroying the derived image recorded in locked_image_id, then marking
 * the surface unlocked again.
 *
 * NOTE(review): elided extraction — the function header, the closing
 * braces of the error checks, the error label and the final return are in
 * lines not visible here (embedded original line numbers jump).
 */
2661 VADriverContextP ctx, /* in */
2662 VASurfaceID surface /* in */
2665 VAStatus vaStatus = VA_STATUS_SUCCESS;
2666 struct i965_driver_data *i965 = i965_driver_data(ctx);
2667 struct object_image *locked_img = NULL;
2668 struct object_surface *obj_surface = NULL;
2670 obj_surface = SURFACE(surface);
2672 if (obj_surface == NULL) {
2673 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
2676 if (obj_surface->locked_image_id == VA_INVALID_ID) {
2677 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
/* Look up the image created by the matching LockSurface call. */
2681 locked_img = IMAGE(obj_surface->locked_image_id);
2682 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
2683 // Work image was deallocated before i965_UnlockSurface()
2684 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Unmap first, then destroy; each failure aborts via the (elided) error path. */
2688 vaStatus = i965_UnmapBuffer(
2690 locked_img->image.buf);
2691 if (vaStatus != VA_STATUS_SUCCESS) {
2695 vaStatus = i965_DestroyImage(
2697 locked_img->image.image_id);
2698 if (vaStatus != VA_STATUS_SUCCESS) {
/* Invalidate both sides of the lock association. */
2702 locked_img->image.image_id = VA_INVALID_ID;
2705 obj_surface->locked_image_id = VA_INVALID_ID;
2711 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
2714 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2716 struct VADriverVTable * const vtable = ctx->vtable;
2717 struct i965_driver_data *i965;
2720 ctx->version_major = VA_MAJOR_VERSION;
2721 ctx->version_minor = VA_MINOR_VERSION;
2722 ctx->max_profiles = I965_MAX_PROFILES;
2723 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2724 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2725 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2726 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2727 ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
2729 vtable->vaTerminate = i965_Terminate;
2730 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2731 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
2732 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2733 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2734 vtable->vaCreateConfig = i965_CreateConfig;
2735 vtable->vaDestroyConfig = i965_DestroyConfig;
2736 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2737 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2738 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2739 vtable->vaCreateContext = i965_CreateContext;
2740 vtable->vaDestroyContext = i965_DestroyContext;
2741 vtable->vaCreateBuffer = i965_CreateBuffer;
2742 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2743 vtable->vaMapBuffer = i965_MapBuffer;
2744 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2745 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2746 vtable->vaBeginPicture = i965_BeginPicture;
2747 vtable->vaRenderPicture = i965_RenderPicture;
2748 vtable->vaEndPicture = i965_EndPicture;
2749 vtable->vaSyncSurface = i965_SyncSurface;
2750 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2751 vtable->vaPutSurface = i965_PutSurface;
2752 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2753 vtable->vaCreateImage = i965_CreateImage;
2754 vtable->vaDeriveImage = i965_DeriveImage;
2755 vtable->vaDestroyImage = i965_DestroyImage;
2756 vtable->vaSetImagePalette = i965_SetImagePalette;
2757 vtable->vaGetImage = i965_GetImage;
2758 vtable->vaPutImage = i965_PutImage;
2759 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2760 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2761 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2762 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2763 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2764 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2765 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2766 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2767 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2768 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2769 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2770 vtable->vaBufferInfo = i965_BufferInfo;
2771 vtable->vaLockSurface = i965_LockSurface;
2772 vtable->vaUnlockSurface = i965_UnlockSurface;
2773 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
2775 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2777 ctx->pDriverData = (void *)i965;
2779 result = object_heap_init(&i965->config_heap,
2780 sizeof(struct object_config),
2782 assert(result == 0);
2784 result = object_heap_init(&i965->context_heap,
2785 sizeof(struct object_context),
2787 assert(result == 0);
2789 result = object_heap_init(&i965->surface_heap,
2790 sizeof(struct object_surface),
2792 assert(result == 0);
2794 result = object_heap_init(&i965->buffer_heap,
2795 sizeof(struct object_buffer),
2797 assert(result == 0);
2799 result = object_heap_init(&i965->image_heap,
2800 sizeof(struct object_image),
2802 assert(result == 0);
2804 result = object_heap_init(&i965->subpic_heap,
2805 sizeof(struct object_subpic),
2807 assert(result == 0);
2809 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
2810 INTEL_STR_DRIVER_VENDOR,
2811 INTEL_STR_DRIVER_NAME,
2812 INTEL_DRIVER_MAJOR_VERSION,
2813 INTEL_DRIVER_MINOR_VERSION,
2814 INTEL_DRIVER_MICRO_VERSION);
2816 if (INTEL_DRIVER_PRE_VERSION > 0) {
2817 const int len = strlen(i965->va_vendor);
2818 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
2820 ctx->str_vendor = i965->va_vendor;
2822 return i965_Init(ctx);