2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37 #include "i965_defines.h"
38 #include "i965_drv_video.h"
40 #include "i965_media.h"
41 #include "i965_media_mpeg2.h"
42 #include "i965_media_h264.h"
43 #include "i965_decoder_utils.h"
/*
 * Emit PIPELINE_SELECT to switch the render engine into the media
 * pipeline before any media state/commands are issued.
 * NOTE(review): trailing ADVANCE_BATCH/closing lines are not visible in
 * this excerpt — confirm against the full file.
 */
46 i965_media_pipeline_select(VADriverContextP ctx, struct i965_media_context *media_context)
48 struct intel_batchbuffer *batch = media_context->base.batch;
50 BEGIN_BATCH(batch, 1);
51 OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
/*
 * Emit URB_FENCE to partition the Unified Return Buffer between the
 * VFE and CS units.  The VFE fence is taken from the context's
 * configured CS start offset; the CS fence is the device's total URB
 * size, so CS gets everything from cs_start to the end of the URB.
 */
56 i965_media_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
58 struct i965_driver_data *i965 = i965_driver_data(ctx);
59 struct intel_batchbuffer *batch = media_context->base.batch;
60 unsigned int vfe_fence, cs_fence;
62 vfe_fence = media_context->urb.cs_start;
63 cs_fence = i965->intel.device_info->urb_size;
65 BEGIN_BATCH(batch, 3);
66 OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
69 (vfe_fence << UF2_VFE_FENCE_SHIFT) | /* VFE_SIZE */
70 (cs_fence << UF2_CS_FENCE_SHIFT)); /* CS_SIZE */
/*
 * Emit STATE_BASE_ADDRESS.  Ironlake uses the 8-dword (length 6) form;
 * earlier gens use the 6-dword (length 4) form.  In both cases every
 * base is left at zero (with BASE_ADDRESS_MODIFY set) except the
 * indirect-object base, which is relocated against the context's
 * indirect_object BO when one is present.
 */
75 i965_media_state_base_address(VADriverContextP ctx, struct i965_media_context *media_context)
77 struct i965_driver_data *i965 = i965_driver_data(ctx);
78 struct intel_batchbuffer *batch = media_context->base.batch;
80 if (IS_IRONLAKE(i965->intel.device_info)) {
81 BEGIN_BATCH(batch, 8);
82 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
83 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
84 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
/* Indirect object base: point at the slice-data BO if we have one. */
86 if (media_context->indirect_object.bo) {
87 OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
88 media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
/* else branch (no indirect BO): emit a zero base instead. */
90 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
93 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
94 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
95 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
96 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
/* Pre-Ironlake path: shorter STATE_BASE_ADDRESS, same base layout. */
99 BEGIN_BATCH(batch, 6);
100 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 4);
101 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
102 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
104 if (media_context->indirect_object.bo) {
105 OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
106 media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
108 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
111 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
112 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
113 ADVANCE_BATCH(batch);
/*
 * Emit MEDIA_STATE_POINTERS: the (optional) extended state pointer,
 * followed by the VFE state pointer.  Bit 0 of the extended-state
 * relocation marks the pointer as valid; when extended state is
 * disabled the slot is presumably filled by an elided OUT_BATCH(0) —
 * confirm against the full file.
 */
118 i965_media_state_pointers(VADriverContextP ctx, struct i965_media_context *media_context)
120 struct intel_batchbuffer *batch = media_context->base.batch;
122 BEGIN_BATCH(batch, 3);
123 OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
125 if (media_context->extended_state.enabled)
126 OUT_RELOC(batch, media_context->extended_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 1);
130 OUT_RELOC(batch, media_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
131 ADVANCE_BATCH(batch);
/*
 * Emit CS_URB_STATE describing the constant-buffer URB layout:
 * per-entry allocation size (encoded as size-1, in 512-bit units)
 * and the number of CS URB entries.
 */
135 i965_media_cs_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
137 struct intel_batchbuffer *batch = media_context->base.batch;
139 BEGIN_BATCH(batch, 2);
140 OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
142 ((media_context->urb.size_cs_entry - 1) << 4) | /* URB Entry Allocation Size */
143 (media_context->urb.num_cs_entries << 0)); /* Number of URB Entries */
144 ADVANCE_BATCH(batch);
/*
 * Program all static media pipeline state: base addresses, media state
 * pointers, then the CS URB layout, in that order.
 */
148 i965_media_pipeline_state(VADriverContextP ctx, struct i965_media_context *media_context)
150 i965_media_state_base_address(ctx, media_context);
151 i965_media_state_pointers(ctx, media_context);
152 i965_media_cs_urb_layout(ctx, media_context);
/*
 * Emit CONSTANT_BUFFER pointing at the context's CURBE BO.  The low
 * bits of the relocation carry the buffer length, encoded as
 * size_cs_entry - 1; (1 << 8) sets the "valid" bit of the command.
 */
156 i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state, struct i965_media_context *media_context)
158 struct intel_batchbuffer *batch = media_context->base.batch;
160 BEGIN_BATCH(batch, 2);
161 OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
162 OUT_RELOC(batch, media_context->curbe.bo,
163 I915_GEM_DOMAIN_INSTRUCTION, 0,
164 media_context->urb.size_cs_entry - 1);
165 ADVANCE_BATCH(batch);
/*
 * Emit a null DEPTH_BUFFER (surface type NULL, D32_FLOAT format).
 * The media pipeline does no depth testing; this just puts the depth
 * unit into a well-defined state.
 */
169 i965_media_depth_buffer(VADriverContextP ctx, struct i965_media_context *media_context)
171 struct intel_batchbuffer *batch = media_context->base.batch;
173 BEGIN_BATCH(batch, 6);
174 OUT_BATCH(batch, CMD_DEPTH_BUFFER | 4);
175 OUT_BATCH(batch, (I965_DEPTHFORMAT_D32_FLOAT << 18) |
176 (I965_SURFACE_NULL << 29));
181 ADVANCE_BATCH(batch);
/*
 * Build the complete media batch for one decode pass, atomically:
 * flush, null depth buffer, pipeline select, URB layout, pipeline
 * state, constant buffer, then the codec-specific media objects hook.
 * The numbered "step" comments mirror the programming sequence in the
 * hardware documentation.
 */
185 i965_media_pipeline_setup(VADriverContextP ctx,
186 struct decode_state *decode_state,
187 struct i965_media_context *media_context)
189 struct intel_batchbuffer *batch = media_context->base.batch;
/* Reserve batch space up front so the sequence is emitted atomically. */
191 intel_batchbuffer_start_atomic(batch, 0x1000);
192 intel_batchbuffer_emit_mi_flush(batch); /* step 1 */
193 i965_media_depth_buffer(ctx, media_context);
194 i965_media_pipeline_select(ctx, media_context); /* step 2 */
195 i965_media_urb_layout(ctx, media_context); /* step 3 */
196 i965_media_pipeline_state(ctx, media_context); /* step 4 */
197 i965_media_constant_buffer(ctx, decode_state, media_context); /* step 5 */
/* media_objects is the per-codec hook installed at context init. */
198 assert(media_context->media_objects);
199 media_context->media_objects(ctx, decode_state, media_context); /* step 6 */
200 intel_batchbuffer_end_atomic(batch);
/*
 * (Re)allocate the per-frame GPU state buffers — CURBE, surface states,
 * binding table, interface descriptor remapping table, VFE state —
 * then dispatch to the codec-specific decode init for the profile.
 * Each old BO is unreferenced before its replacement is allocated, so
 * this is safe to call once per picture.
 */
204 i965_media_decode_init(VADriverContextP ctx,
206 struct decode_state *decode_state,
207 struct i965_media_context *media_context)
210 struct i965_driver_data *i965 = i965_driver_data(ctx);
213 /* constant buffer */
214 dri_bo_unreference(media_context->curbe.bo);
215 bo = dri_bo_alloc(i965->intel.bufmgr,
219 media_context->curbe.bo = bo;
/* Drop all per-surface state BOs; the codec init repopulates them. */
222 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
223 dri_bo_unreference(media_context->surface_state[i].bo);
224 media_context->surface_state[i].bo = NULL;
/* binding table: one 32-bit entry per media surface */
228 dri_bo_unreference(media_context->binding_table.bo);
229 bo = dri_bo_alloc(i965->intel.bufmgr,
231 MAX_MEDIA_SURFACES * sizeof(unsigned int), 32);
233 media_context->binding_table.bo = bo;
235 /* interface descriptor remapping table */
236 dri_bo_unreference(media_context->idrt.bo);
237 bo = dri_bo_alloc(i965->intel.bufmgr,
238 "interface discriptor",
239 MAX_INTERFACE_DESC * sizeof(struct i965_interface_descriptor), 16);
241 media_context->idrt.bo = bo;
/* VFE (video front end) state */
244 dri_bo_unreference(media_context->vfe_state.bo);
245 bo = dri_bo_alloc(i965->intel.bufmgr,
247 sizeof(struct i965_vfe_state), 32);
249 media_context->vfe_state.bo = bo;
/* Extended state is off by default; codec init may enable it. */
252 media_context->extended_state.enabled = 0;
/* Per-profile decode initialization. */
255 case VAProfileMPEG2Simple:
256 case VAProfileMPEG2Main:
257 i965_media_mpeg2_decode_init(ctx, decode_state, media_context);
260 case VAProfileH264ConstrainedBaseline:
261 case VAProfileH264Main:
262 case VAProfileH264High:
263 i965_media_h264_decode_init(ctx, decode_state, media_context);
/*
 * hw_context->run entry point: decode one picture.  Validates the
 * input buffers, (re)initializes per-frame state, lets the codec set
 * up its media states, builds the batch, and flushes it to the GPU.
 * Returns a VAStatus; bails out early if sanity checking fails.
 */
273 i965_media_decode_picture(VADriverContextP ctx,
275 union codec_state *codec_state,
276 struct hw_context *hw_context)
278 struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
279 struct decode_state *decode_state = &codec_state->decode;
282 vaStatus = intel_decoder_sanity_check_input(ctx, profile, decode_state);
284 if (vaStatus != VA_STATUS_SUCCESS)
287 i965_media_decode_init(ctx, profile, decode_state, media_context);
/* media_states_setup is the per-codec hook installed at context init. */
288 assert(media_context->media_states_setup);
289 media_context->media_states_setup(ctx, decode_state, media_context);
290 i965_media_pipeline_setup(ctx, decode_state, media_context);
291 intel_batchbuffer_flush(hw_context->batch);
293 vaStatus = VA_STATUS_SUCCESS;
/*
 * hw_context->destroy entry point: release everything the media
 * context owns — the codec's private context (via its own destructor),
 * every GPU buffer object, and the batchbuffer.  Pointers are nulled
 * after unreference to guard against accidental reuse.
 */
301 i965_media_context_destroy(void *hw_context)
303 struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
/* Let the codec free its private data first. */
305 if (media_context->free_private_context)
306 media_context->free_private_context(&media_context->private_context);
308 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
309 dri_bo_unreference(media_context->surface_state[i].bo);
310 media_context->surface_state[i].bo = NULL;
313 dri_bo_unreference(media_context->extended_state.bo);
314 media_context->extended_state.bo = NULL;
316 dri_bo_unreference(media_context->vfe_state.bo);
317 media_context->vfe_state.bo = NULL;
319 dri_bo_unreference(media_context->idrt.bo);
320 media_context->idrt.bo = NULL;
322 dri_bo_unreference(media_context->binding_table.bo);
323 media_context->binding_table.bo = NULL;
325 dri_bo_unreference(media_context->curbe.bo);
326 media_context->curbe.bo = NULL;
328 dri_bo_unreference(media_context->indirect_object.bo);
329 media_context->indirect_object.bo = NULL;
331 intel_batchbuffer_free(media_context->base.batch);
/*
 * Create the decode hw_context for G4x-class hardware.  Installs the
 * generic destroy/run vtable entries and a fresh render-ring batch,
 * then initializes the codec-specific context for the profile.  Only
 * the MPEG-2 init call is visible here; the H.264/VC-1 cases appear to
 * share elided handling (presumably unsupported on G4x) — confirm
 * against the full file.
 */
336 g4x_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
338 struct intel_driver_data *intel = intel_driver_data(ctx);
339 struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
341 assert(media_context);
342 media_context->base.destroy = i965_media_context_destroy;
343 media_context->base.run = i965_media_decode_picture;
344 media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
346 switch (obj_config->profile) {
347 case VAProfileMPEG2Simple:
348 case VAProfileMPEG2Main:
349 i965_media_mpeg2_dec_context_init(ctx, media_context);
352 case VAProfileH264ConstrainedBaseline:
353 case VAProfileH264Main:
354 case VAProfileH264High:
355 case VAProfileVC1Simple:
356 case VAProfileVC1Main:
357 case VAProfileVC1Advanced:
363 return (struct hw_context *)media_context;
/*
 * Create the decode hw_context for Ironlake.  Same vtable/batch setup
 * as the G4x variant, but with H.264 decode supported in addition to
 * MPEG-2.  The VC-1 cases are listed with their handling elided here
 * (presumably unsupported) — confirm against the full file.
 */
367 ironlake_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
369 struct intel_driver_data *intel = intel_driver_data(ctx);
370 struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
372 assert(media_context);
373 media_context->base.destroy = i965_media_context_destroy;
374 media_context->base.run = i965_media_decode_picture;
375 media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
377 switch (obj_config->profile) {
378 case VAProfileMPEG2Simple:
379 case VAProfileMPEG2Main:
380 i965_media_mpeg2_dec_context_init(ctx, media_context);
383 case VAProfileH264ConstrainedBaseline:
384 case VAProfileH264Main:
385 case VAProfileH264High:
386 i965_media_h264_dec_context_init(ctx, media_context);
389 case VAProfileVC1Simple:
390 case VAProfileVC1Main:
391 case VAProfileVC1Advanced:
397 return (struct hw_context *)media_context;