2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
34 #include <va/va_backend.h>
36 #include "intel_batchbuffer.h"
37 #include "intel_driver.h"
39 #include "i965_defines.h"
40 #include "i965_media_mpeg2.h"
41 #include "i965_media_h264.h"
42 #include "i965_media.h"
43 #include "i965_drv_video.h"
/*
 * Emit PIPELINE_SELECT to switch the command streamer to the MEDIA pipeline
 * (as opposed to 3D) before any media state/commands are issued.
 * NOTE(review): the batch begin/advance bracketing is not visible in this
 * chunk (lines dropped by extraction) — verify against the full file.
 */
46 i965_media_pipeline_select(VADriverContextP ctx)
49 OUT_BATCH(ctx, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
/*
 * Program the URB fence that partitions the Unified Return Buffer between
 * the VFE unit and the Constant/State (CS) section: the VFE region ends at
 * urb.cs_start, and the CS region extends to the device's total URB size.
 */
54 i965_media_urb_layout(VADriverContextP ctx)
56 struct i965_driver_data *i965 = i965_driver_data(ctx);
57 struct i965_media_state *media_state = &i965->media_state;
58 unsigned int vfe_fence, cs_fence;
60 vfe_fence = media_state->urb.cs_start; /* VFE fence = where the CS section begins */
61 cs_fence = URB_SIZE((&i965->intel));   /* CS fence = total URB size for this chipset */
64 OUT_BATCH(ctx, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
67 (vfe_fence << UF2_VFE_FENCE_SHIFT) | /* VFE_SIZE */
68 (cs_fence << UF2_CS_FENCE_SHIFT)); /* CS_SIZE */
/*
 * Emit STATE_BASE_ADDRESS.  Ironlake uses the longer (|6, 8-dword) form of
 * the command; earlier chipsets use the shorter (|4, 6-dword) form.  All
 * base addresses are left at zero (BASE_ADDRESS_MODIFY only) except the
 * indirect-object base, which is relocated to the media indirect-data BO
 * when one has been set up.
 */
73 i965_media_state_base_address(VADriverContextP ctx)
75 struct i965_driver_data *i965 = i965_driver_data(ctx);
76 struct i965_media_state *media_state = &i965->media_state;
78 if (IS_IRONLAKE(i965->intel.device_id)) {
80 OUT_BATCH(ctx, CMD_STATE_BASE_ADDRESS | 6);
81 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
82 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
84 if (media_state->indirect_object.bo) {
/* Indirect object base points into the per-frame indirect data buffer. */
85 OUT_RELOC(ctx, media_state->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
86 media_state->indirect_object.offset | BASE_ADDRESS_MODIFY);
/* NOTE(review): the matching `} else {` is missing from this extraction. */
88 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
91 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
92 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
93 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
94 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
/* Pre-Ironlake: shorter form of the same command. */
98 OUT_BATCH(ctx, CMD_STATE_BASE_ADDRESS | 4);
99 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
100 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
102 if (media_state->indirect_object.bo) {
103 OUT_RELOC(ctx, media_state->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
104 media_state->indirect_object.offset | BASE_ADDRESS_MODIFY);
/* NOTE(review): the matching `} else {` is missing from this extraction. */
106 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
109 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
110 OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
/*
 * Emit MEDIA_STATE_POINTERS: an optional pointer to the extended
 * (hardware-specific) media state, followed by a pointer to the VFE state
 * buffer.  Bit 0 of the first pointer dword flags the extended state as
 * valid — presumably why OUT_RELOC ORs in a delta of 1; verify against the
 * hardware documentation.
 */
116 i965_media_state_pointers(VADriverContextP ctx)
118 struct i965_driver_data *i965 = i965_driver_data(ctx);
119 struct i965_media_state *media_state = &i965->media_state;
122 OUT_BATCH(ctx, CMD_MEDIA_STATE_POINTERS | 1);
124 if (media_state->extended_state.enabled)
125 OUT_RELOC(ctx, media_state->extended_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 1);
/* NOTE(review): the `else OUT_BATCH(ctx, 0)` arm appears to have been
 * dropped by extraction — confirm against the full file. */
129 OUT_RELOC(ctx, media_state->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
/*
 * Emit CS_URB_STATE describing the constant-buffer section of the URB:
 * the per-entry allocation size (encoded as size-1, in 512-bit units per
 * hardware convention — verify) and the number of CS URB entries.
 */
134 i965_media_cs_urb_layout(VADriverContextP ctx)
136 struct i965_driver_data *i965 = i965_driver_data(ctx);
137 struct i965_media_state *media_state = &i965->media_state;
140 OUT_BATCH(ctx, CMD_CS_URB_STATE | 0);
142 ((media_state->urb.size_cs_entry - 1) << 4) | /* URB Entry Allocation Size */
143 (media_state->urb.num_cs_entries << 0)); /* Number of URB Entries */
/*
 * Emit the three fixed pieces of media pipeline state in the order the
 * hardware expects: base addresses, state pointers, then CS URB layout.
 */
148 i965_media_pipeline_state(VADriverContextP ctx)
150 i965_media_state_base_address(ctx);
151 i965_media_state_pointers(ctx);
152 i965_media_cs_urb_layout(ctx);
/*
 * Emit CONSTANT_BUFFER pointing at the CURBE BO.  The low bits of the
 * relocated address carry the buffer length, encoded as (entry size - 1)
 * per the command's convention.  decode_state is currently unused here.
 */
156 i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state)
158 struct i965_driver_data *i965 = i965_driver_data(ctx);
159 struct i965_media_state *media_state = &i965->media_state;
162 OUT_BATCH(ctx, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
163 OUT_RELOC(ctx, media_state->curbe.bo,
164 I915_GEM_DOMAIN_INSTRUCTION, 0,
165 media_state->urb.size_cs_entry - 1);
/*
 * Build the complete media decode batch for one frame, as an atomic
 * batchbuffer section so nothing else is interleaved.  Steps 1-5 are the
 * fixed pipeline programming; step 6 dispatches the codec-specific
 * MEDIA_OBJECT commands through the media_objects callback installed by
 * the per-codec init code.
 */
170 i965_media_pipeline_setup(VADriverContextP ctx, struct decode_state *decode_state)
172 struct i965_driver_data *i965 = i965_driver_data(ctx);
173 struct i965_media_state *media_state = &i965->media_state;
175 intel_batchbuffer_start_atomic(ctx, 0x1000);
176 intel_batchbuffer_emit_mi_flush(ctx); /* step 1 */
177 i965_media_pipeline_select(ctx); /* step 2 */
178 i965_media_urb_layout(ctx); /* step 3 */
179 i965_media_pipeline_state(ctx); /* step 4 */
180 i965_media_constant_buffer(ctx, decode_state); /* step 5 */
181 assert(media_state->media_objects); /* codec init must have set the hook */
182 media_state->media_objects(ctx, decode_state); /* step 6 */
183 intel_batchbuffer_end_atomic(ctx);
/*
 * (Re)allocate the per-frame GPU state buffers (CURBE, binding table,
 * interface descriptor table, VFE state), reset cached surface-state BOs,
 * and hand off to the codec-specific init for the given profile.  Each
 * buffer is unreferenced before reallocation, so this is safe to call on
 * every frame.
 * NOTE(review): several lines (dri_bo_alloc name/size arguments, the
 * `switch (profile)` header, `break`s, assert(bo) checks and braces) are
 * missing from this extraction — verify against the full file.
 */
187 i965_media_decode_init(VADriverContextP ctx, VAProfile profile)
190 struct i965_driver_data *i965 = i965_driver_data(ctx);
191 struct i965_media_state *media_state = &i965->media_state;
194 /* constant buffer */
195 dri_bo_unreference(media_state->curbe.bo);
196 bo = dri_bo_alloc(i965->intel.bufmgr,
200 media_state->curbe.bo = bo;
/* Drop cached per-surface state; it is rebuilt for the new frame. */
203 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
204 dri_bo_unreference(media_state->surface_state[i].bo);
205 media_state->surface_state[i].bo = NULL;
/* binding table: one 32-bit surface-state pointer per media surface */
209 dri_bo_unreference(media_state->binding_table.bo);
210 bo = dri_bo_alloc(i965->intel.bufmgr,
212 MAX_MEDIA_SURFACES * sizeof(unsigned int), 32);
214 media_state->binding_table.bo = bo;
216 /* interface descriptor remapping table */
217 dri_bo_unreference(media_state->idrt.bo);
218 bo = dri_bo_alloc(i965->intel.bufmgr,
219 "interface discriptor",
220 MAX_INTERFACE_DESC * sizeof(struct i965_interface_descriptor), 16);
222 media_state->idrt.bo = bo;
/* VFE (video front end) state */
225 dri_bo_unreference(media_state->vfe_state.bo);
226 bo = dri_bo_alloc(i965->intel.bufmgr,
228 sizeof(struct i965_vfe_state), 32);
230 media_state->vfe_state.bo = bo;
/* Extended state is off by default; codec init may enable it. */
233 media_state->extended_state.enabled = 0;
/* Dispatch to the codec-specific initializer for this profile. */
236 case VAProfileMPEG2Simple:
237 case VAProfileMPEG2Main:
238 i965_media_mpeg2_decode_init(ctx);
241 case VAProfileH264Baseline:
242 case VAProfileH264Main:
243 case VAProfileH264High:
244 i965_media_h264_decode_init(ctx);
/*
 * Top-level entry point for decoding one picture: (re)initialize the media
 * state buffers for the profile, let the codec-specific hook fill in the
 * indirect state (media_states_setup is installed by the codec init),
 * build the batch, and flush it to the kernel for execution.
 */
254 i965_media_decode_picture(VADriverContextP ctx,
256 struct decode_state *decode_state)
258 struct i965_driver_data *i965 = i965_driver_data(ctx);
259 struct i965_media_state *media_state = &i965->media_state;
261 i965_media_decode_init(ctx, profile);
262 assert(media_state->media_states_setup); /* codec init must have set the hook */
263 media_state->media_states_setup(ctx, decode_state);
264 i965_media_pipeline_setup(ctx, decode_state);
265 intel_batchbuffer_flush(ctx);
/*
 * Driver-load-time initialization: let each supported codec register its
 * one-time resources/hooks.
 */
269 i965_media_init(VADriverContextP ctx)
271 i965_media_mpeg2_init(ctx);
272 i965_media_h264_init(ctx);
/*
 * Driver teardown: release every BO held in the shared media state (each
 * pointer is NULLed after unreference to prevent use-after-free on a
 * subsequent init), then tear down the per-codec state.
 * NOTE(review): the callee names are misspelled "ternimate" — they must
 * match the definitions in the codec files, so do not "fix" them here
 * without renaming the definitions too.
 */
277 i965_media_terminate(VADriverContextP ctx)
279 struct i965_driver_data *i965 = i965_driver_data(ctx);
280 struct i965_media_state *media_state = &i965->media_state;
283 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
284 dri_bo_unreference(media_state->surface_state[i].bo);
285 media_state->surface_state[i].bo = NULL;
288 dri_bo_unreference(media_state->extended_state.bo);
289 media_state->extended_state.bo = NULL;
291 dri_bo_unreference(media_state->vfe_state.bo);
292 media_state->vfe_state.bo = NULL;
294 dri_bo_unreference(media_state->idrt.bo);
295 media_state->idrt.bo = NULL;
297 dri_bo_unreference(media_state->binding_table.bo);
298 media_state->binding_table.bo = NULL;
300 dri_bo_unreference(media_state->curbe.bo);
301 media_state->curbe.bo = NULL;
303 dri_bo_unreference(media_state->indirect_object.bo);
304 media_state->indirect_object.bo = NULL;
306 i965_media_mpeg2_ternimate(ctx);
307 i965_media_h264_ternimate(ctx);