2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
34 #include <va/va_backend.h>
36 #include "intel_batchbuffer.h"
37 #include "intel_driver.h"
39 #include "i965_defines.h"
40 #include "i965_media_mpeg2.h"
41 #include "i965_media.h"
42 #include "i965_drv_video.h"
/*
 * Switch the command streamer to the media pipeline (as opposed to 3D)
 * by emitting a PIPELINE_SELECT command into the batch buffer.
 * NOTE(review): this chunk view omits surrounding lines (return type,
 * braces, BEGIN_BATCH/ADVANCE_BATCH) — only the visible lines are edited.
 */
45 i965_media_pipeline_select(VADriverContextP ctx)
48     OUT_BATCH(ctx, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
/*
 * Program the URB fence, splitting the Unified Return Buffer between the
 * VFE (video front end) and CS (constant storage) sections.  The VFE
 * fence ends where the CS section begins (urb.cs_start); the CS fence is
 * the total URB size for this device.
 */
53 i965_media_urb_layout(VADriverContextP ctx)
55     struct i965_driver_data *i965 = i965_driver_data(ctx);
56     struct i965_media_state *media_state = &i965->media_state;
57     unsigned int vfe_fence, cs_fence;
59     vfe_fence = media_state->urb.cs_start;   /* VFE section ends at CS start */
60     cs_fence = URB_SIZE((&i965->intel));     /* CS section extends to end of URB */
63     OUT_BATCH(ctx, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
66               (vfe_fence << UF2_VFE_FENCE_SHIFT) |      /* VFE_SIZE */
67               (cs_fence << UF2_CS_FENCE_SHIFT));        /* CS_SIZE */
/*
 * Emit STATE_BASE_ADDRESS with every base set to zero (physical/graphics
 * address origin) and the MODIFY bit set so the values take effect.
 * IGDNG (Ironlake) uses a longer form of the command (8 dwords, length
 * field 6) than earlier 965-class hardware (6 dwords, length field 4).
 */
72 i965_media_state_base_address(VADriverContextP ctx)
74     struct i965_driver_data *i965 = i965_driver_data(ctx);
76     if (IS_IGDNG(i965->intel.device_id)) {
78         OUT_BATCH(ctx, CMD_STATE_BASE_ADDRESS | 6);  /* Ironlake: 8-dword form */
79         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
80         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
81         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
82         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
83         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
84         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
85         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
89         OUT_BATCH(ctx, CMD_STATE_BASE_ADDRESS | 4);  /* pre-Ironlake: 6-dword form */
90         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
91         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
92         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
93         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
94         OUT_BATCH(ctx, 0 | BASE_ADDRESS_MODIFY);
/*
 * Emit MEDIA_STATE_POINTERS referencing the VFE state buffer and, when a
 * codec has enabled it, the extended (VLD) state buffer.  The reloc delta
 * of 1 on the extended-state pointer sets the low "valid/enable" bit of
 * that dword; the VFE pointer is emitted with delta 0.
 */
100 i965_media_state_pointers(VADriverContextP ctx)
102     struct i965_driver_data *i965 = i965_driver_data(ctx);
103     struct i965_media_state *media_state = &i965->media_state;
106     OUT_BATCH(ctx, CMD_MEDIA_STATE_POINTERS | 1);
108     if (media_state->extended_state.enabled)
109         OUT_RELOC(ctx, media_state->extended_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 1);
113     OUT_RELOC(ctx, media_state->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
/*
 * Emit CS_URB_STATE describing the constant-storage URB layout: the
 * per-entry allocation size (encoded as size-1, in 512-bit units per the
 * hardware spec) and the number of CS URB entries.
 */
118 i965_media_cs_urb_layout(VADriverContextP ctx)
120     struct i965_driver_data *i965 = i965_driver_data(ctx);
121     struct i965_media_state *media_state = &i965->media_state;
124     OUT_BATCH(ctx, CMD_CS_URB_STATE | 0);
126               ((media_state->urb.size_cs_entry - 1) << 4) |     /* URB Entry Allocation Size */
127               (media_state->urb.num_cs_entries << 0));          /* Number of URB Entries */
/*
 * Emit the invariant media pipeline state: base addresses, state
 * pointers, and the CS URB layout, in the order the hardware requires.
 */
132 i965_media_pipeline_state(VADriverContextP ctx)
134     i965_media_state_base_address(ctx);
135     i965_media_state_pointers(ctx);
136     i965_media_cs_urb_layout(ctx);
/*
 * Emit CONSTANT_BUFFER pointing at the CURBE bo.  Bit 8 marks the buffer
 * valid; the reloc delta encodes the buffer length as (entry size - 1)
 * in the low bits of the address dword, per the command format.
 * NOTE(review): decode_state is currently unused here — presumably kept
 * for interface symmetry with the other pipeline-setup steps.
 */
140 i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state)
142     struct i965_driver_data *i965 = i965_driver_data(ctx);
143     struct i965_media_state *media_state = &i965->media_state;
146     OUT_BATCH(ctx, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
147     OUT_RELOC(ctx, media_state->curbe.bo,
148               I915_GEM_DOMAIN_INSTRUCTION, 0,
149               media_state->urb.size_cs_entry - 1);
/*
 * Build the full media-decode batch inside an atomic batchbuffer section
 * (reserving 0x1000 bytes so the sequence is never split across batch
 * submissions).  The numbered steps follow the hardware's required
 * programming order; step 6 is the codec-specific hook that emits the
 * MEDIA_OBJECT commands for the actual decode work.
 */
154 i965_media_pipeline_setup(VADriverContextP ctx, struct decode_state *decode_state)
156     struct i965_driver_data *i965 = i965_driver_data(ctx);
157     struct i965_media_state *media_state = &i965->media_state;
159     intel_batchbuffer_start_atomic(ctx, 0x1000);
160     intel_batchbuffer_emit_mi_flush(ctx);                   /* step 1 */
161     i965_media_pipeline_select(ctx);                        /* step 2 */
162     i965_media_urb_layout(ctx);                             /* step 3 */
163     i965_media_pipeline_state(ctx);                         /* step 4 */
164     i965_media_constant_buffer(ctx, decode_state);          /* step 5 */
165     assert(media_state->media_objects);                     /* codec must have installed its hook */
166     media_state->media_objects(ctx, decode_state);          /* step 6 */
167     intel_batchbuffer_end_atomic(ctx);
/*
 * (Re)allocate the GEM buffer objects shared by all media codecs — the
 * CURBE constant buffer, the surface binding table, the interface
 * descriptor remapping table (IDRT), and the VFE state — then dispatch
 * to the codec-specific init for the given profile.  Each buffer is
 * unreferenced before reallocation, so repeated calls (one per picture)
 * do not leak; dri_bo_unreference(NULL) is a safe no-op on first use.
 */
171 i965_media_decode_init(VADriverContextP ctx, VAProfile profile)
174     struct i965_driver_data *i965 = i965_driver_data(ctx);
175     struct i965_media_state *media_state = &i965->media_state;
178     /* constant buffer */
179     dri_bo_unreference(media_state->curbe.bo);
180     bo = dri_bo_alloc(i965->intel.bufmgr,
184     media_state->curbe.bo = bo;
187     for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
188         dri_bo_unreference(media_state->surface_state[i].bo);
189         media_state->surface_state[i].bo = NULL;
193     dri_bo_unreference(media_state->binding_table.bo);
194     bo = dri_bo_alloc(i965->intel.bufmgr,
196                       MAX_MEDIA_SURFACES * sizeof(unsigned int), 32);
198     media_state->binding_table.bo = bo;
200     /* interface descriptor remapping table */
201     dri_bo_unreference(media_state->idrt.bo);
202     bo = dri_bo_alloc(i965->intel.bufmgr,
                      /* NOTE(review): "discriptor" below is a typo in the bo debug
                       * name; it is a runtime string, left unchanged here. */
203                       "interface discriptor",
204                       MAX_INTERFACE_DESC * sizeof(struct i965_interface_descriptor), 16);
206     media_state->idrt.bo = bo;
209     dri_bo_unreference(media_state->vfe_state.bo);
210     bo = dri_bo_alloc(i965->intel.bufmgr,
212                       sizeof(struct i965_vfe_state), 32);
214     media_state->vfe_state.bo = bo;
217     media_state->extended_state.enabled = 0;   /* codec init re-enables if it needs VLD state */
220     case VAProfileMPEG2Simple:
221     case VAProfileMPEG2Main:
222         i965_media_mpeg2_decode_init(ctx);
/*
 * Top-level entry for decoding one picture on the media pipeline:
 * reallocate shared state for the profile, let the codec fill its state
 * buffers via the states_setup hook, emit the batch, and flush it to
 * the kernel for execution.
 */
232 i965_media_decode_picture(VADriverContextP ctx,
234                           struct decode_state *decode_state)
236     struct i965_driver_data *i965 = i965_driver_data(ctx);
237     struct i965_media_state *media_state = &i965->media_state;
239     i965_media_decode_init(ctx, profile);
240     assert(media_state->states_setup);          /* codec must have installed its hook */
241     media_state->states_setup(ctx, decode_state);
242     i965_media_pipeline_setup(ctx, decode_state);
243     intel_batchbuffer_flush(ctx);               /* submit the batch to the kernel */
/*
 * Driver-load-time initialization of the media path; currently only the
 * MPEG-2 codec registers itself here.
 */
247 i965_media_init(VADriverContextP ctx)
249     i965_media_mpeg2_init(ctx);
/*
 * Driver teardown: drop every buffer object owned by the media state and
 * NULL the pointers so a stale handle can never be reused, then let the
 * MPEG-2 codec release its own resources.
 */
254 i965_media_terminate(VADriverContextP ctx)
256     struct i965_driver_data *i965 = i965_driver_data(ctx);
257     struct i965_media_state *media_state = &i965->media_state;
260     for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
261         dri_bo_unreference(media_state->surface_state[i].bo);
262         media_state->surface_state[i].bo = NULL;
265     dri_bo_unreference(media_state->extended_state.bo);
266     media_state->extended_state.bo = NULL;
268     dri_bo_unreference(media_state->vfe_state.bo);
269     media_state->vfe_state.bo = NULL;
271     dri_bo_unreference(media_state->idrt.bo);
272     media_state->idrt.bo = NULL;
274     dri_bo_unreference(media_state->binding_table.bo);
275     media_state->binding_table.bo = NULL;
277     dri_bo_unreference(media_state->curbe.bo);
278     media_state->curbe.bo = NULL;
    /* NOTE(review): "ternimate" is a long-standing typo, but it matches the
     * name declared in i965_media_mpeg2.h — renaming it only here would
     * break linkage, so it is left as-is. */
280     i965_media_mpeg2_ternimate(ctx);