/*
 * Copyright © 2017 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Peng Chen <peng.c.chen@intel.com>
 *
 */
#include "intel_batchbuffer.h"
#include "i965_defines.h"
#include "i965_drv_video.h"
#include "gen10_hcp_common.h"
/*
 * Emit a fixed-layout HCP command: one header dword (command opcode ORed
 * with the dword length, which is encoded as "payload dwords - 1") followed
 * by a verbatim copy of *param.  Expects `batch` and `param` in the calling
 * scope; wrapped in do { } while (0) so it behaves as a single statement.
 */
#define HCP_WRITE_COMMANDS(command_flag) do {                       \
        int cmd_size = sizeof(*param) / sizeof(uint32_t);           \
                                                                    \
        BEGIN_BCS_BATCH(batch, cmd_size + 1);                       \
        OUT_BCS_BATCH(batch, (command_flag) | (cmd_size - 1));      \
        intel_batchbuffer_data(batch, param, sizeof(*param));       \
        ADVANCE_BCS_BATCH(batch);                                   \
    } while (0)
50 gen10_hcp_pipe_mode_select(VADriverContextP ctx,
51 struct intel_batchbuffer *batch,
52 gen10_hcp_pipe_mode_select_param *param)
54 HCP_WRITE_COMMANDS(HCP_PIPE_MODE_SELECT);
58 gen10_hcp_surface_state(VADriverContextP ctx,
59 struct intel_batchbuffer *batch,
60 gen10_hcp_surface_state_param *param)
62 HCP_WRITE_COMMANDS(HCP_SURFACE_STATE);
66 gen10_hcp_pic_state(VADriverContextP ctx,
67 struct intel_batchbuffer *batch,
68 gen10_hcp_pic_state_param *param)
70 HCP_WRITE_COMMANDS(HCP_PIC_STATE);
74 gen10_hcp_vp9_pic_state(VADriverContextP ctx,
75 struct intel_batchbuffer *batch,
76 gen10_hcp_vp9_pic_state_param *param)
78 HCP_WRITE_COMMANDS(HCP_VP9_PIC_STATE);
82 gen10_hcp_qm_state(VADriverContextP ctx,
83 struct intel_batchbuffer *batch,
84 gen10_hcp_qm_state_param *param)
86 HCP_WRITE_COMMANDS(HCP_QM_STATE);
91 gen10_hcp_fqm_state(VADriverContextP ctx,
92 struct intel_batchbuffer *batch,
93 gen10_hcp_fqm_state_param *param)
95 HCP_WRITE_COMMANDS(HCP_FQM_STATE);
99 gen10_hcp_rdoq_state(VADriverContextP ctx,
100 struct intel_batchbuffer *batch,
101 gen10_hcp_rdoq_state_param *param)
103 HCP_WRITE_COMMANDS(HCP_RDOQ_STATE);
107 gen10_hcp_weightoffset_state(VADriverContextP ctx,
108 struct intel_batchbuffer *batch,
109 gen10_hcp_weightoffset_state_param *param)
111 HCP_WRITE_COMMANDS(HCP_WEIGHTOFFSET);
115 gen10_hcp_slice_state(VADriverContextP ctx,
116 struct intel_batchbuffer *batch,
117 gen10_hcp_slice_state_param *param)
119 HCP_WRITE_COMMANDS(HCP_SLICE_STATE);
123 gen10_hcp_ref_idx_state(VADriverContextP ctx,
124 struct intel_batchbuffer *batch,
125 gen10_hcp_ref_idx_state_param *param)
127 HCP_WRITE_COMMANDS(HCP_REF_IDX_STATE);
131 gen10_hcp_vp9_segment_state(VADriverContextP ctx,
132 struct intel_batchbuffer *batch,
133 gen10_hcp_vp9_segment_state_param *param)
135 HCP_WRITE_COMMANDS(HCP_VP9_SEGMENT_STATE);
139 gen10_hcp_pak_insert_object(VADriverContextP ctx,
140 struct intel_batchbuffer *batch,
141 gen10_hcp_pak_insert_object_param *param)
143 int payload_bits = param->inline_payload_bits;
144 int cmd_size_in_dw = ALIGN(payload_bits, 32) >> 5;
146 BEGIN_BCS_BATCH(batch, cmd_size_in_dw + 2);
148 OUT_BCS_BATCH(batch, HCP_INSERT_PAK_OBJECT | (cmd_size_in_dw));
150 OUT_BCS_BATCH(batch, param->dw1.value);
151 intel_batchbuffer_data(batch, param->inline_payload_ptr,
154 ADVANCE_BCS_BATCH(batch);
/*
 * Emit a 64-bit graphics address (2 dwords) for a GPE resource, or two
 * zero dwords when no resource/BO is present.  `is_target` selects the
 * render write domain for the relocation; `delta` is the byte offset
 * into the BO.  NOTE(review): reconstructed null handling — confirm the
 * nested bo check against the original tree.
 */
#define OUT_BUFFER_2DW(batch, gpe_res, is_target, delta) do {           \
        if (gpe_res) {                                                  \
            struct i965_gpe_resource * res = gpe_res;                   \
            dri_bo *bo = res->bo;                                       \
                                                                        \
            if (bo) {                                                   \
                OUT_BCS_RELOC64(batch,                                  \
                                bo,                                     \
                                I915_GEM_DOMAIN_RENDER,                 \
                                is_target ? I915_GEM_DOMAIN_RENDER : 0, \
                                delta);                                 \
            } else {                                                    \
                OUT_BCS_BATCH(batch, 0);                                \
                OUT_BCS_BATCH(batch, 0);                                \
            }                                                           \
        } else {                                                        \
            OUT_BCS_BATCH(batch, 0);                                    \
            OUT_BCS_BATCH(batch, 0);                                    \
        }                                                               \
    } while (0)
/*
 * Emit an address pair plus the memory-attributes dword (3 dwords total):
 * the 2-dword address, then the MOCS state when a resource is present or
 * zero otherwise.  Requires `i965` in the calling scope.
 */
#define OUT_BUFFER_3DW(batch, gpe_res, is_target, delta) do {   \
        OUT_BUFFER_2DW(batch, gpe_res, is_target, delta);       \
        if (gpe_res)                                            \
            OUT_BCS_BATCH(batch, i965->intel.mocs_state);       \
        else                                                    \
            OUT_BCS_BATCH(batch, 0);                            \
    } while (0)
187 gen10_hcp_pipe_buf_addr_state(VADriverContextP ctx,
188 struct intel_batchbuffer *batch,
189 gen10_hcp_pipe_buf_addr_state_param *param)
191 struct i965_driver_data *i965 = i965_driver_data(ctx);
194 BEGIN_BCS_BATCH(batch, 104);
196 OUT_BCS_BATCH(batch, HCP_PIPE_BUF_ADDR_STATE | (104 - 2));
199 OUT_BUFFER_3DW(batch, param->reconstructed,
203 OUT_BUFFER_3DW(batch, param->deblocking_filter_line,
207 OUT_BUFFER_3DW(batch, param->deblocking_filter_tile_line,
211 OUT_BUFFER_3DW(batch, param->deblocking_filter_tile_column,
215 OUT_BUFFER_3DW(batch, param->metadata_line,
219 OUT_BUFFER_3DW(batch, param->metadata_tile_line,
223 OUT_BUFFER_3DW(batch, param->metadata_tile_column,
227 OUT_BUFFER_3DW(batch, param->sao_line,
231 OUT_BUFFER_3DW(batch, param->sao_tile_line,
235 OUT_BUFFER_3DW(batch, param->sao_tile_column,
239 OUT_BUFFER_3DW(batch, param->current_motion_vector_temporal,
243 OUT_BUFFER_3DW(batch, NULL, 0, 0);
246 for (i = 0; i < 8; i++)
247 OUT_BUFFER_2DW(batch, param->reference_picture[i],
250 OUT_BCS_BATCH(batch, i965->intel.mocs_state);
253 OUT_BUFFER_3DW(batch, param->uncompressed_picture,
257 OUT_BUFFER_3DW(batch, param->streamout_data_destination,
261 OUT_BUFFER_3DW(batch, param->picture_status,
265 OUT_BUFFER_3DW(batch, param->ildb_streamout,
269 for (i = 0; i < 8; i++)
270 OUT_BUFFER_2DW(batch, param->collocated_motion_vector_temporal[i],
273 OUT_BCS_BATCH(batch, i965->intel.mocs_state);
276 OUT_BUFFER_3DW(batch, param->vp9_probability,
280 OUT_BUFFER_3DW(batch, param->vp9_segmentid,
284 OUT_BUFFER_3DW(batch, param->vp9_hvd_line_rowstore,
288 OUT_BUFFER_3DW(batch, param->vp9_hvd_time_rowstore,
292 OUT_BUFFER_3DW(batch, param->sao_streamout_data_destination,
296 OUT_BUFFER_3DW(batch, param->frame_statics_streamout_data_destination,
300 OUT_BUFFER_3DW(batch, param->sse_source_pixel_rowstore,
303 ADVANCE_BCS_BATCH(batch);
307 gen10_hcp_ind_obj_base_addr_state(VADriverContextP ctx,
308 struct intel_batchbuffer *batch,
309 gen10_hcp_ind_obj_base_addr_state_param *param)
311 struct i965_driver_data *i965 = i965_driver_data(ctx);
313 BEGIN_BCS_BATCH(batch, 29);
315 OUT_BCS_BATCH(batch, HCP_IND_OBJ_BASE_ADDR_STATE | (29 - 2));
318 OUT_BUFFER_3DW(batch, NULL, 0, 0);
319 OUT_BUFFER_2DW(batch, NULL, 0, 0);
322 OUT_BUFFER_3DW(batch,
323 param->ind_cu_obj_bse,
325 param->ind_cu_obj_bse_offset);
328 OUT_BUFFER_3DW(batch,
331 param->ind_pak_bse_offset);
333 OUT_BUFFER_2DW(batch,
336 param->ind_pak_bse_upper);
339 OUT_BUFFER_3DW(batch, NULL, 0, 0);
342 OUT_BUFFER_3DW(batch, NULL, 0, 0);
345 OUT_BUFFER_3DW(batch, NULL, 0, 0);
348 OUT_BUFFER_3DW(batch, NULL, 0, 0);
349 OUT_BUFFER_3DW(batch, NULL, 0, 0);
351 ADVANCE_BCS_BATCH(batch);