Code cleanup for vme/mfc initializing on SKL
[android-x86/hardware-intel-common-vaapi.git] src/intel_batchbuffer.h
#ifndef _INTEL_BATCHBUFFER_H_
#define _INTEL_BATCHBUFFER_H_

#include <xf86drm.h>
#include <drm.h>
#include <i915_drm.h>
#include <intel_bufmgr.h>

#include "intel_driver.h"

struct intel_batchbuffer
{
    struct intel_driver_data *intel;   /* owning driver instance */
    dri_bo *buffer;                    /* GEM buffer object backing the batch */
    unsigned int size;                 /* allocated size of the batch buffer */
    unsigned char *map;                /* CPU mapping of the buffer */
    unsigned char *ptr;                /* current write position inside the mapping */
    int atomic;                        /* non-zero while an atomic section is open */
    int flag;                          /* ring selection (I915_EXEC_RENDER, I915_EXEC_BSD, ...) */

    /* Bookkeeping for the command packet currently being built
     * (see the BEGIN_*_BATCH()/ADVANCE_*_BATCH() macros below). */
    int emit_total;
    unsigned char *emit_start;

    /* Submission hook; the signature matches libdrm's drm_intel_bo_mrb_exec(). */
    int (*run)(drm_intel_bo *bo, int used,
               drm_clip_rect_t *cliprects, int num_cliprects,
               int DR4, unsigned int ring_flag);

    /* Used for Sandybridge workaround */
    dri_bo *wa_render_bo;
};
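
/*
 * Note (a sketch of the expected wiring, not defined in this header): the
 * implementation in intel_batchbuffer.c is expected to allocate the backing
 * buffer object and point 'run' at a libdrm submission helper, roughly:
 *
 *     batch->buffer = dri_bo_alloc(bufmgr, "batch buffer", buffer_size, 4096);
 *     batch->run    = drm_intel_bo_mrb_exec;
 *
 * 'bufmgr' and 'buffer_size' are placeholders for values owned by the driver.
 */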

struct intel_batchbuffer *intel_batchbuffer_new(struct intel_driver_data *intel, int flag, int buffer_size);
void intel_batchbuffer_free(struct intel_batchbuffer *batch);
void intel_batchbuffer_start_atomic(struct intel_batchbuffer *batch, unsigned int size);
void intel_batchbuffer_start_atomic_bcs(struct intel_batchbuffer *batch, unsigned int size);
void intel_batchbuffer_start_atomic_blt(struct intel_batchbuffer *batch, unsigned int size);
void intel_batchbuffer_start_atomic_veb(struct intel_batchbuffer *batch, unsigned int size);
void intel_batchbuffer_end_atomic(struct intel_batchbuffer *batch);
void intel_batchbuffer_emit_dword(struct intel_batchbuffer *batch, unsigned int x);
void intel_batchbuffer_emit_reloc(struct intel_batchbuffer *batch, dri_bo *bo,
                                  uint32_t read_domains, uint32_t write_domains,
                                  uint32_t delta);
void intel_batchbuffer_emit_reloc64(struct intel_batchbuffer *batch, dri_bo *bo,
                                    uint32_t read_domains, uint32_t write_domains,
                                    uint32_t delta);
void intel_batchbuffer_require_space(struct intel_batchbuffer *batch, unsigned int size);
void intel_batchbuffer_data(struct intel_batchbuffer *batch, void *data, unsigned int size);
void intel_batchbuffer_emit_mi_flush(struct intel_batchbuffer *batch);
void intel_batchbuffer_flush(struct intel_batchbuffer *batch);
void intel_batchbuffer_begin_batch(struct intel_batchbuffer *batch, int total);
void intel_batchbuffer_advance_batch(struct intel_batchbuffer *batch);
void intel_batchbuffer_check_batchbuffer_flag(struct intel_batchbuffer *batch, int flag);
int intel_batchbuffer_check_free_space(struct intel_batchbuffer *batch, int size);
int intel_batchbuffer_used_size(struct intel_batchbuffer *batch);
void intel_batchbuffer_align(struct intel_batchbuffer *batch, unsigned int alignment);
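
/*
 * Minimal usage sketch (illustrative only, assuming a valid
 * struct intel_driver_data *intel): create a batch buffer, emit a pipeline
 * flush, submit it, and release it.  0x8000 is an arbitrary example size.
 *
 *     struct intel_batchbuffer *batch =
 *         intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0x8000);
 *
 *     intel_batchbuffer_emit_mi_flush(batch);
 *     intel_batchbuffer_flush(batch);      submits through batch->run
 *     intel_batchbuffer_free(batch);
 */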

typedef enum {
    BSD_DEFAULT,
    BSD_RING0,
    BSD_RING1,
} bsd_ring_flag;

void intel_batchbuffer_start_atomic_bcs_override(struct intel_batchbuffer *batch, unsigned int size,
                                                 bsd_ring_flag override_flag);
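
/*
 * Illustrative sketch: on hardware with two BSD (video) rings, a caller can
 * pin BSD work to a specific ring rather than the default selection, e.g.
 *
 *     intel_batchbuffer_start_atomic_bcs_override(batch, 0x1000, BSD_RING1);
 *     ... emit video commands ...
 *     intel_batchbuffer_end_atomic(batch);
 *
 * The 0x1000 reservation size is a placeholder.
 */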

#define __BEGIN_BATCH(batch, n, f) do {                                 \
        assert(f == (batch->flag & I915_EXEC_RING_MASK));               \
        intel_batchbuffer_check_batchbuffer_flag(batch, batch->flag);   \
        intel_batchbuffer_require_space(batch, (n) * 4);                \
        intel_batchbuffer_begin_batch(batch, (n));                      \
    } while (0)

#define __OUT_BATCH(batch, d) do {              \
        intel_batchbuffer_emit_dword(batch, d); \
    } while (0)

#define __OUT_RELOC(batch, bo, read_domains, write_domain, delta) do {  \
        assert((delta) >= 0);                                           \
        intel_batchbuffer_emit_reloc(batch, bo,                         \
                                     read_domains, write_domain,        \
                                     delta);                            \
    } while (0)

/* Handle 48-bit address relocations for Gen8+ */
#define __OUT_RELOC64(batch, bo, read_domains, write_domain, delta) do { \
        intel_batchbuffer_emit_reloc64(batch, bo,                        \
                                       read_domains, write_domain,       \
                                       delta);                           \
    } while (0)

#define __ADVANCE_BATCH(batch) do {             \
        intel_batchbuffer_advance_batch(batch); \
    } while (0)

#define BEGIN_BATCH(batch, n)           __BEGIN_BATCH(batch, n, I915_EXEC_RENDER)
#define BEGIN_BLT_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_BLT)
#define BEGIN_BCS_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_BSD)
#define BEGIN_VEB_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_VEBOX)

#define OUT_BATCH(batch, d)             __OUT_BATCH(batch, d)
#define OUT_BLT_BATCH(batch, d)         __OUT_BATCH(batch, d)
#define OUT_BCS_BATCH(batch, d)         __OUT_BATCH(batch, d)
#define OUT_VEB_BATCH(batch, d)         __OUT_BATCH(batch, d)

#define OUT_RELOC(batch, bo, read_domains, write_domain, delta) \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define OUT_BLT_RELOC(batch, bo, read_domains, write_domain, delta)     \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define OUT_BCS_RELOC(batch, bo, read_domains, write_domain, delta)     \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define OUT_RELOC64(batch, bo, read_domains, write_domain, delta)       \
    __OUT_RELOC64(batch, bo, read_domains, write_domain, delta)
#define OUT_BCS_RELOC64(batch, bo, read_domains, write_domain, delta)   \
    __OUT_RELOC64(batch, bo, read_domains, write_domain, delta)

#define ADVANCE_BATCH(batch)            __ADVANCE_BATCH(batch)
#define ADVANCE_BLT_BATCH(batch)        __ADVANCE_BATCH(batch)
#define ADVANCE_BCS_BATCH(batch)        __ADVANCE_BATCH(batch)
#define ADVANCE_VEB_BATCH(batch)        __ADVANCE_BATCH(batch)
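
/*
 * Typical emission pattern with the ring-specific macros (a sketch; 'bo' is
 * an assumed dri_bo handle and the zero dwords are placeholders, not a real
 * command).  The count passed to BEGIN_*_BATCH is in dwords; a relocation
 * emitted with OUT_*_RELOC occupies one dword, OUT_*_RELOC64 two.
 *
 *     BEGIN_BCS_BATCH(batch, 3);
 *     OUT_BCS_BATCH(batch, 0);
 *     OUT_BCS_RELOC(batch, bo,
 *                   I915_GEM_DOMAIN_RENDER, 0, 0);
 *     OUT_BCS_BATCH(batch, 0);
 *     ADVANCE_BCS_BATCH(batch);
 */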

#endif /* _INTEL_BATCHBUFFER_H_ */