OSDN Git Service

Fix a typo
[android-x86/hardware-intel-common-vaapi.git] / src / intel_batchbuffer.h
1 #ifndef _INTEL_BATCHBUFFER_H_
2 #define _INTEL_BATCHBUFFER_H_
3
4 #include <xf86drm.h>
5 #include <drm.h>
6 #include <i915_drm.h>
7 #include <intel_bufmgr.h>
8
9 #include "intel_driver.h"
10
/*
 * State for one in-flight command batch.
 * NOTE(review): field semantics below are inferred from names and the
 * accessor prototypes in this header — confirm against intel_batchbuffer.c.
 */
struct intel_batchbuffer {
    struct intel_driver_data *intel;   /* back-pointer to the owning driver instance */
    dri_bo *buffer;                    /* buffer object backing the batch commands */
    unsigned int size;                 /* allocated size of the batch, presumably in bytes */
    unsigned char *map;                /* CPU mapping of buffer (valid while mapped) */
    unsigned char *ptr;                /* current write cursor within map */
    int atomic;                        /* non-zero while inside a start_atomic/end_atomic section */
    int flag;                          /* ring selection flags, checked against I915_EXEC_RING_MASK */

    int emit_total;                    /* bytes reserved by the current BEGIN_BATCH — see begin/advance */
    unsigned char *emit_start;         /* write cursor at the time of the current BEGIN_BATCH */

    /* Submission callback: hands the filled batch to the kernel for execution. */
    int (*run)(drm_intel_bo *bo, int used,
               drm_clip_rect_t *cliprects, int num_cliprects,
               int DR4, unsigned int ring_flag);

    /* Used for Sandybridge workaround */
    dri_bo *wa_render_bo;
};
30
31 struct intel_batchbuffer *intel_batchbuffer_new(struct intel_driver_data *intel, int flag, int buffer_size);
32 void intel_batchbuffer_free(struct intel_batchbuffer *batch);
33 void intel_batchbuffer_start_atomic(struct intel_batchbuffer *batch, unsigned int size);
34 void intel_batchbuffer_start_atomic_bcs(struct intel_batchbuffer *batch, unsigned int size);
35 void intel_batchbuffer_start_atomic_blt(struct intel_batchbuffer *batch, unsigned int size);
36 void intel_batchbuffer_start_atomic_veb(struct intel_batchbuffer *batch, unsigned int size);
37 void intel_batchbuffer_end_atomic(struct intel_batchbuffer *batch);
38 void intel_batchbuffer_emit_dword(struct intel_batchbuffer *batch, unsigned int x);
39 void intel_batchbuffer_emit_reloc(struct intel_batchbuffer *batch, dri_bo *bo,
40                                   uint32_t read_domains, uint32_t write_domains,
41                                   uint32_t delta);
42 void intel_batchbuffer_emit_reloc64(struct intel_batchbuffer *batch, dri_bo *bo,
43                                     uint32_t read_domains, uint32_t write_domains,
44                                     uint32_t delta);
45 void intel_batchbuffer_require_space(struct intel_batchbuffer *batch, unsigned int size);
46 void intel_batchbuffer_data(struct intel_batchbuffer *batch, void *data, unsigned int size);
47 void intel_batchbuffer_emit_mi_flush(struct intel_batchbuffer *batch);
48 void intel_batchbuffer_flush(struct intel_batchbuffer *batch);
49 void intel_batchbuffer_begin_batch(struct intel_batchbuffer *batch, int total);
50 void intel_batchbuffer_advance_batch(struct intel_batchbuffer *batch);
51 void intel_batchbuffer_check_batchbuffer_flag(struct intel_batchbuffer *batch, int flag);
52 int intel_batchbuffer_check_free_space(struct intel_batchbuffer *batch, int size);
53 int intel_batchbuffer_used_size(struct intel_batchbuffer *batch);
54 void intel_batchbuffer_align(struct intel_batchbuffer *batch, unsigned int alignedment);
55
/*
 * Selects which BSD (video) ring a batch is submitted to.
 * Values are made explicit so the kernel-facing encoding is obvious.
 */
typedef enum {
    BSD_DEFAULT = 0,
    BSD_RING0   = 1,
    BSD_RING1   = 2
} bsd_ring_flag;
61
/* Like intel_batchbuffer_start_atomic_bcs, but lets the caller force a
 * specific BSD ring via override_flag instead of the default selection. */
void intel_batchbuffer_start_atomic_bcs_override(struct intel_batchbuffer *batch, unsigned int size,
                                                 bsd_ring_flag override_flag);
64
/*
 * Open an n-dword command sequence on the ring identified by f.
 * Verifies the batch really targets ring f, ensures n*4 bytes are free,
 * and records the reservation for the matching __ADVANCE_BATCH check.
 * All arguments are parenthesized in the expansion so compound
 * expressions cannot mis-associate (CERT PRE01-C).  Note `batch` is
 * still evaluated more than once — pass a side-effect-free expression.
 */
#define __BEGIN_BATCH(batch, n, f) do {                                   \
        assert((f) == ((batch)->flag & I915_EXEC_RING_MASK));             \
        intel_batchbuffer_check_batchbuffer_flag((batch), (batch)->flag); \
        intel_batchbuffer_require_space((batch), (n) * 4);                \
        intel_batchbuffer_begin_batch((batch), (n));                      \
    } while (0)
71
/* Append one dword d to the batch.  Arguments parenthesized for macro hygiene. */
#define __OUT_BATCH(batch, d) do {                  \
        intel_batchbuffer_emit_dword((batch), (d)); \
    } while (0)
75
/*
 * Emit a 32-bit relocation entry for bo at offset delta.
 * The assert catches negative signed delta expressions before they are
 * implicitly converted to the uint32_t parameter of emit_reloc.
 * Arguments parenthesized for macro hygiene.
 */
#define __OUT_RELOC(batch, bo, read_domains, write_domain, delta) do {  \
        assert((delta) >= 0);                                           \
        intel_batchbuffer_emit_reloc((batch), (bo),                     \
                                     (read_domains), (write_domain),    \
                                     (delta));                          \
    } while (0)
82
/*
 * Handle 48-bit address relocations for Gen8+.
 * Same delta sanity check as __OUT_RELOC (previously missing here);
 * arguments parenthesized and indentation aligned with the other macros.
 */
#define __OUT_RELOC64(batch, bo, read_domains, write_domain, delta) do { \
        assert((delta) >= 0);                                            \
        intel_batchbuffer_emit_reloc64((batch), (bo),                    \
                                       (read_domains), (write_domain),   \
                                       (delta));                         \
    } while (0)
89
/* Close the command sequence opened by __BEGIN_BATCH and validate the
 * emitted dword count.  Argument parenthesized for macro hygiene. */
#define __ADVANCE_BATCH(batch) do {               \
        intel_batchbuffer_advance_batch((batch)); \
    } while (0)
93
/* Ring-specific front-end macros.  BEGIN_* pins the batch to a ring
 * (render / blitter / BSD video / VEBOX); the matching OUT_* and
 * ADVANCE_* aliases all share one implementation and exist only so call
 * sites read consistently with their BEGIN_* variant. */
#define BEGIN_BATCH(batch, n)           __BEGIN_BATCH(batch, n, I915_EXEC_RENDER)
#define BEGIN_BLT_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_BLT)
#define BEGIN_BCS_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_BSD)
#define BEGIN_VEB_BATCH(batch, n)       __BEGIN_BATCH(batch, n, I915_EXEC_VEBOX)

#define OUT_BATCH(batch, d)             __OUT_BATCH(batch, d)
#define OUT_BLT_BATCH(batch, d)         __OUT_BATCH(batch, d)
#define OUT_BCS_BATCH(batch, d)         __OUT_BATCH(batch, d)
#define OUT_VEB_BATCH(batch, d)         __OUT_BATCH(batch, d)

/* 32-bit relocations. */
#define OUT_RELOC(batch, bo, read_domains, write_domain, delta) \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define OUT_BLT_RELOC(batch, bo, read_domains, write_domain, delta)     \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define OUT_BCS_RELOC(batch, bo, read_domains, write_domain, delta)     \
    __OUT_RELOC(batch, bo, read_domains, write_domain, delta)
/* 64-bit (48-bit address) relocations for Gen8+. */
#define OUT_RELOC64(batch, bo, read_domains, write_domain, delta)       \
    __OUT_RELOC64(batch, bo, read_domains, write_domain, delta)
#define OUT_BCS_RELOC64(batch, bo, read_domains, write_domain, delta)   \
    __OUT_RELOC64(batch, bo, read_domains, write_domain, delta)

#define ADVANCE_BATCH(batch)            __ADVANCE_BATCH(batch)
#define ADVANCE_BLT_BATCH(batch)        __ADVANCE_BATCH(batch)
#define ADVANCE_BCS_BATCH(batch)        __ADVANCE_BATCH(batch)
#define ADVANCE_VEB_BATCH(batch)        __ADVANCE_BATCH(batch)
119
120 #endif /* _INTEL_BATCHBUFFER_H_ */