#ifndef INTEL_BATCHBUFFER_H
#define INTEL_BATCHBUFFER_H

#include "dri_bufmgr.h"

/* Size of each batchbuffer allocation, in bytes.
 * NOTE(review): the allocation site is not visible in this chunk —
 * presumably intel_batchbuffer_alloc(); confirm.
 */
#define BATCH_SZ 16384

/* Bytes at the tail of the batch excluded from usable space (subtracted
 * from the size in intel_batchbuffer_space() below); presumably reserved
 * for the batch-terminating command — confirm.
 */
#define BATCH_RESERVED 16
15 * Batchbuffer contents may be looped over per cliprect, but do not
20 * Batchbuffer contents require looping over per cliprect at batch submit
25 * Batchbuffer contents contain drawing that should not be executed multiple
30 * Batchbuffer contents contain drawing that already handles cliprects, such
31 * as 2D drawing to front/back/depth that doesn't respect DRAWING_RECTANGLE.
32 * Equivalent behavior to NO_LOOP_CLIPRECTS, but may not persist in batch
33 * outside of LOCK/UNLOCK.
/* A batchbuffer: accumulates hardware commands prior to submission.
 * NOTE(review): the struct's braces and several members (buf, map, ptr,
 * size — all referenced by the inline helpers later in this file) are
 * not visible in this chunk.
 */
struct intel_batchbuffer

   /* Context that owns this batchbuffer. */
   struct intel_context *intel;

   /* Presumably the fence from the most recent flush of this batch —
    * confirm against the .c file.
    */
   dri_fence *last_fence;

   /* Cliprect handling required by the commands currently in the batch;
    * maintained by intel_batchbuffer_require_space() below.
    */
   enum cliprect_mode cliprect_mode;
/* Allocate a new batchbuffer for the given context.
 * NOTE(review): the remainder of this prototype's parameter list is not
 * visible in this chunk.
 */
struct intel_batchbuffer *intel_batchbuffer_alloc(struct intel_context

/* Free the batchbuffer. */
void intel_batchbuffer_free(struct intel_batchbuffer *batch);

/* Presumably flushes the batch and waits for it to complete —
 * implementation not visible here; confirm.
 */
void intel_batchbuffer_finish(struct intel_batchbuffer *batch);

/* Flush implementation.  Call through the intel_batchbuffer_flush()
 * macro below so the caller's file/line are captured for debugging.
 */
void _intel_batchbuffer_flush(struct intel_batchbuffer *batch,
                              const char *file, int line);

#define intel_batchbuffer_flush(batch) \
   _intel_batchbuffer_flush(batch, __FILE__, __LINE__)

/* Reset the batch to an empty state so new commands can be emitted. */
void intel_batchbuffer_reset(struct intel_batchbuffer *batch);
/* Copy a block of data into the batchbuffer.
 *
 * Unlike bmBufferData, this currently requires the buffer be mapped.
 * Consider it a convenience function wrapping multiple
 * intel_buffer_dword() calls.
 */
void intel_batchbuffer_data(struct intel_batchbuffer *batch,
                            const void *data, GLuint bytes,
                            enum cliprect_mode cliprect_mode);
/* Give back space previously reserved in the batch.
 * NOTE(review): the remainder of this prototype (a byte count,
 * presumably) is not visible in this chunk.
 */
void intel_batchbuffer_release_space(struct intel_batchbuffer *batch,

/* Emit a relocation entry into the batch.
 * NOTE(review): part of this parameter list (most likely the target
 * buffer object) is not visible in this chunk; only the flags and
 * offset arguments survive here.
 */
GLboolean
intel_batchbuffer_emit_reloc(struct intel_batchbuffer *batch,
                             GLuint flags, GLuint offset);
87 /* Inline functions - might actually be better off with these
88 * non-inlined. Certainly better off switching all command packets to
89 * be passed as structs rather than dwords, but that's a little bit of
/* Bytes of usable space remaining in the batch: the buffer size minus
 * the reserved tail, minus what has already been written (ptr - map).
 * NOTE(review): the return-type line and the function's braces are not
 * visible in this chunk.
 */
intel_batchbuffer_space(struct intel_batchbuffer *batch)
   return (batch->size - BATCH_RESERVED) - (batch->ptr - batch->map);
/* Append a single dword to the batch at the current write pointer.
 * Caller must have reserved space first (intel_batchbuffer_require_space).
 * NOTE(review): the return-type line, braces, and the advance of
 * batch->ptr after the store are not visible in this chunk — confirm
 * the pointer is advanced by 4 here.
 */
intel_batchbuffer_emit_dword(struct intel_batchbuffer *batch, GLuint dword)
   assert(intel_batchbuffer_space(batch) >= 4);
   *(GLuint *) (batch->ptr) = dword;
/* Ensure space is available in the batch (flushing first if not) and
 * reconcile the batch's cliprect mode with the caller's request:
 *  - an IGNORE_CLIPRECTS request imposes no constraint;
 *  - a batch still in IGNORE_CLIPRECTS mode adopts the requested mode;
 *  - a conflicting mode forces a flush before switching.
 * NOTE(review): the `sz` size parameter's declaration, the braces, and
 * presumably an `else` between the two inner ifs are not visible in
 * this chunk.
 */
intel_batchbuffer_require_space(struct intel_batchbuffer *batch,
                                enum cliprect_mode cliprect_mode)
   /* A request near the full buffer size could never be satisfied. */
   assert(sz < batch->size - 8);

   if (intel_batchbuffer_space(batch) < sz)
      intel_batchbuffer_flush(batch);

   if (cliprect_mode != IGNORE_CLIPRECTS) {
      if (batch->cliprect_mode == IGNORE_CLIPRECTS) {
         /* First constrained emission into this batch: adopt the
          * caller's mode.
          */
         batch->cliprect_mode = cliprect_mode;

      if (batch->cliprect_mode != cliprect_mode) {
         /* Conflicting mode: submit what we have, then switch. */
         intel_batchbuffer_flush(batch);
         batch->cliprect_mode = cliprect_mode;
/* Here are the crusty old macros, to be removed.  They assume a local
 * `intel` context pointer is in scope at the point of use.
 * NOTE(review): the `} while (0)` terminators of BEGIN_BATCH and
 * OUT_RELOC are not visible in this chunk.
 */
#define BEGIN_BATCH(n, cliprect_mode) do { \
   intel_batchbuffer_require_space(intel->batch, (n)*4, cliprect_mode); \

#define OUT_BATCH(d) intel_batchbuffer_emit_dword(intel->batch, d)

#define OUT_RELOC(buf, cliprect_mode, delta) do { \
   assert((delta) >= 0); \
   intel_batchbuffer_emit_reloc(intel->batch, buf, cliprect_mode, delta); \

#define ADVANCE_BATCH() do { } while(0)