/*
Copyright (C) The Weather Channel, Inc. 2002. All Rights Reserved.

The Weather Channel (TM) funded Tungsten Graphics to develop the
initial release of the Radeon 8500 driver under the XFree86 license.
This notice must be preserved.

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice (including the
next paragraph) shall be included in all copies or substantial
portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

**************************************************************************/

/*
 * Authors:
 *   Nicolai Haehnle <prefect_@gmx.net>
 */

#include "glheader.h"
#include "state.h"
#include "imports.h"
#include "macros.h"
#include "context.h"
#include "swrast/swrast.h"
#include "simple_list.h"

#include "drm.h"
#include "radeon_drm.h"

#include "radeon_ioctl.h"
#include "r300_context.h"
#include "r300_ioctl.h"
#include "radeon_reg.h"
#include "r300_reg.h"
#include "r300_cmdbuf.h"
#include "r300_emit.h"


/* Set this to 1 for extremely verbose debugging of command buffers */
#define DEBUG_CMDBUF	0


/**
 * Send the current command buffer via ioctl to the hardware.
 */
int r300FlushCmdBufLocked(r300ContextPtr r300, const char *caller)
{
	int ret;
	int i;
	drm_radeon_cmd_buffer_t cmd;
	int start;

	if (r300->radeon.lost_context) {
		start = 0;
		r300->radeon.lost_context = GL_FALSE;
	} else
		start = r300->cmdbuf.count_reemit;

	if (RADEON_DEBUG & DEBUG_IOCTL) {
		fprintf(stderr, "%s from %s - %i cliprects\n",
			__FUNCTION__, caller, r300->radeon.numClipRects);

		if (DEBUG_CMDBUF && RADEON_DEBUG & DEBUG_VERBOSE)
			for (i = start; i < r300->cmdbuf.count_used; ++i)
				fprintf(stderr, "%d: %08x\n", i,
					r300->cmdbuf.cmd_buf[i]);
	}

	cmd.buf = (char *)(r300->cmdbuf.cmd_buf + start);
	cmd.bufsz = (r300->cmdbuf.count_used - start) * 4;

	if (r300->radeon.state.scissor.enabled) {
		cmd.nbox = r300->radeon.state.scissor.numClipRects;
		cmd.boxes = (drm_clip_rect_t *)r300->radeon.state.scissor.pClipRects;
	} else {
		cmd.nbox = r300->radeon.numClipRects;
		cmd.boxes = (drm_clip_rect_t *)r300->radeon.pClipRects;
	}

	ret = drmCommandWrite(r300->radeon.dri.fd,
			      DRM_RADEON_CMDBUF, &cmd, sizeof(cmd));

	if (RADEON_DEBUG & DEBUG_SYNC) {
		fprintf(stderr, "Syncing in %s (from %s)\n\n",
			__FUNCTION__, caller);
		radeonWaitForIdleLocked(&r300->radeon);
	}

	r300->dma.nr_released_bufs = 0;
	r300->cmdbuf.count_used = 0;
	r300->cmdbuf.count_reemit = 0;

	return ret;
}


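/**
 * Flush the command buffer while holding the hardware lock. An ioctl
 * failure is treated as fatal and terminates the process.
 */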
int r300FlushCmdBuf(r300ContextPtr r300, const char *caller)
{
	int ret;

	LOCK_HARDWARE(&r300->radeon);

	ret = r300FlushCmdBufLocked(r300, caller);

	UNLOCK_HARDWARE(&r300->radeon);

	if (ret) {
		fprintf(stderr, "drmRadeonCmdBuffer: %d (exiting)\n", ret);
		exit(ret);
	}

	return ret;
}


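/**
 * Print the contents of a state atom (for debugging).
 */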
static void print_state_atom(struct r300_state_atom *state, int dwords)
{
	int i;

	fprintf(stderr, "  emit %s/%d/%d\n", state->name, dwords,
		state->cmd_size);

	if (RADEON_DEBUG & DEBUG_VERBOSE)
		for (i = 0; i < dwords; i++)
			fprintf(stderr, "      %s[%d]: %08X\n", state->name, i,
				state->cmd[i]);
}

/**
 * Emit all atoms with a dirty field equal to dirty.
 *
 * The caller must have ensured that there is enough space in the command
 * buffer.
 */
static __inline__ void r300DoEmitState(r300ContextPtr r300, GLboolean dirty)
{
	struct r300_state_atom *atom;
	uint32_t *dest;

	dest = r300->cmdbuf.cmd_buf + r300->cmdbuf.count_used;

	if (DEBUG_CMDBUF && RADEON_DEBUG & DEBUG_STATE) {
		foreach(atom, &r300->hw.atomlist) {
			if ((atom->dirty || r300->hw.all_dirty) == dirty) {
				int dwords = (*atom->check)(r300, atom);

				if (dwords)
					print_state_atom(atom, dwords);
				else
					fprintf(stderr, "  skip state %s\n",
						atom->name);
			}
		}
	}

	foreach(atom, &r300->hw.atomlist) {
		if ((atom->dirty || r300->hw.all_dirty) == dirty) {
			int dwords = (*atom->check)(r300, atom);

			if (dwords) {
				memcpy(dest, atom->cmd, dwords * 4);
				dest += dwords;
				r300->cmdbuf.count_used += dwords;
				atom->dirty = GL_FALSE;
			}
		}
	}
}

/**
 * Copy dirty hardware state atoms into the command buffer.
 *
 * We also copy out clean state if we're at the start of a buffer. That makes
 * it easy to recover from lost contexts.
 */
void r300EmitState(r300ContextPtr r300)
{
	if (RADEON_DEBUG & (DEBUG_STATE | DEBUG_PRIMS))
		fprintf(stderr, "%s\n", __FUNCTION__);

	if (r300->cmdbuf.count_used && !r300->hw.is_dirty && !r300->hw.all_dirty)
		return;

	/* To avoid going across the entire set of states multiple times, just check
	 * for enough space for the case of emitting all state, and inline the
	 * r300AllocCmdBuf code here without all the checks.
	 */
	r300EnsureCmdBufSpace(r300, r300->hw.max_state_size, __FUNCTION__);

	if (!r300->cmdbuf.count_used) {
		if (RADEON_DEBUG & DEBUG_STATE)
			fprintf(stderr, "Begin reemit state\n");

		r300DoEmitState(r300, GL_FALSE);
		r300->cmdbuf.count_reemit = r300->cmdbuf.count_used;
	}

	if (RADEON_DEBUG & DEBUG_STATE)
		fprintf(stderr, "Begin dirty state\n");

	r300DoEmitState(r300, GL_TRUE);

	assert(r300->cmdbuf.count_used < r300->cmdbuf.size);

	r300->hw.is_dirty = GL_FALSE;
	r300->hw.all_dirty = GL_FALSE;
}

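/*
 * The helpers below are compiled out; the cmducs()/cmdvpu() command-header
 * builders actually used in r300InitCmdBuf() are presumably provided by one
 * of the included headers (e.g. r300_cmdbuf.h).
 */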
#if 0

static __inline__ uint32_t cmducs(int reg, int count)
{
	drm_r300_cmd_header_t cmd;

	cmd.unchecked_state.cmd_type = R300_CMD_UNCHECKED_STATE;
	cmd.unchecked_state.count = count;
	cmd.unchecked_state.reghi = ((unsigned int)reg & 0xFF00) >> 8;
	cmd.unchecked_state.reglo = ((unsigned int)reg & 0x00FF);

	return cmd.u;
}

static __inline__ uint32_t cmdvpu(int addr, int count)
{
	drm_r300_cmd_header_t cmd;

	cmd.vpu.cmd_type = R300_CMD_VPU;
	cmd.vpu.count = count;
	cmd.vpu.adrhi = ((unsigned int)addr & 0xFF00) >> 8;
	cmd.vpu.adrlo = ((unsigned int)addr & 0x00FF);

	return cmd.u;
}
#endif
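/*
 * CHECK( NM, COUNT ) defines a check_##NM() callback that returns the
 * number of dwords an atom will emit (0 means the atom is skipped).  The
 * "variable" and "vpu" variants read the payload count out of the atom's
 * own command header; each vertex-program (VPU) instruction is 4 dwords.
 */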
#define CHECK( NM, COUNT )					\
static int check_##NM( r300ContextPtr r300,			\
		       struct r300_state_atom* atom )		\
{								\
	(void) atom; (void) r300;				\
	return (COUNT);						\
}

#define ucscount(ptr) (((drm_r300_cmd_header_t*)(ptr))->unchecked_state.count)
#define vpucount(ptr) (((drm_r300_cmd_header_t*)(ptr))->vpu.count)

CHECK( always, atom->cmd_size )
CHECK( never, 0 )
CHECK( variable, ucscount(atom->cmd) ? (1 + ucscount(atom->cmd)) : 0 )
CHECK( vpu, vpucount(atom->cmd) ? (1 + vpucount(atom->cmd)*4) : 0 )

#undef ucscount
#undef vpucount

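/*
 * ALLOC_STATE initializes one state atom: command storage, debug name,
 * index and check callback, and accounts its size in max_state_size so the
 * command buffer can always hold a full state emit.
 */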
#define ALLOC_STATE( ATOM, CHK, SZ, NM, IDX )				\
do {									\
	r300->hw.ATOM.cmd_size = (SZ);					\
	r300->hw.ATOM.cmd = (uint32_t*)CALLOC((SZ) * sizeof(uint32_t));	\
	r300->hw.ATOM.name = (NM);					\
	r300->hw.ATOM.idx = (IDX);					\
	r300->hw.ATOM.check = check_##CHK;				\
	r300->hw.ATOM.dirty = GL_FALSE;					\
	r300->hw.max_state_size += (SZ);				\
} while (0)


/**
 * Allocate memory for the command buffer and initialize the state atom
 * list. Note that the initial hardware state is set by r300InitState().
 */
void r300InitCmdBuf(r300ContextPtr r300)
{
	int size, mtu;

	r300->hw.max_state_size = 0;

	mtu = r300->radeon.glCtx->Const.MaxTextureUnits;
	if (RADEON_DEBUG & DEBUG_TEXTURE) {
		fprintf(stderr, "Using %d maximum texture units..\n", mtu);
	}

	/* Initialize state atoms */
	ALLOC_STATE( vpt, always, R300_VPT_CMDSIZE, "vpt", 0 );
	r300->hw.vpt.cmd[R300_VPT_CMD_0] = cmducs(R300_SE_VPORT_XSCALE, 6);
	ALLOC_STATE( unk2080, always, 2, "unk2080", 0 );
	r300->hw.unk2080.cmd[0] = cmducs(0x2080, 1);
	ALLOC_STATE( vte, always, 3, "vte", 0 );
	r300->hw.vte.cmd[0] = cmducs(R300_SE_VTE_CNTL, 2);
	ALLOC_STATE( unk2134, always, 3, "unk2134", 0 );
	r300->hw.unk2134.cmd[0] = cmducs(0x2134, 2);
	ALLOC_STATE( unk2140, always, 2, "unk2140", 0 );
	r300->hw.unk2140.cmd[0] = cmducs(0x2140, 1);
	ALLOC_STATE( vir[0], variable, R300_VIR_CMDSIZE, "vir/0", 0 );
	r300->hw.vir[0].cmd[R300_VIR_CMD_0] = cmducs(R300_VAP_INPUT_ROUTE_0_0, 1);
	ALLOC_STATE( vir[1], variable, R300_VIR_CMDSIZE, "vir/1", 1 );
	r300->hw.vir[1].cmd[R300_VIR_CMD_0] = cmducs(R300_VAP_INPUT_ROUTE_1_0, 1);
	ALLOC_STATE( vic, always, R300_VIC_CMDSIZE, "vic", 0 );
	r300->hw.vic.cmd[R300_VIC_CMD_0] = cmducs(R300_VAP_INPUT_CNTL_0, 2);
	ALLOC_STATE( unk21DC, always, 2, "unk21DC", 0 );
	r300->hw.unk21DC.cmd[0] = cmducs(0x21DC, 1);
	ALLOC_STATE( unk221C, always, 2, "unk221C", 0 );
	r300->hw.unk221C.cmd[0] = cmducs(0x221C, 1);
	ALLOC_STATE( unk2220, always, 5, "unk2220", 0 );
	r300->hw.unk2220.cmd[0] = cmducs(0x2220, 4);
	ALLOC_STATE( unk2288, always, 2, "unk2288", 0 );
	r300->hw.unk2288.cmd[0] = cmducs(0x2288, 1);
	ALLOC_STATE( vof, always, R300_VOF_CMDSIZE, "vof", 0 );
	r300->hw.vof.cmd[R300_VOF_CMD_0] = cmducs(R300_VAP_OUTPUT_VTX_FMT_0, 2);
	ALLOC_STATE( pvs, always, R300_PVS_CMDSIZE, "pvs", 0 );
	r300->hw.pvs.cmd[R300_PVS_CMD_0] = cmducs(R300_VAP_PVS_CNTL_1, 3);
	ALLOC_STATE( gb_enable, always, 2, "gb_enable", 0 );
	r300->hw.gb_enable.cmd[0] = cmducs(R300_GB_ENABLE, 1);
	ALLOC_STATE( gb_misc, always, R300_GB_MISC_CMDSIZE, "gb_misc", 0 );
	r300->hw.gb_misc.cmd[0] = cmducs(R300_GB_MSPOS0, 5);
	ALLOC_STATE( txe, always, R300_TXE_CMDSIZE, "txe", 0 );
	r300->hw.txe.cmd[R300_TXE_CMD_0] = cmducs(R300_TX_ENABLE, 1);
	ALLOC_STATE( unk4200, always, 5, "unk4200", 0 );
	r300->hw.unk4200.cmd[0] = cmducs(0x4200, 4);
	ALLOC_STATE( unk4214, always, 2, "unk4214", 0 );
	r300->hw.unk4214.cmd[0] = cmducs(0x4214, 1);
	ALLOC_STATE( ps, always, R300_PS_CMDSIZE, "ps", 0 );
	r300->hw.ps.cmd[0] = cmducs(R300_RE_POINTSIZE, 1);
	ALLOC_STATE( unk4230, always, 4, "unk4230", 0 );
	r300->hw.unk4230.cmd[0] = cmducs(0x4230, 3);
	ALLOC_STATE( lcntl, always, 2, "lcntl", 0 );
	r300->hw.lcntl.cmd[0] = cmducs(R300_RE_LINE_CNT, 1);
	ALLOC_STATE( unk4260, always, 4, "unk4260", 0 );
	r300->hw.unk4260.cmd[0] = cmducs(0x4260, 3);
	ALLOC_STATE( unk4274, always, 5, "unk4274", 0 );
	r300->hw.unk4274.cmd[0] = cmducs(0x4274, 4);
	ALLOC_STATE( unk4288, always, 6, "unk4288", 0 );
	r300->hw.unk4288.cmd[0] = cmducs(0x4288, 5);
	ALLOC_STATE( unk42A0, always, 2, "unk42A0", 0 );
	r300->hw.unk42A0.cmd[0] = cmducs(0x42A0, 1);
	ALLOC_STATE( zbs, always, R300_ZBS_CMDSIZE, "zbs", 0 );
	r300->hw.zbs.cmd[R300_ZBS_CMD_0] = cmducs(R300_RE_ZBIAS_T_FACTOR, 4);
	ALLOC_STATE( unk42B4, always, 2, "unk42B4", 0 );
	r300->hw.unk42B4.cmd[0] = cmducs(0x42B4, 1);
	ALLOC_STATE( cul, always, R300_CUL_CMDSIZE, "cul", 0 );
	r300->hw.cul.cmd[R300_CUL_CMD_0] = cmducs(R300_RE_CULL_CNTL, 1);
	ALLOC_STATE( unk42C0, always, 3, "unk42C0", 0 );
	r300->hw.unk42C0.cmd[0] = cmducs(0x42C0, 2);
	ALLOC_STATE( rc, always, R300_RC_CMDSIZE, "rc", 0 );
	r300->hw.rc.cmd[R300_RC_CMD_0] = cmducs(R300_RS_CNTL_0, 2);
	ALLOC_STATE( ri, always, R300_RI_CMDSIZE, "ri", 0 );
	r300->hw.ri.cmd[R300_RI_CMD_0] = cmducs(R300_RS_INTERP_0, 8);
	ALLOC_STATE( rr, variable, R300_RR_CMDSIZE, "rr", 0 );
	r300->hw.rr.cmd[R300_RR_CMD_0] = cmducs(R300_RS_ROUTE_0, 1);
	ALLOC_STATE( unk43A4, always, 3, "unk43A4", 0 );
	r300->hw.unk43A4.cmd[0] = cmducs(0x43A4, 2);
	ALLOC_STATE( unk43E8, always, 2, "unk43E8", 0 );
	r300->hw.unk43E8.cmd[0] = cmducs(0x43E8, 1);
	ALLOC_STATE( fp, always, R300_FP_CMDSIZE, "fp", 0 );
	r300->hw.fp.cmd[R300_FP_CMD_0] = cmducs(R300_PFS_CNTL_0, 3);
	r300->hw.fp.cmd[R300_FP_CMD_1] = cmducs(R300_PFS_NODE_0, 4);
	ALLOC_STATE( fpt, variable, R300_FPT_CMDSIZE, "fpt", 0 );
	r300->hw.fpt.cmd[R300_FPT_CMD_0] = cmducs(R300_PFS_TEXI_0, 0);
	ALLOC_STATE( unk46A4, always, 6, "unk46A4", 0 );
	r300->hw.unk46A4.cmd[0] = cmducs(0x46A4, 5);
	ALLOC_STATE( fpi[0], variable, R300_FPI_CMDSIZE, "fpi/0", 0 );
	r300->hw.fpi[0].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR0_0, 1);
	ALLOC_STATE( fpi[1], variable, R300_FPI_CMDSIZE, "fpi/1", 1 );
	r300->hw.fpi[1].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR1_0, 1);
	ALLOC_STATE( fpi[2], variable, R300_FPI_CMDSIZE, "fpi/2", 2 );
	r300->hw.fpi[2].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR2_0, 1);
	ALLOC_STATE( fpi[3], variable, R300_FPI_CMDSIZE, "fpi/3", 3 );
	r300->hw.fpi[3].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR3_0, 1);
	ALLOC_STATE( unk4BC0, always, 2, "unk4BC0", 0 );
	r300->hw.unk4BC0.cmd[0] = cmducs(0x4BC0, 1);
	ALLOC_STATE( unk4BC8, always, 4, "unk4BC8", 0 );
	r300->hw.unk4BC8.cmd[0] = cmducs(0x4BC8, 3);
	ALLOC_STATE( at, always, R300_AT_CMDSIZE, "at", 0 );
	r300->hw.at.cmd[R300_AT_CMD_0] = cmducs(R300_PP_ALPHA_TEST, 2);
	ALLOC_STATE( unk4BD8, always, 2, "unk4BD8", 0 );
	r300->hw.unk4BD8.cmd[0] = cmducs(0x4BD8, 1);
	ALLOC_STATE( fpp, variable, R300_FPP_CMDSIZE, "fpp", 0 );
	r300->hw.fpp.cmd[R300_FPP_CMD_0] = cmducs(R300_PFS_PARAM_0_X, 0);
	ALLOC_STATE( unk4E00, always, 2, "unk4E00", 0 );
	r300->hw.unk4E00.cmd[0] = cmducs(0x4E00, 1);
	ALLOC_STATE( bld, always, R300_BLD_CMDSIZE, "bld", 0 );
	r300->hw.bld.cmd[R300_BLD_CMD_0] = cmducs(R300_RB3D_CBLEND, 2);
	ALLOC_STATE( cmk, always, R300_CMK_CMDSIZE, "cmk", 0 );
	r300->hw.cmk.cmd[R300_CMK_CMD_0] = cmducs(R300_RB3D_COLORMASK, 1);
	ALLOC_STATE( unk4E10, always, 4, "unk4E10", 0 );
	r300->hw.unk4E10.cmd[0] = cmducs(0x4E10, 3);
	ALLOC_STATE( cb, always, R300_CB_CMDSIZE, "cb", 0 );
	r300->hw.cb.cmd[R300_CB_CMD_0] = cmducs(R300_RB3D_COLOROFFSET0, 1);
	r300->hw.cb.cmd[R300_CB_CMD_1] = cmducs(R300_RB3D_COLORPITCH0, 1);
	ALLOC_STATE( unk4E50, always, 10, "unk4E50", 0 );
	r300->hw.unk4E50.cmd[0] = cmducs(0x4E50, 9);
	ALLOC_STATE( unk4E88, always, 2, "unk4E88", 0 );
	r300->hw.unk4E88.cmd[0] = cmducs(0x4E88, 1);
	ALLOC_STATE( unk4EA0, always, 3, "unk4EA0 R350 only", 0 );
	r300->hw.unk4EA0.cmd[0] = cmducs(0x4EA0, 2);
	ALLOC_STATE( zs, always, R300_ZS_CMDSIZE, "zstencil", 0 );
	r300->hw.zs.cmd[R300_ZS_CMD_0] = cmducs(R300_RB3D_ZSTENCIL_CNTL_0, 3);
	ALLOC_STATE( unk4F10, always, 5, "unk4F10", 0 );
	r300->hw.unk4F10.cmd[0] = cmducs(0x4F10, 4);
	ALLOC_STATE( zb, always, R300_ZB_CMDSIZE, "zb", 0 );
	r300->hw.zb.cmd[R300_ZB_CMD_0] = cmducs(R300_RB3D_DEPTHOFFSET, 2);
	ALLOC_STATE( unk4F28, always, 2, "unk4F28", 0 );
	r300->hw.unk4F28.cmd[0] = cmducs(0x4F28, 1);
	ALLOC_STATE( unk4F30, always, 3, "unk4F30", 0 );
	r300->hw.unk4F30.cmd[0] = cmducs(0x4F30, 2);
	ALLOC_STATE( unk4F44, always, 2, "unk4F44", 0 );
	r300->hw.unk4F44.cmd[0] = cmducs(0x4F44, 1);
	ALLOC_STATE( unk4F54, always, 2, "unk4F54", 0 );
	r300->hw.unk4F54.cmd[0] = cmducs(0x4F54, 1);

	ALLOC_STATE( vpi, vpu, R300_VPI_CMDSIZE, "vpi", 0 );
	r300->hw.vpi.cmd[R300_VPI_CMD_0] = cmdvpu(R300_PVS_UPLOAD_PROGRAM, 0);
	ALLOC_STATE( vpp, vpu, R300_VPP_CMDSIZE, "vpp", 0 );
	r300->hw.vpp.cmd[R300_VPP_CMD_0] = cmdvpu(R300_PVS_UPLOAD_PARAMETERS, 0);
	ALLOC_STATE( vps, vpu, R300_VPS_CMDSIZE, "vps", 0 );
	r300->hw.vps.cmd[R300_VPS_CMD_0] = cmdvpu(R300_PVS_UPLOAD_POINTSIZE, 1);

	/* Textures */
	ALLOC_STATE( tex.filter, variable, mtu+1, "tex_filter", 0 );
	r300->hw.tex.filter.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_FILTER_0, 0);

	ALLOC_STATE( tex.unknown1, variable, mtu+1, "tex_unknown1", 0 );
	r300->hw.tex.unknown1.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_UNK1_0, 0);

	ALLOC_STATE( tex.size, variable, mtu+1, "tex_size", 0 );
	r300->hw.tex.size.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_SIZE_0, 0);

	ALLOC_STATE( tex.format, variable, mtu+1, "tex_format", 0 );
	r300->hw.tex.format.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_FORMAT_0, 0);

	ALLOC_STATE( tex.offset, variable, mtu+1, "tex_offset", 0 );
	r300->hw.tex.offset.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_OFFSET_0, 0);

	ALLOC_STATE( tex.unknown4, variable, mtu+1, "tex_unknown4", 0 );
	r300->hw.tex.unknown4.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_UNK4_0, 0);

	ALLOC_STATE( tex.border_color, variable, mtu+1, "tex_border_color", 0 );
	r300->hw.tex.border_color.cmd[R300_TEX_CMD_0] = cmducs(R300_TX_BORDER_COLOR_0, 0);


	/* Setup the atom linked list */
	make_empty_list(&r300->hw.atomlist);
	r300->hw.atomlist.name = "atom-list";

	insert_at_tail(&r300->hw.atomlist, &r300->hw.vpt);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2080);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vte);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2134);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2140);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vir[0]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vir[1]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vic);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk21DC);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk221C);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2220);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2288);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vof);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.pvs);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.gb_enable);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.gb_misc);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.txe);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4200);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4214);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.ps);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4230);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.lcntl);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4260);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4274);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4288);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42A0);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.zbs);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42B4);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.cul);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42C0);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.rc);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.ri);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.rr);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk43A4);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk43E8);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fp);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpt);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk46A4);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[0]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[1]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[2]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[3]);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BC0);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BC8);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.at);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BD8);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.fpp);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E00);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.bld);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.cmk);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E10);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.cb);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E50);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E88);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4EA0);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.zs);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F10);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.zb);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F28);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F30);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F44);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F54);

	insert_at_tail(&r300->hw.atomlist, &r300->hw.vpi);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vpp);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.vps);

	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.filter);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.unknown1);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.size);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.format);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.offset);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.unknown4);
	insert_at_tail(&r300->hw.atomlist, &r300->hw.tex.border_color);

	r300->hw.is_dirty = GL_TRUE;
	r300->hw.all_dirty = GL_TRUE;

	/* Initialize the command buffer (sizes below are in 32-bit dwords) */
	size = 256 * driQueryOptioni(&r300->radeon.optionCache, "command_buffer_size");
	if (size < 2 * r300->hw.max_state_size) {
		size = 2 * r300->hw.max_state_size + 65535;
	}
	if (size > 64 * 256)
		size = 64 * 256;

	if (RADEON_DEBUG & (DEBUG_IOCTL | DEBUG_DMA)) {
		fprintf(stderr, "sizeof(drm_r300_cmd_header_t)=%d\n",
			(int)sizeof(drm_r300_cmd_header_t));
		fprintf(stderr, "sizeof(drm_radeon_cmd_buffer_t)=%d\n",
			(int)sizeof(drm_radeon_cmd_buffer_t));
		fprintf(stderr,
			"Allocating %d bytes command buffer (max state is %d bytes)\n",
			size * 4, r300->hw.max_state_size * 4);
	}

	r300->cmdbuf.size = size;
	r300->cmdbuf.cmd_buf = (uint32_t*)CALLOC(size * 4);
	r300->cmdbuf.count_used = 0;
	r300->cmdbuf.count_reemit = 0;
}


/**
 * Destroy the command buffer and state atoms.
 */
void r300DestroyCmdBuf(r300ContextPtr r300)
{
	struct r300_state_atom *atom;

	FREE(r300->cmdbuf.cmd_buf);

	foreach(atom, &r300->hw.atomlist) {
		FREE(atom->cmd);
	}
}

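/**
 * Emit a CP BITBLT_MULTI packet that copies a w x h rectangle from the
 * source surface to the destination surface.  Pitches must be multiples
 * of 64 and offsets must be 1024-byte aligned (see the asserts below).
 */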
void r300EmitBlit(r300ContextPtr rmesa,
		  GLuint color_fmt,
		  GLuint src_pitch,
		  GLuint src_offset,
		  GLuint dst_pitch,
		  GLuint dst_offset,
		  GLint srcx, GLint srcy,
		  GLint dstx, GLint dsty, GLuint w, GLuint h)
{
	drm_radeon_cmd_header_t *cmd;

	if (RADEON_DEBUG & DEBUG_IOCTL)
		fprintf(stderr,
			"%s src %x/%x %d,%d dst: %x/%x %d,%d sz: %dx%d\n",
			__FUNCTION__, src_pitch, src_offset, srcx, srcy,
			dst_pitch, dst_offset, dstx, dsty, w, h);

	assert((src_pitch & 63) == 0);
	assert((dst_pitch & 63) == 0);
	assert((src_offset & 1023) == 0);
	assert((dst_offset & 1023) == 0);
	assert(w < (1 << 16));
	assert(h < (1 << 16));

	cmd = (drm_radeon_cmd_header_t *) r300AllocCmdBuf(rmesa, 8,
							  __FUNCTION__);

	cmd[0].header.cmd_type = R300_CMD_PACKET3;
	cmd[1].i = R200_CP_CMD_BITBLT_MULTI | (5 << 16);
	cmd[2].i = (RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
		    RADEON_GMC_DST_PITCH_OFFSET_CNTL |
		    RADEON_GMC_BRUSH_NONE |
		    (color_fmt << 8) |
		    RADEON_GMC_SRC_DATATYPE_COLOR |
		    RADEON_ROP3_S |
		    RADEON_DP_SRC_SOURCE_MEMORY |
		    RADEON_GMC_CLR_CMP_CNTL_DIS | RADEON_GMC_WR_MSK_DIS);

	cmd[3].i = ((src_pitch / 64) << 22) | (src_offset >> 10);
	cmd[4].i = ((dst_pitch / 64) << 22) | (dst_offset >> 10);
	cmd[5].i = (srcx << 16) | srcy;
	cmd[6].i = (dstx << 16) | dsty;	/* dst */
	cmd[7].i = (w << 16) | h;
}

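/**
 * Emit an R300_CMD_WAIT for the 2D and/or 3D engine to go idle.  Only
 * emitted when the DRM is recent enough (minor >= 6) to know the command.
 */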
void r300EmitWait(r300ContextPtr rmesa, GLuint flags)
{
	if (rmesa->radeon.dri.drmMinor >= 6) {
		drm_radeon_cmd_header_t *cmd;

		assert(!(flags & ~(R300_WAIT_2D | R300_WAIT_3D)));

		cmd = (drm_radeon_cmd_header_t *) r300AllocCmdBuf(rmesa, 1,
								  __FUNCTION__);
		cmd[0].i = 0;
		cmd[0].wait.cmd_type = R300_CMD_WAIT;
		cmd[0].wait.flags = flags;
	}
}

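/**
 * Emit a 3D_LOAD_VBPNTR packet describing the vertex arrays-of-structures:
 * one size/stride control dword plus an address per array, packed two
 * arrays per loop iteration.  Each array's start address is advanced by
 * offset * 4 * stride bytes.
 */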
void r300EmitAOS(r300ContextPtr rmesa, GLuint nr, GLuint offset)
{
	int sz = 1 + (nr >> 1) * 3 + (nr & 1) * 2;
	int i;
	LOCAL_VARS

	if (RADEON_DEBUG & DEBUG_VERTS)
		fprintf(stderr, "%s: nr=%d, ofs=0x%08x\n", __func__, nr, offset);

	start_packet3(RADEON_CP_PACKET3_3D_LOAD_VBPNTR, sz - 1);
	e32(nr);
	for (i = 0; i + 1 < nr; i += 2) {
		e32((rmesa->state.aos[i].aos_size << 0) |
		    (rmesa->state.aos[i].aos_stride << 8) |
		    (rmesa->state.aos[i + 1].aos_size << 16) |
		    (rmesa->state.aos[i + 1].aos_stride << 24));
		e32(rmesa->state.aos[i].aos_offset +
		    offset * 4 * rmesa->state.aos[i].aos_stride);
		e32(rmesa->state.aos[i + 1].aos_offset +
		    offset * 4 * rmesa->state.aos[i + 1].aos_stride);
	}
	if (nr & 1) {
		e32((rmesa->state.aos[nr - 1].aos_size << 0) |
		    (rmesa->state.aos[nr - 1].aos_stride << 8));
		e32(rmesa->state.aos[nr - 1].aos_offset +
		    offset * 4 * rmesa->state.aos[nr - 1].aos_stride);
	}
}