/* src/mesa/drivers/dri/r300/r300_cmdbuf.c */
/*
Copyright (C) The Weather Channel, Inc. 2002. All Rights Reserved.

The Weather Channel (TM) funded Tungsten Graphics to develop the
initial release of the Radeon 8500 driver under the XFree86 license.
This notice must be preserved.

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice (including the
next paragraph) shall be included in all copies or substantial
portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

**************************************************************************/

/*
 * Authors:
 *   Nicolai Haehnle <prefect_@gmx.net>
 */

#include "glheader.h"
#include "state.h"
#include "imports.h"
#include "macros.h"
#include "context.h"
#include "swrast/swrast.h"
#include "simple_list.h"

#include "drm.h"
#include "radeon_drm.h"

#include "radeon_ioctl.h"
#include "r300_context.h"
#include "r300_ioctl.h"
#include "r300_reg.h"
#include "r300_cmdbuf.h"

/**
 * Send the current command buffer via ioctl to the hardware.
 */
int r300FlushCmdBuf(r300ContextPtr r300, const char* caller)
{
    int ret;
    int i;
    drm_radeon_cmd_buffer_t cmd;
    int start;

    /* If the context was lost, resend the whole buffer, including the clean
     * state re-emitted at its start; otherwise skip that part, since the
     * hardware already has it. */
    if (r300->radeon.lost_context)
        start = 0;
    else
        start = r300->cmdbuf.count_reemit;

    if (RADEON_DEBUG & DEBUG_IOCTL) {
        fprintf(stderr, "%s from %s - %i cliprects\n",
                __FUNCTION__, caller, r300->radeon.numClipRects);

        if (RADEON_DEBUG & DEBUG_VERBOSE)
            for (i = start; i < r300->cmdbuf.count_used; ++i)
                fprintf(stderr, "%d: %08x\n", i,
                        r300->cmdbuf.cmd_buf[i]);
    }

    LOCK_HARDWARE(&r300->radeon);

    cmd.buf = (char*)(r300->cmdbuf.cmd_buf + start);
    cmd.bufsz = (r300->cmdbuf.count_used - start) * 4;

    if (r300->radeon.state.scissor.enabled) {
        cmd.nbox = r300->radeon.state.scissor.numClipRects;
        cmd.boxes = (drm_clip_rect_t *)r300->radeon.state.scissor.pClipRects;
    } else {
        cmd.nbox = r300->radeon.numClipRects;
        cmd.boxes = (drm_clip_rect_t *)r300->radeon.pClipRects;
    }

    if (cmd.nbox) {
        ret = drmCommandWrite(r300->radeon.dri.fd,
                              DRM_RADEON_CMDBUF, &cmd, sizeof(cmd));
        if (ret) {
            UNLOCK_HARDWARE(&r300->radeon);
            fprintf(stderr, "drmCommandWrite: %d\n", ret);
            exit(-1);
        }

        if (RADEON_DEBUG & DEBUG_SYNC) {
            fprintf(stderr, "Syncing in %s\n\n", __FUNCTION__);
            radeonWaitForIdleLocked(&r300->radeon);
        }
    } else {
        if (RADEON_DEBUG & DEBUG_IOCTL)
            fprintf(stderr, "%s: No cliprects\n", __FUNCTION__);
    }

    UNLOCK_HARDWARE(&r300->radeon);

    r300->cmdbuf.count_used = 0;
    r300->cmdbuf.count_reemit = 0;

    return 0;
}
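
/*
 * Illustrative sketch (not driver code): how code elsewhere in the driver
 * can append a raw packet to the command buffer using the helpers in this
 * file and in r300_cmdbuf.h. The register offset and value are placeholders,
 * and cmducs() is defined further down in this file; the real emit paths use
 * their own macros and state atoms.
 */
#if 0
static void example_emit_raw_packet(r300ContextPtr r300)
{
    uint32_t *cmd;

    /* Make sure at least two more dwords fit in the buffer (helper from
     * r300_cmdbuf.h, used the same way in r300EmitState() below). */
    r300EnsureCmdBufSpace(r300, 2, __FUNCTION__);

    /* Append a header plus one register value and account for them. */
    cmd = r300->cmdbuf.cmd_buf + r300->cmdbuf.count_used;
    cmd[0] = cmducs(0x4F00, 1);   /* hypothetical register offset */
    cmd[1] = 0;                   /* placeholder register value */
    r300->cmdbuf.count_used += 2;
}
#endif
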

static void print_state_atom(struct r300_state_atom *state, int dwords)
{
    int i;

    fprintf(stderr, " emit %s/%d/%d\n", state->name, dwords, state->cmd_size);

    if (RADEON_DEBUG & DEBUG_VERBOSE)
        for (i = 0; i < dwords; i++)
            fprintf(stderr, " %s[%d]: %08X\n", state->name, i,
                    state->cmd[i]);
}

/**
 * Emit all atoms with a dirty field equal to dirty.
 *
 * The caller must have ensured that there is enough space in the command
 * buffer.
 */
static __inline__ void r300DoEmitState(r300ContextPtr r300, GLboolean dirty)
{
    struct r300_state_atom* atom;
    uint32_t* dest;

    dest = r300->cmdbuf.cmd_buf + r300->cmdbuf.count_used;

    if (RADEON_DEBUG & DEBUG_STATE) {
        foreach(atom, &r300->hw.atomlist) {
            if ((atom->dirty || r300->hw.all_dirty) == dirty) {
                int dwords = (*atom->check)(r300, atom);

                if (dwords)
                    print_state_atom(atom, dwords);
                else
                    fprintf(stderr, " skip state %s\n",
                            atom->name);
            }
        }
    }

    foreach(atom, &r300->hw.atomlist) {
        if ((atom->dirty || r300->hw.all_dirty) == dirty) {
            int dwords = (*atom->check)(r300, atom);

            if (dwords) {
                memcpy(dest, atom->cmd, dwords*4);
                dest += dwords;
                r300->cmdbuf.count_used += dwords;
                atom->dirty = GL_FALSE;
            }
        }
    }
}

/**
 * Copy dirty hardware state atoms into the command buffer.
 *
 * We also copy out clean state if we're at the start of a buffer. That makes
 * it easy to recover from lost contexts.
 */
void r300EmitState(r300ContextPtr r300)
{
    if (RADEON_DEBUG & (DEBUG_STATE | DEBUG_PRIMS))
        fprintf(stderr, "%s\n", __FUNCTION__);

    if (r300->cmdbuf.count_used && !r300->hw.is_dirty && !r300->hw.all_dirty)
        return;

    /* To avoid going across the entire set of states multiple times, just check
     * for enough space for the case of emitting all state, and inline the
     * r300AllocCmdBuf code here without all the checks.
     */
    r300EnsureCmdBufSpace(r300, r300->hw.max_state_size, __FUNCTION__);

    if (!r300->cmdbuf.count_used) {
        if (RADEON_DEBUG & DEBUG_STATE)
            fprintf(stderr, "Begin reemit state\n");

        r300DoEmitState(r300, GL_FALSE);
        r300->cmdbuf.count_reemit = r300->cmdbuf.count_used;
    }

    if (RADEON_DEBUG & DEBUG_STATE)
        fprintf(stderr, "Begin dirty state\n");

    r300DoEmitState(r300, GL_TRUE);

    assert(r300->cmdbuf.count_used < r300->cmdbuf.size);

    r300->hw.is_dirty = GL_FALSE;
    r300->hw.all_dirty = GL_FALSE;
}
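
/*
 * Illustrative sketch (not driver code): how state-changing code elsewhere
 * in the driver (r300_state.c and friends) is expected to cooperate with
 * r300EmitState(). The payload value written below is a placeholder.
 */
#if 0
static void example_mark_depth_state_dirty(r300ContextPtr r300)
{
    /* Update the depth-control atom's payload, then flag both the atom
     * and the global dirty bit so the next emit copies it out. */
    r300->hw.zc.cmd[1] = 0;       /* placeholder register value */
    r300->hw.zc.dirty = GL_TRUE;
    r300->hw.is_dirty = GL_TRUE;

    r300EmitState(r300);
}
#endif
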

/**
 * Build the header dword for an "unchecked state" packet that writes
 * count consecutive dwords to the register block starting at reg.
 */
static __inline__ uint32_t cmducs(int reg, int count)
{
    drm_r300_cmd_header_t cmd;

    cmd.unchecked_state.cmd_type = R300_CMD_UNCHECKED_STATE;
    cmd.unchecked_state.count = count;
    cmd.unchecked_state.reghi = ((unsigned int)reg & 0xFF00) >> 8;
    cmd.unchecked_state.reglo = ((unsigned int)reg & 0x00FF);

    return cmd.u;
}

/**
 * Build the header dword for a vertex program unit (VPU) upload of
 * count 4-dword vectors starting at VPU address addr.
 */
static __inline__ uint32_t cmdvpu(int addr, int count)
{
    drm_r300_cmd_header_t cmd;

    cmd.vpu.cmd_type = R300_CMD_VPU;
    cmd.vpu.count = count;
    cmd.vpu.adrhi = ((unsigned int)addr & 0xFF00) >> 8;
    cmd.vpu.adrlo = ((unsigned int)addr & 0x00FF);

    return cmd.u;
}
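
/*
 * Sanity sketch for the header encoding above (illustration only): the
 * header produced by cmducs() decodes back into its fields as follows.
 */
#if 0
static void example_decode_cmducs(void)
{
    drm_r300_cmd_header_t h;

    h.u = cmducs(0x4F00, 2);
    assert(h.unchecked_state.cmd_type == R300_CMD_UNCHECKED_STATE);
    assert(h.unchecked_state.count == 2);
    assert(h.unchecked_state.reghi == 0x4F);  /* (0x4F00 & 0xFF00) >> 8 */
    assert(h.unchecked_state.reglo == 0x00);  /*  0x4F00 & 0x00FF */
}
#endif
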

/* Each state atom has a check function that returns the number of dwords
 * it will emit (0 means "skip this atom"). */
#define CHECK( NM, COUNT )                                      \
static int check_##NM( r300ContextPtr r300,                     \
                       struct r300_state_atom* atom )           \
{                                                               \
    (void) atom; (void) r300;                                   \
    return (COUNT);                                             \
}

#define ucscount(ptr) (((drm_r300_cmd_header_t*)(ptr))->unchecked_state.count)
#define vpucount(ptr) (((drm_r300_cmd_header_t*)(ptr))->vpu.count)

CHECK( always, atom->cmd_size )
CHECK( never, 0 )
CHECK( variable, ucscount(atom->cmd) ? (1 + ucscount(atom->cmd)) : 0 )
CHECK( vpu, vpucount(atom->cmd) ? (1 + vpucount(atom->cmd)*4) : 0 )

#undef ucscount

#define ALLOC_STATE( ATOM, CHK, SZ, NM, IDX )                           \
    do {                                                                \
        r300->hw.ATOM.cmd_size = SZ;                                    \
        r300->hw.ATOM.cmd = (uint32_t*)CALLOC(SZ * sizeof(uint32_t));   \
        r300->hw.ATOM.name = NM;                                        \
        r300->hw.ATOM.idx = IDX;                                        \
        r300->hw.ATOM.check = check_##CHK;                              \
        r300->hw.ATOM.dirty = GL_FALSE;                                 \
        r300->hw.max_state_size += SZ;                                  \
    } while (0)
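
/*
 * For reference (illustration only): ignoring the do { } while (0) wrapper
 * and line continuations, ALLOC_STATE( zc, always, R300_ZC_CMDSIZE, "zc", 0 )
 * as used below expands to roughly:
 */
#if 0
r300->hw.zc.cmd_size = R300_ZC_CMDSIZE;
r300->hw.zc.cmd = (uint32_t*)CALLOC(R300_ZC_CMDSIZE * sizeof(uint32_t));
r300->hw.zc.name = "zc";
r300->hw.zc.idx = 0;
r300->hw.zc.check = check_always;   /* check_##CHK with CHK = always */
r300->hw.zc.dirty = GL_FALSE;
r300->hw.max_state_size += R300_ZC_CMDSIZE;
#endif
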

/**
 * Allocate memory for the command buffer and initialize the state atom
 * list. Note that the initial hardware state is set by r300InitState().
 */
void r300InitCmdBuf(r300ContextPtr r300)
{
    int size;

    r300->hw.max_state_size = 0;

    /* Initialize state atoms */
    ALLOC_STATE( vpt, always, R300_VPT_CMDSIZE, "vpt", 0 );
    r300->hw.vpt.cmd[R300_VPT_CMD_0] = cmducs(R300_SE_VPORT_XSCALE, 6);
    ALLOC_STATE( unk2080, always, 2, "unk2080", 0 );
    r300->hw.unk2080.cmd[0] = cmducs(0x2080, 1);
    ALLOC_STATE( ovf, always, R300_OVF_CMDSIZE, "ovf", 0 );
    r300->hw.ovf.cmd[R300_OVF_CMD_0] = cmducs(R300_VAP_OUTPUT_VTX_FMT_0, 2);
    ALLOC_STATE( unk20B0, always, 3, "unk20B0", 0 );
    r300->hw.unk20B0.cmd[0] = cmducs(0x20B0, 2);
    ALLOC_STATE( unk2134, always, 3, "unk2134", 0 );
    r300->hw.unk2134.cmd[0] = cmducs(0x2134, 2);
    ALLOC_STATE( unk2140, always, 2, "unk2140", 0 );
    r300->hw.unk2140.cmd[0] = cmducs(0x2140, 1);
    ALLOC_STATE( vir[0], variable, R300_VIR_CMDSIZE, "vir/0", 0 );
    r300->hw.vir[0].cmd[R300_VIR_CMD_0] = cmducs(R300_VAP_INPUT_ROUTE_0_0, 1);
    ALLOC_STATE( vir[1], variable, R300_VIR_CMDSIZE, "vir/1", 1 );
    r300->hw.vir[1].cmd[R300_VIR_CMD_0] = cmducs(R300_VAP_INPUT_ROUTE_1_0, 1);
    ALLOC_STATE( vic, always, R300_VIC_CMDSIZE, "vic", 0 );
    r300->hw.vic.cmd[R300_VIC_CMD_0] = cmducs(R300_VAP_INPUT_CNTL_0, 2);
    ALLOC_STATE( unk21DC, always, 2, "unk21DC", 0 );
    r300->hw.unk21DC.cmd[0] = cmducs(0x21DC, 1);
    ALLOC_STATE( unk221C, always, 2, "unk221C", 0 );
    r300->hw.unk221C.cmd[0] = cmducs(0x221C, 1);
    ALLOC_STATE( unk2220, always, 5, "unk2220", 0 );
    r300->hw.unk2220.cmd[0] = cmducs(0x2220, 4);
    ALLOC_STATE( unk2288, always, 2, "unk2288", 0 );
    r300->hw.unk2288.cmd[0] = cmducs(0x2288, 1);
    ALLOC_STATE( pvs, always, R300_PVS_CMDSIZE, "pvs", 0 );
    r300->hw.pvs.cmd[R300_PVS_CMD_0] = cmducs(R300_VAP_PVS_CNTL_1, 3);
    ALLOC_STATE( unk4008, always, 2, "unk4008", 0 );
    r300->hw.unk4008.cmd[0] = cmducs(0x4008, 1);
    ALLOC_STATE( unk4010, always, 6, "unk4010", 0 );
    r300->hw.unk4010.cmd[0] = cmducs(0x4010, 5);
    ALLOC_STATE( txe, always, R300_TXE_CMDSIZE, "txe", 0 );
    r300->hw.txe.cmd[R300_TXE_CMD_0] = cmducs(R300_TX_ENABLE, 1);
    ALLOC_STATE( unk4200, always, 5, "unk4200", 0 );
    r300->hw.unk4200.cmd[0] = cmducs(0x4200, 4);
    ALLOC_STATE( unk4214, always, 2, "unk4214", 0 );
    r300->hw.unk4214.cmd[0] = cmducs(0x4214, 1);
    ALLOC_STATE( ps, always, R300_PS_CMDSIZE, "ps", 0 );
    r300->hw.ps.cmd[0] = cmducs(R300_RE_POINTSIZE, 1);
    ALLOC_STATE( unk4230, always, 4, "unk4230", 0 );
    r300->hw.unk4230.cmd[0] = cmducs(0x4230, 3);
    ALLOC_STATE( unk4260, always, 4, "unk4260", 0 );
    r300->hw.unk4260.cmd[0] = cmducs(0x4260, 3);
    ALLOC_STATE( unk4274, always, 5, "unk4274", 0 );
    r300->hw.unk4274.cmd[0] = cmducs(0x4274, 4);
    ALLOC_STATE( unk4288, always, 6, "unk4288", 0 );
    r300->hw.unk4288.cmd[0] = cmducs(0x4288, 5);
    ALLOC_STATE( unk42A0, always, 2, "unk42A0", 0 );
    r300->hw.unk42A0.cmd[0] = cmducs(0x42A0, 1);
    ALLOC_STATE( unk42B4, always, 3, "unk42B4", 0 );
    r300->hw.unk42B4.cmd[0] = cmducs(0x42B4, 2);
    ALLOC_STATE( unk42C0, always, 3, "unk42C0", 0 );
    r300->hw.unk42C0.cmd[0] = cmducs(0x42C0, 2);
    ALLOC_STATE( rc, always, R300_RC_CMDSIZE, "rc", 0 );
    r300->hw.rc.cmd[R300_RC_CMD_0] = cmducs(R300_RS_CNTL_0, 2);
    ALLOC_STATE( ri, always, R300_RI_CMDSIZE, "ri", 0 );
    r300->hw.ri.cmd[R300_RI_CMD_0] = cmducs(R300_RS_INTERP_0, 8);
    ALLOC_STATE( rr, variable, R300_RR_CMDSIZE, "rr", 0 );
    r300->hw.rr.cmd[R300_RR_CMD_0] = cmducs(R300_RS_ROUTE_0, 1);
    ALLOC_STATE( unk43A4, always, 3, "unk43A4", 0 );
    r300->hw.unk43A4.cmd[0] = cmducs(0x43A4, 2);
    ALLOC_STATE( unk43E8, always, 2, "unk43E8", 0 );
    r300->hw.unk43E8.cmd[0] = cmducs(0x43E8, 1);
    ALLOC_STATE( fp, always, R300_FP_CMDSIZE, "fp", 0 );
    r300->hw.fp.cmd[R300_FP_CMD_0] = cmducs(R300_PFS_CNTL_0, 3);
    r300->hw.fp.cmd[R300_FP_CMD_1] = cmducs(R300_PFS_NODE_0, 4);
    ALLOC_STATE( unk46A4, always, 6, "unk46A4", 0 );
    r300->hw.unk46A4.cmd[0] = cmducs(0x46A4, 5);
    ALLOC_STATE( fpi[0], variable, R300_FPI_CMDSIZE, "fpi/0", 0 );
    r300->hw.fpi[0].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR0_0, 1);
    ALLOC_STATE( fpi[1], variable, R300_FPI_CMDSIZE, "fpi/1", 1 );
    r300->hw.fpi[1].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR1_0, 1);
    ALLOC_STATE( fpi[2], variable, R300_FPI_CMDSIZE, "fpi/2", 2 );
    r300->hw.fpi[2].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR2_0, 1);
    ALLOC_STATE( fpi[3], variable, R300_FPI_CMDSIZE, "fpi/3", 3 );
    r300->hw.fpi[3].cmd[R300_FPI_CMD_0] = cmducs(R300_PFS_INSTR3_0, 1);
    ALLOC_STATE( unk4BC0, always, 2, "unk4BC0", 0 );
    r300->hw.unk4BC0.cmd[0] = cmducs(0x4BC0, 1);
    ALLOC_STATE( unk4BC8, always, 4, "unk4BC8", 0 );
    r300->hw.unk4BC8.cmd[0] = cmducs(0x4BC8, 3);
    ALLOC_STATE( at, always, R300_AT_CMDSIZE, "at", 0 );
    r300->hw.at.cmd[R300_AT_CMD_0] = cmducs(R300_PP_ALPHA_TEST, 1);
    ALLOC_STATE( unk4BD8, always, 2, "unk4BD8", 0 );
    r300->hw.unk4BD8.cmd[0] = cmducs(0x4BD8, 1);
    ALLOC_STATE( unk4E00, always, 2, "unk4E00", 0 );
    r300->hw.unk4E00.cmd[0] = cmducs(0x4E00, 1);
    ALLOC_STATE( bld, always, R300_BLD_CMDSIZE, "bld", 0 );
    r300->hw.bld.cmd[R300_BLD_CMD_0] = cmducs(R300_RB3D_CBLEND, 2);
    ALLOC_STATE( cmk, always, R300_CMK_CMDSIZE, "cmk", 0 );
    r300->hw.cmk.cmd[R300_CMK_CMD_0] = cmducs(R300_RB3D_COLORMASK, 1);
    ALLOC_STATE( unk4E10, always, 4, "unk4E10", 0 );
    r300->hw.unk4E10.cmd[0] = cmducs(0x4E10, 3);
    ALLOC_STATE( cb, always, R300_CB_CMDSIZE, "cb", 0 );
    r300->hw.cb.cmd[R300_CB_CMD_0] = cmducs(R300_RB3D_COLOROFFSET0, 1);
    r300->hw.cb.cmd[R300_CB_CMD_1] = cmducs(R300_RB3D_COLORPITCH0, 1);
    ALLOC_STATE( unk4E50, always, 10, "unk4E50", 0 );
    r300->hw.unk4E50.cmd[0] = cmducs(0x4E50, 9);
    ALLOC_STATE( unk4E88, always, 2, "unk4E88", 0 );
    r300->hw.unk4E88.cmd[0] = cmducs(0x4E88, 1);
    ALLOC_STATE( zc, always, R300_ZC_CMDSIZE, "zc", 0 );
    r300->hw.zc.cmd[R300_ZC_CMD_0] = cmducs(R300_RB3D_ZCNTL_0, 2);
    ALLOC_STATE( unk4F08, always, 2, "unk4F08", 0 );
    r300->hw.unk4F08.cmd[0] = cmducs(0x4F08, 1);
    ALLOC_STATE( unk4F10, always, 5, "unk4F10", 0 );
    r300->hw.unk4F10.cmd[0] = cmducs(0x4F10, 4);
    ALLOC_STATE( zb, always, R300_ZB_CMDSIZE, "zb", 0 );
    r300->hw.zb.cmd[R300_ZB_CMD_0] = cmducs(R300_RB3D_DEPTHOFFSET, 2);
    ALLOC_STATE( unk4F28, always, 2, "unk4F28", 0 );
    r300->hw.unk4F28.cmd[0] = cmducs(0x4F28, 1);
    ALLOC_STATE( unk4F30, always, 3, "unk4F30", 0 );
    r300->hw.unk4F30.cmd[0] = cmducs(0x4F30, 2);
    ALLOC_STATE( unk4F44, always, 2, "unk4F44", 0 );
    r300->hw.unk4F44.cmd[0] = cmducs(0x4F44, 1);
    ALLOC_STATE( unk4F54, always, 2, "unk4F54", 0 );
    r300->hw.unk4F54.cmd[0] = cmducs(0x4F54, 1);

    ALLOC_STATE( vpi, vpu, R300_VPI_CMDSIZE, "vpi", 0 );
    r300->hw.vpi.cmd[R300_VPI_CMD_0] = cmdvpu(R300_PVS_UPLOAD_PROGRAM, 0);
    ALLOC_STATE( vpp, vpu, R300_VPP_CMDSIZE, "vpp", 0 );
    r300->hw.vpp.cmd[R300_VPP_CMD_0] = cmdvpu(R300_PVS_UPLOAD_PARAMETERS, 0);
    ALLOC_STATE( vps, vpu, R300_VPS_CMDSIZE, "vps", 0 );
    r300->hw.vps.cmd[R300_VPS_CMD_0] = cmdvpu(R300_PVS_UPLOAD_POINTSIZE, 1);

    /* Setup the atom linked list */
    make_empty_list(&r300->hw.atomlist);
    r300->hw.atomlist.name = "atom-list";

    insert_at_tail(&r300->hw.atomlist, &r300->hw.vpt);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2080);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.ovf);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk20B0);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2134);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2140);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.vir[0]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.vir[1]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.vic);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk21DC);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk221C);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2220);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk2288);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.pvs);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4008);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4010);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.txe);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4200);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4214);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.ps);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4230);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4260);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4274);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4288);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42A0);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42B4);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk42C0);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.rc);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.ri);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.rr);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk43A4);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk43E8);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.fp);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk46A4);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[0]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[1]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[2]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.fpi[3]);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BC0);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BC8);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.at);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4BD8);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E00);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.bld);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.cmk);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E10);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.cb);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E50);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4E88);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.zc);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F08);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F10);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.zb);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F28);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F30);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F44);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.unk4F54);

    insert_at_tail(&r300->hw.atomlist, &r300->hw.vpi);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.vpp);
    insert_at_tail(&r300->hw.atomlist, &r300->hw.vps);

    r300->hw.is_dirty = GL_TRUE;
    r300->hw.all_dirty = GL_TRUE;

    /* Initialize command buffer */
    size = 256 * driQueryOptioni(&r300->radeon.optionCache, "command_buffer_size");
    if (size < 2*r300->hw.max_state_size)
        size = 2*r300->hw.max_state_size;

    if (RADEON_DEBUG & DEBUG_IOCTL)
        fprintf(stderr,
                "Allocating %d bytes command buffer (max state is %d bytes)\n",
                size*4, r300->hw.max_state_size*4);

    r300->cmdbuf.size = size;
    r300->cmdbuf.cmd_buf = (uint32_t*)CALLOC(size*4);
    r300->cmdbuf.count_used = 0;
    r300->cmdbuf.count_reemit = 0;
}
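
/*
 * Sizing note for r300InitCmdBuf() above (illustration only): cmdbuf.size is
 * counted in 32-bit dwords, so a command_buffer_size option value of N yields
 * 256*N dwords, i.e. 1024*N bytes, unless twice the worst-case state size is
 * larger. For example, a hypothetical N = 8 would give 2048 dwords (8192 bytes).
 */
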

/**
 * Destroy the command buffer and state atoms.
 */
void r300DestroyCmdBuf(r300ContextPtr r300)
{
    struct r300_state_atom* atom;

    FREE(r300->cmdbuf.cmd_buf);

    foreach(atom, &r300->hw.atomlist) {
        FREE(atom->cmd);
    }
}
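
/*
 * Illustrative lifetime sketch (not driver code): how the entry points in
 * this file fit together over the life of a context. Creation and teardown
 * are actually driven from the context code (r300_context.c); this only
 * shows the expected ordering.
 */
#if 0
static void example_cmdbuf_lifetime(r300ContextPtr r300)
{
    r300InitCmdBuf(r300);                 /* allocate buffer, build atom list */

    /* ... per draw / per frame ... */
    r300EmitState(r300);                  /* queue dirty (and re-emitted) atoms */
    r300FlushCmdBuf(r300, __FUNCTION__);  /* submit the queued dwords to the DRM */

    r300DestroyCmdBuf(r300);              /* free buffer and per-atom storage */
}
#endif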