r300g: fix build
[mesa.git] / src / gallium / drivers / r300 / r300_context.c
1 /*
2 * Copyright 2008 Corbin Simpson <MostAwesomeDude@gmail.com>
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE. */
22
23 #include "draw/draw_context.h"
24
25 #include "util/u_memory.h"
26 #include "util/u_sampler.h"
27 #include "util/u_simple_list.h"
28 #include "util/u_upload_mgr.h"
29
30 #include "r300_cb.h"
31 #include "r300_context.h"
32 #include "r300_emit.h"
33 #include "r300_hyperz.h"
34 #include "r300_screen.h"
35 #include "r300_screen_buffer.h"
36 #include "r300_winsys.h"
37
38 #include <inttypes.h>
39
/* Track how many contexts exist on this screen and switch the shared
 * buffer slab pool between single- and multi-threaded modes: locking is
 * only required once two or more contexts share the pool.
 *
 * NOTE(review): the increment/decrement is atomic, but the following
 * read of num_contexts is a separate plain load, so two threads
 * creating/destroying contexts concurrently could observe a stale
 * count and pick the wrong threading mode - confirm whether context
 * creation is serialized by the state tracker. */
static void r300_update_num_contexts(struct r300_screen *r300screen,
                                     int diff)
{
    if (diff > 0) {
        p_atomic_inc(&r300screen->num_contexts);

        /* A second context appeared - the pool must start locking. */
        if (r300screen->num_contexts > 1)
            util_slab_set_thread_safety(&r300screen->pool_buffers,
                                        UTIL_SLAB_MULTITHREADED);
    } else {
        p_atomic_dec(&r300screen->num_contexts);

        /* Down to at most one context - locking is unnecessary. */
        if (r300screen->num_contexts <= 1)
            util_slab_set_thread_safety(&r300screen->pool_buffers,
                                        UTIL_SLAB_SINGLETHREADED);
    }
}
57
/* Drop every reference this context holds on shared pipe objects
 * (framebuffer surfaces, sampler views, vertex buffers, the dummy
 * resources) and free any queries still linked into the query list.
 *
 * Called only from r300_destroy_context(); assumes r300->query_list
 * was initialized (see the XXX note at the call site). */
static void r300_release_referenced_objects(struct r300_context *r300)
{
    struct pipe_framebuffer_state *fb =
        (struct pipe_framebuffer_state*)r300->fb_state.state;
    struct r300_textures_state *textures =
        (struct r300_textures_state*)r300->textures_state.state;
    struct r300_query *query, *temp;
    unsigned i;

    /* Framebuffer state. */
    util_unreference_framebuffer_state(fb);

    /* Textures. */
    for (i = 0; i < textures->sampler_view_count; i++)
        pipe_sampler_view_reference(
                (struct pipe_sampler_view**)&textures->sampler_views[i], NULL);

    /* The special dummy texture for texkill. */
    if (r300->texkill_sampler) {
        pipe_sampler_view_reference(
                (struct pipe_sampler_view**)&r300->texkill_sampler,
                NULL);
    }

    /* The dummy VBO. */
    pipe_resource_reference(&r300->dummy_vb, NULL);

    /* The SWTCL VBO. */
    pipe_resource_reference(&r300->vbo, NULL);

    /* Vertex buffers. */
    for (i = 0; i < r300->vertex_buffer_count; i++) {
        pipe_resource_reference(&r300->vertex_buffer[i].buffer, NULL);
    }

    /* If there are any queries pending or not destroyed, remove them now. */
    foreach_s(query, temp, &r300->query_list) {
        remove_from_list(query);
        FREE(query);
    }
}
99
/* pipe_context::destroy implementation.
 *
 * Also used as the error path of r300_create_context(), so every step
 * must tolerate members that were never initialized - hence the NULL
 * checks before each destroy call.  The aa_state.state check stands in
 * for "r300_setup_atoms() ran", since it is the first atom allocated
 * there. */
static void r300_destroy_context(struct pipe_context* context)
{
    struct r300_context* r300 = r300_context(context);
    struct r300_atom *atom;

    if (r300->blitter)
        util_blitter_destroy(r300->blitter);
    if (r300->draw)
        draw_destroy(r300->draw);

    /* Print stats, if enabled. */
    if (SCREEN_DBG_ON(r300->screen, DBG_STATS)) {
        fprintf(stderr, "r300: Stats for context %p:\n", r300);
        fprintf(stderr, " : Flushes: %" PRIu64 "\n", r300->flush_counter);
        foreach(atom, &r300->atom_list) {
            fprintf(stderr, " : %s: %" PRIu64 " emits\n",
                atom->name, atom->counter);
        }
    }

    if (r300->upload_vb)
        u_upload_destroy(r300->upload_vb);
    if (r300->upload_ib)
        u_upload_destroy(r300->upload_ib);

    if (r300->tran.translate_cache)
        translate_cache_destroy(r300->tran.translate_cache);

    /* XXX: This function assumes r300->query_list was initialized */
    r300_release_referenced_objects(r300);

    if (r300->zmask_mm)
        r300_hyperz_destroy_mm(r300);

    if (r300->cs)
        r300->rws->cs_destroy(r300->cs);

    /* XXX: No way to tell if this was initialized or not? */
    util_slab_destroy(&r300->pool_transfers);

    r300_update_num_contexts(r300->screen, -1);

    /* Free the structs allocated in r300_setup_atoms() */
    if (r300->aa_state.state) {
        FREE(r300->aa_state.state);
        FREE(r300->blend_color_state.state);
        FREE(r300->clip_state.state);
        FREE(r300->fb_state.state);
        FREE(r300->gpu_flush.state);
        FREE(r300->hyperz_state.state);
        FREE(r300->invariant_state.state);
        FREE(r300->rs_block_state.state);
        FREE(r300->scissor_state.state);
        FREE(r300->textures_state.state);
        FREE(r300->vap_invariant_state.state);
        FREE(r300->viewport_state.state);
        FREE(r300->ztop_state.state);
        FREE(r300->fs_constants.state);
        FREE(r300->vs_constants.state);
        /* vertex_stream_state is only allocated for SW TCL chipsets. */
        if (!r300->screen->caps.has_tcl) {
            FREE(r300->vertex_stream_state.state);
        }
    }
    FREE(r300);
}
165
166 void r300_flush_cb(void *data)
167 {
168 struct r300_context* const cs_context_copy = data;
169
170 cs_context_copy->context.flush(&cs_context_copy->context, 0, NULL);
171 }
172
/* Initialize the context atom named 'atomname': record its name, its
 * maximum command-buffer size in dwords (0 = size varies per emit), its
 * r300_emit_<atomname> emit callback, and append it to the ordered atom
 * list.  Emission order follows list order (see r300_setup_atoms). */
#define R300_INIT_ATOM(atomname, atomsize) \
 do { \
     r300->atomname.name = #atomname; \
     r300->atomname.state = NULL; \
     r300->atomname.size = atomsize; \
     r300->atomname.emit = r300_emit_##atomname; \
     r300->atomname.dirty = FALSE; \
     insert_at_tail(&r300->atom_list, &r300->atomname); \
 } while (0)

/* Allocate zeroed local state storage of 'statetype' for a non-CSO
 * atom.  Expands to 'return FALSE' on allocation failure, so it may
 * only be used inside a function returning boolean. */
#define R300_ALLOC_ATOM(atomname, statetype) \
 do { \
     r300->atomname.state = CALLOC_STRUCT(statetype); \
     if (r300->atomname.state == NULL) \
         return FALSE; \
 } while (0)
189
/* Build the ordered state-atom list for this context, allocate local
 * storage for the non-CSO atoms, and mark the atoms that must be
 * emitted in the very first command stream as dirty.
 *
 * Returns FALSE on allocation failure; the caller destroys the
 * context, which frees any atoms allocated up to that point. */
static boolean r300_setup_atoms(struct r300_context* r300)
{
    boolean is_rv350 = r300->screen->caps.is_rv350;
    boolean is_r500 = r300->screen->caps.is_r500;
    boolean has_tcl = r300->screen->caps.has_tcl;
    boolean drm_2_3_0 = r300->rws->get_value(r300->rws, R300_VID_DRM_2_3_0);
    boolean drm_2_6_0 = r300->rws->get_value(r300->rws, R300_VID_DRM_2_6_0);
    boolean can_hyperz = r300->rws->get_value(r300->rws, R300_CAN_HYPERZ);
    boolean has_hiz_ram = r300->screen->caps.hiz_ram > 0;

    /* Create the actual atom list.
     *
     * Each atom is examined and emitted in the order it appears here, which
     * can affect performance and conformance if not handled with care.
     *
     * Some atoms never change size, others change every emit - those have
     * the size of 0 here.
     *
     * NOTE: The framebuffer state is split into these atoms:
     * - gpu_flush          (unpipelined regs)
     * - aa_state           (unpipelined regs)
     * - fb_state           (unpipelined regs)
     * - hyperz_state       (unpipelined regs followed by pipelined ones)
     * - fb_state_pipelined (pipelined regs)
     * The motivation behind this is to be able to emit a strict
     * subset of the regs, and to have reasonable register ordering. */
    make_empty_list(&r300->atom_list);
    /* SC, GB (unpipelined), RB3D (unpipelined), ZB (unpipelined). */
    R300_INIT_ATOM(gpu_flush, 9);
    R300_INIT_ATOM(aa_state, 4);
    R300_INIT_ATOM(fb_state, 0);
    /* Extra GB_Z_PEQ_CONFIG reg on r500, or rv350 with a new enough DRM. */
    R300_INIT_ATOM(hyperz_state, is_r500 || (is_rv350 && drm_2_6_0) ? 10 : 8);
    /* ZB (unpipelined), SC. */
    R300_INIT_ATOM(ztop_state, 2);
    /* ZB, FG. */
    R300_INIT_ATOM(dsa_state, is_r500 ? 8 : 6);
    /* RB3D. */
    R300_INIT_ATOM(blend_state, 8);
    R300_INIT_ATOM(blend_color_state, is_r500 ? 3 : 2);
    /* SC. */
    R300_INIT_ATOM(scissor_state, 3);
    /* GB, FG, GA, SU, SC, RB3D. */
    R300_INIT_ATOM(invariant_state, 16 + (is_rv350 ? 4 : 0));
    /* VAP. */
    R300_INIT_ATOM(viewport_state, 9);
    R300_INIT_ATOM(pvs_flush, 2);
    R300_INIT_ATOM(vap_invariant_state, 9);
    R300_INIT_ATOM(vertex_stream_state, 0);
    R300_INIT_ATOM(vs_state, 0);
    R300_INIT_ATOM(vs_constants, 0);
    R300_INIT_ATOM(clip_state, has_tcl ? 5 + (6 * 4) : 2);
    /* VAP, RS, GA, GB, SU, SC. */
    R300_INIT_ATOM(rs_block_state, 0);
    R300_INIT_ATOM(rs_state, 0);
    /* SC, US. */
    R300_INIT_ATOM(fb_state_pipelined, 5 + (drm_2_3_0 ? 3 : 0));
    /* US. */
    R300_INIT_ATOM(fs, 0);
    R300_INIT_ATOM(fs_rc_constant_state, 0);
    R300_INIT_ATOM(fs_constants, 0);
    /* TX. */
    R300_INIT_ATOM(texture_cache_inval, 2);
    R300_INIT_ATOM(textures_state, 0);
    if (can_hyperz) {
        /* HiZ Clear */
        if (has_hiz_ram)
            R300_INIT_ATOM(hiz_clear, 0);
        /* zmask clear */
        R300_INIT_ATOM(zmask_clear, 0);
    }
    /* ZB (unpipelined), SU. */
    R300_INIT_ATOM(query_start, 4);

    /* Replace emission functions for r500. */
    if (is_r500) {
        r300->fs.emit = r500_emit_fs;
        r300->fs_rc_constant_state.emit = r500_emit_fs_rc_constant_state;
        r300->fs_constants.emit = r500_emit_fs_constants;
    }

    /* Some non-CSO atoms need explicit space to store the state locally. */
    R300_ALLOC_ATOM(aa_state, r300_aa_state);
    R300_ALLOC_ATOM(blend_color_state, r300_blend_color_state);
    R300_ALLOC_ATOM(clip_state, r300_clip_state);
    R300_ALLOC_ATOM(hyperz_state, r300_hyperz_state);
    R300_ALLOC_ATOM(invariant_state, r300_invariant_state);
    R300_ALLOC_ATOM(textures_state, r300_textures_state);
    R300_ALLOC_ATOM(vap_invariant_state, r300_vap_invariant_state);
    R300_ALLOC_ATOM(viewport_state, r300_viewport_state);
    R300_ALLOC_ATOM(ztop_state, r300_ztop_state);
    R300_ALLOC_ATOM(fb_state, pipe_framebuffer_state);
    R300_ALLOC_ATOM(gpu_flush, pipe_framebuffer_state);
    R300_ALLOC_ATOM(scissor_state, pipe_scissor_state);
    R300_ALLOC_ATOM(rs_block_state, r300_rs_block);
    R300_ALLOC_ATOM(fs_constants, r300_constant_buffer);
    R300_ALLOC_ATOM(vs_constants, r300_constant_buffer);
    if (!r300->screen->caps.has_tcl) {
        R300_ALLOC_ATOM(vertex_stream_state, r300_vertex_stream_state);
    }

    /* Some non-CSO atoms don't use the state pointer. */
    r300->fb_state_pipelined.allow_null_state = TRUE;
    r300->fs_rc_constant_state.allow_null_state = TRUE;
    r300->pvs_flush.allow_null_state = TRUE;
    r300->query_start.allow_null_state = TRUE;
    r300->texture_cache_inval.allow_null_state = TRUE;

    /* Some states must be marked as dirty here to properly set up
     * hardware in the first command stream. */
    r300->invariant_state.dirty = TRUE;
    r300->pvs_flush.dirty = TRUE;
    r300->vap_invariant_state.dirty = TRUE;
    r300->texture_cache_inval.dirty = TRUE;
    r300->textures_state.dirty = TRUE;

    return TRUE;
}
307
/* Not every state tracker calls every driver function before the first draw
 * call and we must initialize the command buffers somehow.
 *
 * Pre-builds the command buffers of the non-CSO atoms (clip, GPU flush,
 * VAP invariant, invariant, hyperz) so they can be emitted even if the
 * state tracker never touched the corresponding state.  The dword count
 * passed to each BEGIN_CB must match the atom size set up in
 * r300_setup_atoms(). */
static void r300_init_states(struct pipe_context *pipe)
{
    struct r300_context *r300 = r300_context(pipe);
    struct pipe_blend_color bc = {{0}};
    struct pipe_clip_state cs = {{{0}}};
    struct pipe_scissor_state ss = {0};
    struct r300_clip_state *clip =
        (struct r300_clip_state*)r300->clip_state.state;
    struct r300_gpu_flush *gpuflush =
        (struct r300_gpu_flush*)r300->gpu_flush.state;
    struct r300_vap_invariant_state *vap_invariant =
        (struct r300_vap_invariant_state*)r300->vap_invariant_state.state;
    struct r300_invariant_state *invariant =
        (struct r300_invariant_state*)r300->invariant_state.state;

    CB_LOCALS;

    pipe->set_blend_color(pipe, &bc);
    pipe->set_scissor_state(pipe, &ss);

    /* Initialize the clip state. */
    if (r300->screen->caps.has_tcl) {
        pipe->set_clip_state(pipe, &cs);
    } else {
        /* SW TCL: clipping is done by Draw, disable it in hardware. */
        BEGIN_CB(clip->cb, 2);
        OUT_CB_REG(R300_VAP_CLIP_CNTL, R300_CLIP_DISABLE);
        END_CB;
    }

    /* Initialize the GPU flush. */
    {
        BEGIN_CB(gpuflush->cb_flush_clean, 6);

        /* Flush and free renderbuffer caches. */
        OUT_CB_REG(R300_RB3D_DSTCACHE_CTLSTAT,
            R300_RB3D_DSTCACHE_CTLSTAT_DC_FREE_FREE_3D_TAGS |
            R300_RB3D_DSTCACHE_CTLSTAT_DC_FLUSH_FLUSH_DIRTY_3D);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
            R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE |
            R300_ZB_ZCACHE_CTLSTAT_ZC_FREE_FREE);

        /* Wait until the GPU is idle.
         * This fixes random pixels sometimes appearing probably caused
         * by incomplete rendering. */
        OUT_CB_REG(RADEON_WAIT_UNTIL, RADEON_WAIT_3D_IDLECLEAN);
        END_CB;
    }

    /* Initialize the VAP invariant state. */
    {
        BEGIN_CB(vap_invariant->cb, 9);
        OUT_CB_REG(VAP_PVS_VTX_TIMEOUT_REG, 0xffff);
        OUT_CB_REG_SEQ(R300_VAP_GB_VERT_CLIP_ADJ, 4);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_REG(R300_VAP_PSC_SGN_NORM_CNTL, R300_SGN_NORM_NO_ZERO);
        END_CB;
    }

    /* Initialize the invariant state. */
    {
        BEGIN_CB(invariant->cb, r300->invariant_state.size);
        OUT_CB_REG(R300_GB_SELECT, 0);
        OUT_CB_REG(R300_FG_FOG_BLEND, 0);
        OUT_CB_REG(R300_GA_ROUND_MODE, 1);
        OUT_CB_REG(R300_GA_OFFSET, 0);
        OUT_CB_REG(R300_SU_TEX_WRAP, 0);
        OUT_CB_REG(R300_SU_DEPTH_SCALE, 0x4B7FFFFF);
        OUT_CB_REG(R300_SU_DEPTH_OFFSET, 0);
        OUT_CB_REG(R300_SC_EDGERULE, 0x2DA49525);

        /* These two regs exist on rv350 and later only; the atom size
         * accounts for them (see r300_setup_atoms). */
        if (r300->screen->caps.is_rv350) {
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD, 0x01010101);
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD, 0xFEFEFEFE);
        }
        END_CB;
    }

    /* Initialize the hyperz state. */
    {
        struct r300_hyperz_state *hyperz =
            (struct r300_hyperz_state*)r300->hyperz_state.state;
        BEGIN_CB(&hyperz->cb_flush_begin, r300->hyperz_state.size);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE);
        OUT_CB_REG(R300_ZB_BW_CNTL, 0);
        OUT_CB_REG(R300_ZB_DEPTHCLEARVALUE, 0);
        OUT_CB_REG(R300_SC_HYPERZ, R300_SC_HYPERZ_ADJ_2);

        /* Same condition as the hyperz_state atom size in
         * r300_setup_atoms (10 vs. 8 dwords). */
        if (r300->screen->caps.is_r500 ||
            (r300->screen->caps.is_rv350 &&
             r300->rws->get_value(r300->rws, R300_VID_DRM_2_6_0))) {
            OUT_CB_REG(R300_GB_Z_PEQ_CONFIG, 0);
        }
        END_CB;
    }
}
409
410 struct pipe_context* r300_create_context(struct pipe_screen* screen,
411 void *priv)
412 {
413 struct r300_context* r300 = CALLOC_STRUCT(r300_context);
414 struct r300_screen* r300screen = r300_screen(screen);
415 struct r300_winsys_screen *rws = r300screen->rws;
416
417 if (!r300)
418 return NULL;
419
420 r300_update_num_contexts(r300screen, 1);
421
422 r300->rws = rws;
423 r300->screen = r300screen;
424
425 r300->context.winsys = (struct pipe_winsys*)rws;
426 r300->context.screen = screen;
427 r300->context.priv = priv;
428
429 r300->context.destroy = r300_destroy_context;
430
431 make_empty_list(&r300->query_list);
432
433 util_slab_create(&r300->pool_transfers,
434 sizeof(struct pipe_transfer), 64,
435 UTIL_SLAB_SINGLETHREADED);
436
437 r300->cs = rws->cs_create(rws);
438 if (r300->cs == NULL)
439 goto fail;
440
441 if (!r300screen->caps.has_tcl) {
442 /* Create a Draw. This is used for SW TCL. */
443 r300->draw = draw_create(&r300->context);
444 if (r300->draw == NULL)
445 goto fail;
446 /* Enable our renderer. */
447 draw_set_rasterize_stage(r300->draw, r300_draw_stage(r300));
448 /* Disable converting points/lines to triangles. */
449 draw_wide_line_threshold(r300->draw, 10000000.f);
450 draw_wide_point_threshold(r300->draw, 10000000.f);
451 }
452
453 if (!r300_setup_atoms(r300))
454 goto fail;
455
456 r300_init_blit_functions(r300);
457 r300_init_flush_functions(r300);
458 r300_init_query_functions(r300);
459 r300_init_state_functions(r300);
460 r300_init_resource_functions(r300);
461
462 r300->blitter = util_blitter_create(&r300->context);
463 if (r300->blitter == NULL)
464 goto fail;
465
466 /* Render functions must be initialized after blitter. */
467 r300_init_render_functions(r300);
468
469 rws->cs_set_flush(r300->cs, r300_flush_cb, r300);
470
471 /* setup hyper-z mm */
472 if (r300->rws->get_value(r300->rws, R300_CAN_HYPERZ))
473 if (!r300_hyperz_init_mm(r300))
474 goto fail;
475
476 r300->upload_ib = u_upload_create(&r300->context,
477 32 * 1024, 16,
478 PIPE_BIND_INDEX_BUFFER);
479
480 if (r300->upload_ib == NULL)
481 goto fail;
482
483 r300->upload_vb = u_upload_create(&r300->context,
484 128 * 1024, 16,
485 PIPE_BIND_VERTEX_BUFFER);
486 if (r300->upload_vb == NULL)
487 goto fail;
488
489 r300->tran.translate_cache = translate_cache_create();
490 if (r300->tran.translate_cache == NULL)
491 goto fail;
492
493 r300_init_states(&r300->context);
494
495 /* The KIL opcode needs the first texture unit to be enabled
496 * on r3xx-r4xx. In order to calm down the CS checker, we bind this
497 * dummy texture there. */
498 if (!r300->screen->caps.is_r500) {
499 struct pipe_resource *tex;
500 struct pipe_resource rtempl = {{0}};
501 struct pipe_sampler_view vtempl = {{0}};
502
503 rtempl.target = PIPE_TEXTURE_2D;
504 rtempl.format = PIPE_FORMAT_I8_UNORM;
505 rtempl.bind = PIPE_BIND_SAMPLER_VIEW;
506 rtempl.usage = PIPE_USAGE_IMMUTABLE;
507 rtempl.width0 = 1;
508 rtempl.height0 = 1;
509 rtempl.depth0 = 1;
510 tex = screen->resource_create(screen, &rtempl);
511
512 u_sampler_view_default_template(&vtempl, tex, tex->format);
513
514 r300->texkill_sampler = (struct r300_sampler_view*)
515 r300->context.create_sampler_view(&r300->context, tex, &vtempl);
516
517 pipe_resource_reference(&tex, NULL);
518 }
519
520 {
521 struct pipe_resource vb = {};
522 vb.target = PIPE_BUFFER;
523 vb.format = PIPE_FORMAT_R8_UNORM;
524 vb.bind = PIPE_BIND_VERTEX_BUFFER;
525 vb.usage = PIPE_USAGE_IMMUTABLE;
526 vb.width0 = sizeof(float) * 16;
527 vb.height0 = 1;
528 vb.depth0 = 1;
529
530 r300->dummy_vb = screen->resource_create(screen, &vb);
531 }
532
533 return &r300->context;
534
535 fail:
536 r300_destroy_context(&r300->context);
537 return NULL;
538 }
539
540 void r300_finish(struct r300_context *r300)
541 {
542 struct pipe_framebuffer_state *fb;
543 unsigned i;
544
545 /* This is a preliminary implementation of glFinish.
546 *
547 * The ideal implementation should use something like EmitIrqLocked and
548 * WaitIrq, or better, real fences.
549 */
550 if (r300->fb_state.state) {
551 fb = r300->fb_state.state;
552
553 for (i = 0; i < fb->nr_cbufs; i++) {
554 if (fb->cbufs[i]->texture) {
555 r300->rws->buffer_wait(r300->rws,
556 r300_texture(fb->cbufs[i]->texture)->buffer);
557 return;
558 }
559 }
560 if (fb->zsbuf && fb->zsbuf->texture) {
561 r300->rws->buffer_wait(r300->rws,
562 r300_texture(fb->zsbuf->texture)->buffer);
563 }
564 }
565 }