util: rename u_mempool -> u_slab
src/gallium/drivers/r300/r300_context.c
/*
 * Copyright 2008 Corbin Simpson <MostAwesomeDude@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE. */

#include "draw/draw_context.h"

#include "util/u_memory.h"
#include "util/u_sampler.h"
#include "util/u_simple_list.h"
#include "util/u_upload_mgr.h"

#include "r300_cb.h"
#include "r300_context.h"
#include "r300_emit.h"
#include "r300_hyperz.h"
#include "r300_screen.h"
#include "r300_screen_buffer.h"
#include "r300_winsys.h"

#include <inttypes.h>

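/* Track the number of contexts sharing this screen. The screen-wide buffer
 * slab only needs to be thread-safe once more than one context exists. */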
static void r300_update_num_contexts(struct r300_screen *r300screen,
                                     int diff)
{
    if (diff > 0) {
        p_atomic_inc(&r300screen->num_contexts);

        if (r300screen->num_contexts > 1)
            util_slab_set_thread_safety(&r300screen->pool_buffers,
                                        UTIL_SLAB_MULTITHREADED);
    } else {
        p_atomic_dec(&r300screen->num_contexts);

        if (r300screen->num_contexts <= 1)
            util_slab_set_thread_safety(&r300screen->pool_buffers,
                                        UTIL_SLAB_SINGLETHREADED);
    }
}

static void r300_release_referenced_objects(struct r300_context *r300)
{
    struct pipe_framebuffer_state *fb =
            (struct pipe_framebuffer_state*)r300->fb_state.state;
    struct r300_textures_state *textures =
            (struct r300_textures_state*)r300->textures_state.state;
    struct r300_query *query, *temp;
    unsigned i;

    /* Framebuffer state. */
    util_unreference_framebuffer_state(fb);

    /* Textures. */
    for (i = 0; i < textures->sampler_view_count; i++)
        pipe_sampler_view_reference(
                (struct pipe_sampler_view**)&textures->sampler_views[i], NULL);

    /* The special dummy texture for texkill. */
    if (r300->texkill_sampler) {
        pipe_sampler_view_reference(
                (struct pipe_sampler_view**)&r300->texkill_sampler,
                NULL);
    }

    /* The dummy VBO. */
    pipe_resource_reference(&r300->dummy_vb, NULL);

    /* The SWTCL VBO. */
    pipe_resource_reference(&r300->vbo, NULL);

    /* Vertex buffers. */
    for (i = 0; i < r300->vertex_buffer_count; i++) {
        pipe_resource_reference(&r300->vertex_buffer[i].buffer, NULL);
    }

    /* If there are any queries pending or not destroyed, remove them now. */
    foreach_s(query, temp, &r300->query_list) {
        remove_from_list(query);
        FREE(query);
    }
}

static void r300_destroy_context(struct pipe_context* context)
{
    struct r300_context* r300 = r300_context(context);
    struct r300_atom *atom;

    if (r300->blitter)
        util_blitter_destroy(r300->blitter);
    if (r300->draw)
        draw_destroy(r300->draw);

    /* Print stats, if enabled. */
    if (SCREEN_DBG_ON(r300->screen, DBG_STATS)) {
        fprintf(stderr, "r300: Stats for context %p:\n", r300);
        fprintf(stderr, " : Flushes: %" PRIu64 "\n", r300->flush_counter);
        foreach(atom, &r300->atom_list) {
            fprintf(stderr, " : %s: %" PRIu64 " emits\n",
                    atom->name, atom->counter);
        }
    }

    if (r300->upload_vb)
        u_upload_destroy(r300->upload_vb);
    if (r300->upload_ib)
        u_upload_destroy(r300->upload_ib);

    if (r300->tran.translate_cache)
        translate_cache_destroy(r300->tran.translate_cache);

    /* XXX: This function assumes r300->query_list was initialized */
    r300_release_referenced_objects(r300);

    if (r300->zmask_mm)
        r300_hyperz_destroy_mm(r300);

    if (r300->cs)
        r300->rws->cs_destroy(r300->cs);

    /* XXX: No way to tell if this was initialized or not? */
    util_slab_destroy(&r300->pool_transfers);

    r300_update_num_contexts(r300->screen, -1);

    /* Free the structs allocated in r300_setup_atoms() */
    if (r300->aa_state.state) {
        FREE(r300->aa_state.state);
        FREE(r300->blend_color_state.state);
        FREE(r300->clip_state.state);
        FREE(r300->fb_state.state);
        FREE(r300->gpu_flush.state);
        FREE(r300->hyperz_state.state);
        FREE(r300->invariant_state.state);
        FREE(r300->rs_block_state.state);
        FREE(r300->scissor_state.state);
        FREE(r300->textures_state.state);
        FREE(r300->vap_invariant_state.state);
        FREE(r300->viewport_state.state);
        FREE(r300->ztop_state.state);
        FREE(r300->fs_constants.state);
        FREE(r300->vs_constants.state);
        if (!r300->screen->caps.has_tcl) {
            FREE(r300->vertex_stream_state.state);
        }
    }
    FREE(r300);
}

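/* Flush callback registered with the winsys via cs_set_flush() in
 * r300_create_context; the winsys invokes it whenever the command stream
 * has to be flushed on the context's behalf. */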
void r300_flush_cb(void *data)
{
    struct r300_context* const cs_context_copy = data;

    cs_context_copy->context.flush(&cs_context_copy->context, 0, NULL);
}

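/* Declare a state atom: name it after the struct member, record how much
 * command buffer space it may need (0 = variable size), hook up its emit
 * function and append it to the context's atom list. */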
#define R300_INIT_ATOM(atomname, atomsize) \
 do { \
    r300->atomname.name = #atomname; \
    r300->atomname.state = NULL; \
    r300->atomname.size = atomsize; \
    r300->atomname.emit = r300_emit_##atomname; \
    r300->atomname.dirty = FALSE; \
    insert_at_tail(&r300->atom_list, &r300->atomname); \
 } while (0)

static void r300_setup_atoms(struct r300_context* r300)
{
    boolean is_rv350 = r300->screen->caps.is_rv350;
    boolean is_r500 = r300->screen->caps.is_r500;
    boolean has_tcl = r300->screen->caps.has_tcl;
    boolean drm_2_3_0 = r300->rws->get_value(r300->rws, R300_VID_DRM_2_3_0);
    boolean drm_2_6_0 = r300->rws->get_value(r300->rws, R300_VID_DRM_2_6_0);
    boolean can_hyperz = r300->rws->get_value(r300->rws, R300_CAN_HYPERZ);
    boolean has_hiz_ram = r300->screen->caps.hiz_ram > 0;

    /* Create the actual atom list.
     *
     * Each atom is examined and emitted in the order it appears here, which
     * can affect performance and conformance if not handled with care.
     *
     * Some atoms never change size, others change every emit - those have
     * the size of 0 here.
     *
     * NOTE: The framebuffer state is split into these atoms:
     * - gpu_flush (unpipelined regs)
     * - aa_state (unpipelined regs)
     * - fb_state (unpipelined regs)
     * - hyperz_state (unpipelined regs followed by pipelined ones)
     * - fb_state_pipelined (pipelined regs)
     * The motivation behind this is to be able to emit a strict
     * subset of the regs, and to have reasonable register ordering. */
    make_empty_list(&r300->atom_list);
    /* SC, GB (unpipelined), RB3D (unpipelined), ZB (unpipelined). */
    R300_INIT_ATOM(gpu_flush, 9);
    R300_INIT_ATOM(aa_state, 4);
    R300_INIT_ATOM(fb_state, 0);
    R300_INIT_ATOM(hyperz_state, is_r500 || (is_rv350 && drm_2_6_0) ? 10 : 8);
    /* ZB (unpipelined), SC. */
    R300_INIT_ATOM(ztop_state, 2);
    /* ZB, FG. */
    R300_INIT_ATOM(dsa_state, is_r500 ? 8 : 6);
    /* RB3D. */
    R300_INIT_ATOM(blend_state, 8);
    R300_INIT_ATOM(blend_color_state, is_r500 ? 3 : 2);
    /* SC. */
    R300_INIT_ATOM(scissor_state, 3);
    /* GB, FG, GA, SU, SC, RB3D. */
    R300_INIT_ATOM(invariant_state, 16 + (is_rv350 ? 4 : 0));
    /* VAP. */
    R300_INIT_ATOM(viewport_state, 9);
    R300_INIT_ATOM(pvs_flush, 2);
    R300_INIT_ATOM(vap_invariant_state, 9);
    R300_INIT_ATOM(vertex_stream_state, 0);
    R300_INIT_ATOM(vs_state, 0);
    R300_INIT_ATOM(vs_constants, 0);
    R300_INIT_ATOM(clip_state, has_tcl ? 5 + (6 * 4) : 2);
    /* VAP, RS, GA, GB, SU, SC. */
    R300_INIT_ATOM(rs_block_state, 0);
    R300_INIT_ATOM(rs_state, 0);
    /* SC, US. */
    R300_INIT_ATOM(fb_state_pipelined, 5 + (drm_2_3_0 ? 3 : 0));
    /* US. */
    R300_INIT_ATOM(fs, 0);
    R300_INIT_ATOM(fs_rc_constant_state, 0);
    R300_INIT_ATOM(fs_constants, 0);
    /* TX. */
    R300_INIT_ATOM(texture_cache_inval, 2);
    R300_INIT_ATOM(textures_state, 0);
    if (can_hyperz) {
        /* HiZ Clear */
        if (has_hiz_ram)
            R300_INIT_ATOM(hiz_clear, 0);
        /* zmask clear */
        R300_INIT_ATOM(zmask_clear, 0);
    }
    /* ZB (unpipelined), SU. */
    R300_INIT_ATOM(query_start, 4);

    /* Replace emission functions for r500. */
    if (is_r500) {
        r300->fs.emit = r500_emit_fs;
        r300->fs_rc_constant_state.emit = r500_emit_fs_rc_constant_state;
        r300->fs_constants.emit = r500_emit_fs_constants;
    }

    /* Some non-CSO atoms need explicit space to store the state locally. */
    r300->aa_state.state = CALLOC_STRUCT(r300_aa_state);
    r300->blend_color_state.state = CALLOC_STRUCT(r300_blend_color_state);
    r300->clip_state.state = CALLOC_STRUCT(r300_clip_state);
    r300->fb_state.state = CALLOC_STRUCT(pipe_framebuffer_state);
    r300->gpu_flush.state = CALLOC_STRUCT(pipe_framebuffer_state);
    r300->hyperz_state.state = CALLOC_STRUCT(r300_hyperz_state);
    r300->invariant_state.state = CALLOC_STRUCT(r300_invariant_state);
    r300->rs_block_state.state = CALLOC_STRUCT(r300_rs_block);
    r300->scissor_state.state = CALLOC_STRUCT(pipe_scissor_state);
    r300->textures_state.state = CALLOC_STRUCT(r300_textures_state);
    r300->vap_invariant_state.state = CALLOC_STRUCT(r300_vap_invariant_state);
    r300->viewport_state.state = CALLOC_STRUCT(r300_viewport_state);
    r300->ztop_state.state = CALLOC_STRUCT(r300_ztop_state);
    r300->fs_constants.state = CALLOC_STRUCT(r300_constant_buffer);
    r300->vs_constants.state = CALLOC_STRUCT(r300_constant_buffer);
    if (!r300->screen->caps.has_tcl) {
        r300->vertex_stream_state.state = CALLOC_STRUCT(r300_vertex_stream_state);
    }

    /* Some non-CSO atoms don't use the state pointer. */
    r300->fb_state_pipelined.allow_null_state = TRUE;
    r300->fs_rc_constant_state.allow_null_state = TRUE;
    r300->pvs_flush.allow_null_state = TRUE;
    r300->query_start.allow_null_state = TRUE;
    r300->texture_cache_inval.allow_null_state = TRUE;

    /* Some states must be marked as dirty here to properly set up
     * hardware in the first command stream. */
    r300->invariant_state.dirty = TRUE;
    r300->pvs_flush.dirty = TRUE;
    r300->vap_invariant_state.dirty = TRUE;
    r300->texture_cache_inval.dirty = TRUE;
    r300->textures_state.dirty = TRUE;
}

/* Not every state tracker calls every driver function before the first draw
 * call and we must initialize the command buffers somehow. */
static void r300_init_states(struct pipe_context *pipe)
{
    struct r300_context *r300 = r300_context(pipe);
    struct pipe_blend_color bc = {{0}};
    struct pipe_clip_state cs = {{{0}}};
    struct pipe_scissor_state ss = {0};
    struct r300_clip_state *clip =
            (struct r300_clip_state*)r300->clip_state.state;
    struct r300_gpu_flush *gpuflush =
            (struct r300_gpu_flush*)r300->gpu_flush.state;
    struct r300_vap_invariant_state *vap_invariant =
            (struct r300_vap_invariant_state*)r300->vap_invariant_state.state;
    struct r300_invariant_state *invariant =
            (struct r300_invariant_state*)r300->invariant_state.state;

    CB_LOCALS;

    pipe->set_blend_color(pipe, &bc);
    pipe->set_scissor_state(pipe, &ss);

    /* Initialize the clip state. */
    if (r300_context(pipe)->screen->caps.has_tcl) {
        pipe->set_clip_state(pipe, &cs);
    } else {
        BEGIN_CB(clip->cb, 2);
        OUT_CB_REG(R300_VAP_CLIP_CNTL, R300_CLIP_DISABLE);
        END_CB;
    }

    /* Initialize the GPU flush. */
    {
        BEGIN_CB(gpuflush->cb_flush_clean, 6);

        /* Flush and free renderbuffer caches. */
        OUT_CB_REG(R300_RB3D_DSTCACHE_CTLSTAT,
                   R300_RB3D_DSTCACHE_CTLSTAT_DC_FREE_FREE_3D_TAGS |
                   R300_RB3D_DSTCACHE_CTLSTAT_DC_FLUSH_FLUSH_DIRTY_3D);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE |
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FREE_FREE);

        /* Wait until the GPU is idle.
         * This fixes random pixels sometimes appearing probably caused
         * by incomplete rendering. */
        OUT_CB_REG(RADEON_WAIT_UNTIL, RADEON_WAIT_3D_IDLECLEAN);
        END_CB;
    }

    /* Initialize the VAP invariant state. */
    {
        BEGIN_CB(vap_invariant->cb, 9);
        OUT_CB_REG(VAP_PVS_VTX_TIMEOUT_REG, 0xffff);
        OUT_CB_REG_SEQ(R300_VAP_GB_VERT_CLIP_ADJ, 4);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_REG(R300_VAP_PSC_SGN_NORM_CNTL, R300_SGN_NORM_NO_ZERO);
        END_CB;
    }

    /* Initialize the invariant state. */
    {
        BEGIN_CB(invariant->cb, r300->invariant_state.size);
        OUT_CB_REG(R300_GB_SELECT, 0);
        OUT_CB_REG(R300_FG_FOG_BLEND, 0);
        OUT_CB_REG(R300_GA_ROUND_MODE, 1);
        OUT_CB_REG(R300_GA_OFFSET, 0);
        OUT_CB_REG(R300_SU_TEX_WRAP, 0);
        OUT_CB_REG(R300_SU_DEPTH_SCALE, 0x4B7FFFFF);
        OUT_CB_REG(R300_SU_DEPTH_OFFSET, 0);
        OUT_CB_REG(R300_SC_EDGERULE, 0x2DA49525);

        if (r300->screen->caps.is_rv350) {
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD, 0x01010101);
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD, 0xFEFEFEFE);
        }
        END_CB;
    }

    /* Initialize the hyperz state. */
    {
        struct r300_hyperz_state *hyperz =
            (struct r300_hyperz_state*)r300->hyperz_state.state;
        BEGIN_CB(&hyperz->cb_flush_begin, r300->hyperz_state.size);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE);
        OUT_CB_REG(R300_ZB_BW_CNTL, 0);
        OUT_CB_REG(R300_ZB_DEPTHCLEARVALUE, 0);
        OUT_CB_REG(R300_SC_HYPERZ, R300_SC_HYPERZ_ADJ_2);

        if (r300->screen->caps.is_r500 ||
            (r300->screen->caps.is_rv350 &&
             r300->rws->get_value(r300->rws, R300_VID_DRM_2_6_0))) {
            OUT_CB_REG(R300_GB_Z_PEQ_CONFIG, 0);
        }
        END_CB;
    }
}

struct pipe_context* r300_create_context(struct pipe_screen* screen,
                                         void *priv)
{
    struct r300_context* r300 = CALLOC_STRUCT(r300_context);
    struct r300_screen* r300screen = r300_screen(screen);
    struct r300_winsys_screen *rws = r300screen->rws;

    if (!r300)
        return NULL;

    r300_update_num_contexts(r300screen, 1);

    r300->rws = rws;
    r300->screen = r300screen;

    r300->context.winsys = (struct pipe_winsys*)rws;
    r300->context.screen = screen;
    r300->context.priv = priv;

    r300->context.destroy = r300_destroy_context;

    make_empty_list(&r300->query_list);

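    /* Slab allocator for pipe_transfer objects. The pool is private to this
     * context, so it can remain single-threaded. */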
    util_slab_create(&r300->pool_transfers,
                     sizeof(struct pipe_transfer), 64,
                     UTIL_SLAB_SINGLETHREADED);

    r300->cs = rws->cs_create(rws);
    if (r300->cs == NULL)
        goto fail;

    if (!r300screen->caps.has_tcl) {
        /* Create a Draw. This is used for SW TCL. */
        r300->draw = draw_create(&r300->context);
        /* Enable our renderer. */
        draw_set_rasterize_stage(r300->draw, r300_draw_stage(r300));
        /* Disable converting points/lines to triangles. */
        draw_wide_line_threshold(r300->draw, 10000000.f);
        draw_wide_point_threshold(r300->draw, 10000000.f);
    }

    r300_setup_atoms(r300);

    r300_init_blit_functions(r300);
    r300_init_flush_functions(r300);
    r300_init_query_functions(r300);
    r300_init_state_functions(r300);
    r300_init_resource_functions(r300);

    r300->blitter = util_blitter_create(&r300->context);
    if (r300->blitter == NULL)
        goto fail;

    /* Render functions must be initialized after blitter. */
    r300_init_render_functions(r300);

    rws->cs_set_flush(r300->cs, r300_flush_cb, r300);

    /* setup hyper-z mm */
    if (r300->rws->get_value(r300->rws, R300_CAN_HYPERZ))
        if (!r300_hyperz_init_mm(r300))
            goto fail;

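    /* Upload managers that copy user index and vertex data into
     * GPU-accessible buffers (32 KB and 128 KB, 16-byte aligned). */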
    r300->upload_ib = u_upload_create(&r300->context,
                                      32 * 1024, 16,
                                      PIPE_BIND_INDEX_BUFFER);

    if (r300->upload_ib == NULL)
        goto fail;

    r300->upload_vb = u_upload_create(&r300->context,
                                      128 * 1024, 16,
                                      PIPE_BIND_VERTEX_BUFFER);
    if (r300->upload_vb == NULL)
        goto fail;

    r300->tran.translate_cache = translate_cache_create();
    if (r300->tran.translate_cache == NULL)
        goto fail;

    r300_init_states(&r300->context);

    /* The KIL opcode needs the first texture unit to be enabled
     * on r3xx-r4xx. In order to calm down the CS checker, we bind this
     * dummy texture there. */
    if (!r300->screen->caps.is_r500) {
        struct pipe_resource *tex;
        struct pipe_resource rtempl = {{0}};
        struct pipe_sampler_view vtempl = {{0}};

        rtempl.target = PIPE_TEXTURE_2D;
        rtempl.format = PIPE_FORMAT_I8_UNORM;
        rtempl.bind = PIPE_BIND_SAMPLER_VIEW;
        rtempl.usage = PIPE_USAGE_IMMUTABLE;
        rtempl.width0 = 1;
        rtempl.height0 = 1;
        rtempl.depth0 = 1;
        tex = screen->resource_create(screen, &rtempl);

        u_sampler_view_default_template(&vtempl, tex, tex->format);

        r300->texkill_sampler = (struct r300_sampler_view*)
                r300->context.create_sampler_view(&r300->context, tex, &vtempl);

        pipe_resource_reference(&tex, NULL);
    }

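    /* The dummy VBO (released in r300_release_referenced_objects): a small
     * immutable vertex buffer with room for 16 floats. */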
    {
        struct pipe_resource vb = {};
        vb.target = PIPE_BUFFER;
        vb.format = PIPE_FORMAT_R8_UNORM;
        vb.bind = PIPE_BIND_VERTEX_BUFFER;
        vb.usage = PIPE_USAGE_IMMUTABLE;
        vb.width0 = sizeof(float) * 16;
        vb.height0 = 1;
        vb.depth0 = 1;

        r300->dummy_vb = screen->resource_create(screen, &vb);
    }

    return &r300->context;

fail:
    r300_destroy_context(&r300->context);
    return NULL;
}

void r300_finish(struct r300_context *r300)
{
    struct pipe_framebuffer_state *fb;
    unsigned i;

    /* This is a preliminary implementation of glFinish.
     *
     * The ideal implementation should use something like EmitIrqLocked and
     * WaitIrq, or better, real fences.
     */
    if (r300->fb_state.state) {
        fb = r300->fb_state.state;

        for (i = 0; i < fb->nr_cbufs; i++) {
            if (fb->cbufs[i]->texture) {
                r300->rws->buffer_wait(r300->rws,
                    r300_texture(fb->cbufs[i]->texture)->buffer);
                return;
            }
        }
        if (fb->zsbuf && fb->zsbuf->texture) {
            r300->rws->buffer_wait(r300->rws,
                r300_texture(fb->zsbuf->texture)->buffer);
        }
    }
}