/*
 * cso: check count == 0 in cso_set_vertex_buffers
 * [mesa.git] src/gallium/auxiliary/cso_cache/cso_context.c
 */
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Per-shader sampler information.
 *
 * One instance exists per shader stage (see cso_context::samplers).
 */
struct sampler_info
{
   /* CSO cache wrappers for the bound sampler states; parallel to samplers[]. */
   struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
   /* Raw driver sampler-state handles (cso_samplers[i]->data), kept as a
    * contiguous array — presumably so they can be handed to the driver
    * directly; confirm against the binding code later in this file. */
   void *samplers[PIPE_MAX_SAMPLERS];
};
61
62
63
/**
 * The CSO context: wraps a pipe_context and caches/deduplicates constant
 * state objects, while also providing a 1-deep save/restore stack for
 * most pieces of state (used by meta operations such as blits).
 */
struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;    /**< hash cache of created state objects */
   struct u_vbuf *vbuf;        /**< non-NULL if vertex-buffer fallbacks needed */

   /* Capabilities queried from the screen at creation time. */
   boolean has_geometry_shader;
   boolean has_tessellation;
   boolean has_compute_shader;
   boolean has_streamout;

   unsigned saved_state;  /**< bitmask of CSO_BIT_x flags */

   struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views;

   struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views_saved;

   struct sampler_info fragment_samplers_saved;
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* Temporary number until cso_single_sampler_done is called.
    * It tracks the highest sampler seen in cso_single_sampler.
    */
   int max_sampler_seen;

   /* Only vertex buffer slot 0 is tracked/saved here. */
   struct pipe_vertex_buffer vertex_buffer0_current;
   struct pipe_vertex_buffer vertex_buffer0_saved;

   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   /* Only fragment image slot 0 is tracked/saved here. */
   struct pipe_image_view fragment_image0_current;
   struct pipe_image_view fragment_image0_saved;

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *tessctrl_shader, *tessctrl_shader_saved;
   void *tesseval_shader, *tesseval_shader_saved;
   void *compute_shader;   /* no saved slot: compute state is never stacked */
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   unsigned min_samples, min_samples_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
129
130 struct pipe_context *cso_get_pipe_context(struct cso_context *cso)
131 {
132 return cso->pipe;
133 }
134
135 static boolean delete_blend_state(struct cso_context *ctx, void *state)
136 {
137 struct cso_blend *cso = (struct cso_blend *)state;
138
139 if (ctx->blend == cso->data)
140 return FALSE;
141
142 if (cso->delete_state)
143 cso->delete_state(cso->context, cso->data);
144 FREE(state);
145 return TRUE;
146 }
147
148 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
149 {
150 struct cso_depth_stencil_alpha *cso =
151 (struct cso_depth_stencil_alpha *)state;
152
153 if (ctx->depth_stencil == cso->data)
154 return FALSE;
155
156 if (cso->delete_state)
157 cso->delete_state(cso->context, cso->data);
158 FREE(state);
159
160 return TRUE;
161 }
162
163 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
164 {
165 struct cso_sampler *cso = (struct cso_sampler *)state;
166 if (cso->delete_state)
167 cso->delete_state(cso->context, cso->data);
168 FREE(state);
169 return TRUE;
170 }
171
172 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
173 {
174 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
175
176 if (ctx->rasterizer == cso->data)
177 return FALSE;
178 if (cso->delete_state)
179 cso->delete_state(cso->context, cso->data);
180 FREE(state);
181 return TRUE;
182 }
183
184 static boolean delete_vertex_elements(struct cso_context *ctx,
185 void *state)
186 {
187 struct cso_velements *cso = (struct cso_velements *)state;
188
189 if (ctx->velements == cso->data)
190 return FALSE;
191
192 if (cso->delete_state)
193 cso->delete_state(cso->context, cso->data);
194 FREE(state);
195 return TRUE;
196 }
197
198
199 static inline boolean delete_cso(struct cso_context *ctx,
200 void *state, enum cso_cache_type type)
201 {
202 switch (type) {
203 case CSO_BLEND:
204 return delete_blend_state(ctx, state);
205 case CSO_SAMPLER:
206 return delete_sampler_state(ctx, state);
207 case CSO_DEPTH_STENCIL_ALPHA:
208 return delete_depth_stencil_state(ctx, state);
209 case CSO_RASTERIZER:
210 return delete_rasterizer_state(ctx, state);
211 case CSO_VELEMENTS:
212 return delete_vertex_elements(ctx, state);
213 default:
214 assert(0);
215 FREE(state);
216 }
217 return FALSE;
218 }
219
220 static inline void
221 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
222 int max_size, void *user_data)
223 {
224 struct cso_context *ctx = (struct cso_context *)user_data;
225 /* if we're approach the maximum size, remove fourth of the entries
226 * otherwise every subsequent call will go through the same */
227 int hash_size = cso_hash_size(hash);
228 int max_entries = (max_size > hash_size) ? max_size : hash_size;
229 int to_remove = (max_size < max_entries) * max_entries/4;
230 struct cso_hash_iter iter;
231 struct cso_sampler **samplers_to_restore = NULL;
232 unsigned to_restore = 0;
233
234 if (hash_size > max_size)
235 to_remove += hash_size - max_size;
236
237 if (to_remove == 0)
238 return;
239
240 if (type == CSO_SAMPLER) {
241 int i, j;
242
243 samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
244 sizeof(*samplers_to_restore));
245
246 /* Temporarily remove currently bound sampler states from the hash
247 * table, to prevent them from being deleted
248 */
249 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
250 for (j = 0; j < PIPE_MAX_SAMPLERS; j++) {
251 struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
252
253 if (sampler && cso_hash_take(hash, sampler->hash_key))
254 samplers_to_restore[to_restore++] = sampler;
255 }
256 }
257 }
258
259 iter = cso_hash_first_node(hash);
260 while (to_remove) {
261 /*remove elements until we're good */
262 /*fixme: currently we pick the nodes to remove at random*/
263 void *cso = cso_hash_iter_data(iter);
264
265 if (!cso)
266 break;
267
268 if (delete_cso(ctx, cso, type)) {
269 iter = cso_hash_erase(hash, iter);
270 --to_remove;
271 } else
272 iter = cso_hash_iter_next(iter);
273 }
274
275 if (type == CSO_SAMPLER) {
276 /* Put currently bound sampler states back into the hash table */
277 while (to_restore--) {
278 struct cso_sampler *sampler = samplers_to_restore[to_restore];
279
280 cso_hash_insert(hash, sampler->hash_key, sampler);
281 }
282
283 FREE(samplers_to_restore);
284 }
285 }
286
287 static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
288 {
289 struct u_vbuf_caps caps;
290
291 /* Install u_vbuf if there is anything unsupported. */
292 if (u_vbuf_get_caps(cso->pipe->screen, &caps, flags)) {
293 cso->vbuf = u_vbuf_create(cso->pipe, &caps);
294 }
295 }
296
297 struct cso_context *
298 cso_create_context(struct pipe_context *pipe, unsigned u_vbuf_flags)
299 {
300 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
301 if (!ctx)
302 return NULL;
303
304 ctx->cache = cso_cache_create();
305 if (ctx->cache == NULL)
306 goto out;
307 cso_cache_set_sanitize_callback(ctx->cache,
308 sanitize_hash,
309 ctx);
310
311 ctx->pipe = pipe;
312 ctx->sample_mask = ~0;
313
314 cso_init_vbuf(ctx, u_vbuf_flags);
315
316 /* Enable for testing: */
317 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
318
319 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
320 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
321 ctx->has_geometry_shader = TRUE;
322 }
323 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
324 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
325 ctx->has_tessellation = TRUE;
326 }
327 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
328 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
329 int supported_irs =
330 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
331 PIPE_SHADER_CAP_SUPPORTED_IRS);
332 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
333 ctx->has_compute_shader = TRUE;
334 }
335 }
336 if (pipe->screen->get_param(pipe->screen,
337 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
338 ctx->has_streamout = TRUE;
339 }
340
341 ctx->max_sampler_seen = -1;
342 return ctx;
343
344 out:
345 cso_destroy_context( ctx );
346 return NULL;
347 }
348
/**
 * Free the CSO context.
 *
 * First unbinds every piece of state from the wrapped pipe_context so the
 * driver drops its references, then releases all references this context
 * itself holds, and finally frees the cache and the context structure.
 */
void cso_destroy_context( struct cso_context *ctx )
{
   unsigned i;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );

      {
         /* Unbind all sampler states and sampler views for every shader
          * stage.  The zero-filled static arrays serve as "all NULL"
          * argument lists for the bind calls. */
         static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
         static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
         struct pipe_screen *scr = ctx->pipe->screen;
         enum pipe_shader_type sh;
         for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
            /* Only unbind as many slots as the driver actually exposes. */
            int maxsam = scr->get_shader_param(scr, sh,
                                               PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
            int maxview = scr->get_shader_param(scr, sh,
                                                PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
            assert(maxsam <= PIPE_MAX_SAMPLERS);
            assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
            if (maxsam > 0) {
               ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
            }
            if (maxview > 0) {
               ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
            }
         }
      }

      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
      /* Optional stages are only unbound when the driver supports them. */
      if (ctx->has_geometry_shader) {
         ctx->pipe->bind_gs_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
      }
      if (ctx->has_tessellation) {
         ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
         ctx->pipe->bind_tes_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
      }
      if (ctx->has_compute_shader) {
         ctx->pipe->bind_compute_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
      }
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );

      if (ctx->has_streamout)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
   }

   /* Release references held by this context (current and saved copies). */
   for (i = 0; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }
   for (i = 0; i < ctx->nr_fragment_views_saved; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
   pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);

   for (i = 0; i < PIPE_SHADER_TYPES; i++) {
      pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
      pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
   }

   pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
   pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }

   if (ctx->vbuf)
      u_vbuf_destroy(ctx->vbuf);
   FREE( ctx );
}
441
442
443 /* Those function will either find the state of the given template
444 * in the cache or they will create a new state from the given
445 * template, insert it in the cache and return it.
446 */
447
448 /*
449 * If the driver returns 0 from the create method then they will assign
450 * the data member of the cso to be the template itself.
451 */
452
453 enum pipe_error cso_set_blend(struct cso_context *ctx,
454 const struct pipe_blend_state *templ)
455 {
456 unsigned key_size, hash_key;
457 struct cso_hash_iter iter;
458 void *handle;
459
460 key_size = templ->independent_blend_enable ?
461 sizeof(struct pipe_blend_state) :
462 (char *)&(templ->rt[1]) - (char *)templ;
463 hash_key = cso_construct_key((void*)templ, key_size);
464 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
465 (void*)templ, key_size);
466
467 if (cso_hash_iter_is_null(iter)) {
468 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
469 if (!cso)
470 return PIPE_ERROR_OUT_OF_MEMORY;
471
472 memset(&cso->state, 0, sizeof cso->state);
473 memcpy(&cso->state, templ, key_size);
474 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
475 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
476 cso->context = ctx->pipe;
477
478 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
479 if (cso_hash_iter_is_null(iter)) {
480 FREE(cso);
481 return PIPE_ERROR_OUT_OF_MEMORY;
482 }
483
484 handle = cso->data;
485 }
486 else {
487 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
488 }
489
490 if (ctx->blend != handle) {
491 ctx->blend = handle;
492 ctx->pipe->bind_blend_state(ctx->pipe, handle);
493 }
494 return PIPE_OK;
495 }
496
497 static void
498 cso_save_blend(struct cso_context *ctx)
499 {
500 assert(!ctx->blend_saved);
501 ctx->blend_saved = ctx->blend;
502 }
503
504 static void
505 cso_restore_blend(struct cso_context *ctx)
506 {
507 if (ctx->blend != ctx->blend_saved) {
508 ctx->blend = ctx->blend_saved;
509 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
510 }
511 ctx->blend_saved = NULL;
512 }
513
514
515
516 enum pipe_error
517 cso_set_depth_stencil_alpha(struct cso_context *ctx,
518 const struct pipe_depth_stencil_alpha_state *templ)
519 {
520 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
521 unsigned hash_key = cso_construct_key((void*)templ, key_size);
522 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
523 hash_key,
524 CSO_DEPTH_STENCIL_ALPHA,
525 (void*)templ, key_size);
526 void *handle;
527
528 if (cso_hash_iter_is_null(iter)) {
529 struct cso_depth_stencil_alpha *cso =
530 MALLOC(sizeof(struct cso_depth_stencil_alpha));
531 if (!cso)
532 return PIPE_ERROR_OUT_OF_MEMORY;
533
534 memcpy(&cso->state, templ, sizeof(*templ));
535 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
536 &cso->state);
537 cso->delete_state =
538 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
539 cso->context = ctx->pipe;
540
541 iter = cso_insert_state(ctx->cache, hash_key,
542 CSO_DEPTH_STENCIL_ALPHA, cso);
543 if (cso_hash_iter_is_null(iter)) {
544 FREE(cso);
545 return PIPE_ERROR_OUT_OF_MEMORY;
546 }
547
548 handle = cso->data;
549 }
550 else {
551 handle = ((struct cso_depth_stencil_alpha *)
552 cso_hash_iter_data(iter))->data;
553 }
554
555 if (ctx->depth_stencil != handle) {
556 ctx->depth_stencil = handle;
557 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
558 }
559 return PIPE_OK;
560 }
561
562 static void
563 cso_save_depth_stencil_alpha(struct cso_context *ctx)
564 {
565 assert(!ctx->depth_stencil_saved);
566 ctx->depth_stencil_saved = ctx->depth_stencil;
567 }
568
569 static void
570 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
571 {
572 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
573 ctx->depth_stencil = ctx->depth_stencil_saved;
574 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
575 ctx->depth_stencil_saved);
576 }
577 ctx->depth_stencil_saved = NULL;
578 }
579
580
581
582 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
583 const struct pipe_rasterizer_state *templ)
584 {
585 unsigned key_size = sizeof(struct pipe_rasterizer_state);
586 unsigned hash_key = cso_construct_key((void*)templ, key_size);
587 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
588 hash_key,
589 CSO_RASTERIZER,
590 (void*)templ, key_size);
591 void *handle = NULL;
592
593 /* We can't have both point_quad_rasterization (sprites) and point_smooth
594 * (round AA points) enabled at the same time.
595 */
596 assert(!(templ->point_quad_rasterization && templ->point_smooth));
597
598 if (cso_hash_iter_is_null(iter)) {
599 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
600 if (!cso)
601 return PIPE_ERROR_OUT_OF_MEMORY;
602
603 memcpy(&cso->state, templ, sizeof(*templ));
604 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
605 cso->delete_state =
606 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
607 cso->context = ctx->pipe;
608
609 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
610 if (cso_hash_iter_is_null(iter)) {
611 FREE(cso);
612 return PIPE_ERROR_OUT_OF_MEMORY;
613 }
614
615 handle = cso->data;
616 }
617 else {
618 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
619 }
620
621 if (ctx->rasterizer != handle) {
622 ctx->rasterizer = handle;
623 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
624 }
625 return PIPE_OK;
626 }
627
628 static void
629 cso_save_rasterizer(struct cso_context *ctx)
630 {
631 assert(!ctx->rasterizer_saved);
632 ctx->rasterizer_saved = ctx->rasterizer;
633 }
634
635 static void
636 cso_restore_rasterizer(struct cso_context *ctx)
637 {
638 if (ctx->rasterizer != ctx->rasterizer_saved) {
639 ctx->rasterizer = ctx->rasterizer_saved;
640 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
641 }
642 ctx->rasterizer_saved = NULL;
643 }
644
645
646 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
647 {
648 if (ctx->fragment_shader != handle) {
649 ctx->fragment_shader = handle;
650 ctx->pipe->bind_fs_state(ctx->pipe, handle);
651 }
652 }
653
654 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
655 {
656 if (handle == ctx->fragment_shader) {
657 /* unbind before deleting */
658 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
659 ctx->fragment_shader = NULL;
660 }
661 ctx->pipe->delete_fs_state(ctx->pipe, handle);
662 }
663
664 static void
665 cso_save_fragment_shader(struct cso_context *ctx)
666 {
667 assert(!ctx->fragment_shader_saved);
668 ctx->fragment_shader_saved = ctx->fragment_shader;
669 }
670
671 static void
672 cso_restore_fragment_shader(struct cso_context *ctx)
673 {
674 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
675 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
676 ctx->fragment_shader = ctx->fragment_shader_saved;
677 }
678 ctx->fragment_shader_saved = NULL;
679 }
680
681
682 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
683 {
684 if (ctx->vertex_shader != handle) {
685 ctx->vertex_shader = handle;
686 ctx->pipe->bind_vs_state(ctx->pipe, handle);
687 }
688 }
689
690 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
691 {
692 if (handle == ctx->vertex_shader) {
693 /* unbind before deleting */
694 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
695 ctx->vertex_shader = NULL;
696 }
697 ctx->pipe->delete_vs_state(ctx->pipe, handle);
698 }
699
700 static void
701 cso_save_vertex_shader(struct cso_context *ctx)
702 {
703 assert(!ctx->vertex_shader_saved);
704 ctx->vertex_shader_saved = ctx->vertex_shader;
705 }
706
707 static void
708 cso_restore_vertex_shader(struct cso_context *ctx)
709 {
710 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
711 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
712 ctx->vertex_shader = ctx->vertex_shader_saved;
713 }
714 ctx->vertex_shader_saved = NULL;
715 }
716
717
718 void cso_set_framebuffer(struct cso_context *ctx,
719 const struct pipe_framebuffer_state *fb)
720 {
721 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
722 util_copy_framebuffer_state(&ctx->fb, fb);
723 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
724 }
725 }
726
727 static void
728 cso_save_framebuffer(struct cso_context *ctx)
729 {
730 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
731 }
732
733 static void
734 cso_restore_framebuffer(struct cso_context *ctx)
735 {
736 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
737 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
738 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
739 util_unreference_framebuffer_state(&ctx->fb_saved);
740 }
741 }
742
743
744 void cso_set_viewport(struct cso_context *ctx,
745 const struct pipe_viewport_state *vp)
746 {
747 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
748 ctx->vp = *vp;
749 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
750 }
751 }
752
753 /**
754 * Setup viewport state for given width and height (position is always (0,0)).
755 * Invert the Y axis if 'invert' is true.
756 */
757 void
758 cso_set_viewport_dims(struct cso_context *ctx,
759 float width, float height, boolean invert)
760 {
761 struct pipe_viewport_state vp;
762 vp.scale[0] = width * 0.5f;
763 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
764 vp.scale[2] = 0.5f;
765 vp.translate[0] = 0.5f * width;
766 vp.translate[1] = 0.5f * height;
767 vp.translate[2] = 0.5f;
768 cso_set_viewport(ctx, &vp);
769 }
770
771 static void
772 cso_save_viewport(struct cso_context *ctx)
773 {
774 ctx->vp_saved = ctx->vp;
775 }
776
777
778 static void
779 cso_restore_viewport(struct cso_context *ctx)
780 {
781 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
782 ctx->vp = ctx->vp_saved;
783 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
784 }
785 }
786
787
788 void cso_set_blend_color(struct cso_context *ctx,
789 const struct pipe_blend_color *bc)
790 {
791 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
792 ctx->blend_color = *bc;
793 ctx->pipe->set_blend_color(ctx->pipe, bc);
794 }
795 }
796
797 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
798 {
799 if (ctx->sample_mask != sample_mask) {
800 ctx->sample_mask = sample_mask;
801 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
802 }
803 }
804
805 static void
806 cso_save_sample_mask(struct cso_context *ctx)
807 {
808 ctx->sample_mask_saved = ctx->sample_mask;
809 }
810
811 static void
812 cso_restore_sample_mask(struct cso_context *ctx)
813 {
814 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
815 }
816
817 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
818 {
819 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
820 ctx->min_samples = min_samples;
821 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
822 }
823 }
824
825 static void
826 cso_save_min_samples(struct cso_context *ctx)
827 {
828 ctx->min_samples_saved = ctx->min_samples;
829 }
830
831 static void
832 cso_restore_min_samples(struct cso_context *ctx)
833 {
834 cso_set_min_samples(ctx, ctx->min_samples_saved);
835 }
836
837 void cso_set_stencil_ref(struct cso_context *ctx,
838 const struct pipe_stencil_ref *sr)
839 {
840 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
841 ctx->stencil_ref = *sr;
842 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
843 }
844 }
845
846 static void
847 cso_save_stencil_ref(struct cso_context *ctx)
848 {
849 ctx->stencil_ref_saved = ctx->stencil_ref;
850 }
851
852
853 static void
854 cso_restore_stencil_ref(struct cso_context *ctx)
855 {
856 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
857 sizeof(ctx->stencil_ref))) {
858 ctx->stencil_ref = ctx->stencil_ref_saved;
859 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
860 }
861 }
862
863 void cso_set_render_condition(struct cso_context *ctx,
864 struct pipe_query *query,
865 boolean condition,
866 enum pipe_render_cond_flag mode)
867 {
868 struct pipe_context *pipe = ctx->pipe;
869
870 if (ctx->render_condition != query ||
871 ctx->render_condition_mode != mode ||
872 ctx->render_condition_cond != condition) {
873 pipe->render_condition(pipe, query, condition, mode);
874 ctx->render_condition = query;
875 ctx->render_condition_cond = condition;
876 ctx->render_condition_mode = mode;
877 }
878 }
879
880 static void
881 cso_save_render_condition(struct cso_context *ctx)
882 {
883 ctx->render_condition_saved = ctx->render_condition;
884 ctx->render_condition_cond_saved = ctx->render_condition_cond;
885 ctx->render_condition_mode_saved = ctx->render_condition_mode;
886 }
887
888 static void
889 cso_restore_render_condition(struct cso_context *ctx)
890 {
891 cso_set_render_condition(ctx, ctx->render_condition_saved,
892 ctx->render_condition_cond_saved,
893 ctx->render_condition_mode_saved);
894 }
895
896 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
897 {
898 assert(ctx->has_geometry_shader || !handle);
899
900 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
901 ctx->geometry_shader = handle;
902 ctx->pipe->bind_gs_state(ctx->pipe, handle);
903 }
904 }
905
906 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
907 {
908 if (handle == ctx->geometry_shader) {
909 /* unbind before deleting */
910 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
911 ctx->geometry_shader = NULL;
912 }
913 ctx->pipe->delete_gs_state(ctx->pipe, handle);
914 }
915
916 static void
917 cso_save_geometry_shader(struct cso_context *ctx)
918 {
919 if (!ctx->has_geometry_shader) {
920 return;
921 }
922
923 assert(!ctx->geometry_shader_saved);
924 ctx->geometry_shader_saved = ctx->geometry_shader;
925 }
926
927 static void
928 cso_restore_geometry_shader(struct cso_context *ctx)
929 {
930 if (!ctx->has_geometry_shader) {
931 return;
932 }
933
934 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
935 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
936 ctx->geometry_shader = ctx->geometry_shader_saved;
937 }
938 ctx->geometry_shader_saved = NULL;
939 }
940
941 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
942 {
943 assert(ctx->has_tessellation || !handle);
944
945 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
946 ctx->tessctrl_shader = handle;
947 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
948 }
949 }
950
951 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
952 {
953 if (handle == ctx->tessctrl_shader) {
954 /* unbind before deleting */
955 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
956 ctx->tessctrl_shader = NULL;
957 }
958 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
959 }
960
961 static void
962 cso_save_tessctrl_shader(struct cso_context *ctx)
963 {
964 if (!ctx->has_tessellation) {
965 return;
966 }
967
968 assert(!ctx->tessctrl_shader_saved);
969 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
970 }
971
972 static void
973 cso_restore_tessctrl_shader(struct cso_context *ctx)
974 {
975 if (!ctx->has_tessellation) {
976 return;
977 }
978
979 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
980 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
981 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
982 }
983 ctx->tessctrl_shader_saved = NULL;
984 }
985
986 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
987 {
988 assert(ctx->has_tessellation || !handle);
989
990 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
991 ctx->tesseval_shader = handle;
992 ctx->pipe->bind_tes_state(ctx->pipe, handle);
993 }
994 }
995
996 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
997 {
998 if (handle == ctx->tesseval_shader) {
999 /* unbind before deleting */
1000 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
1001 ctx->tesseval_shader = NULL;
1002 }
1003 ctx->pipe->delete_tes_state(ctx->pipe, handle);
1004 }
1005
1006 static void
1007 cso_save_tesseval_shader(struct cso_context *ctx)
1008 {
1009 if (!ctx->has_tessellation) {
1010 return;
1011 }
1012
1013 assert(!ctx->tesseval_shader_saved);
1014 ctx->tesseval_shader_saved = ctx->tesseval_shader;
1015 }
1016
1017 static void
1018 cso_restore_tesseval_shader(struct cso_context *ctx)
1019 {
1020 if (!ctx->has_tessellation) {
1021 return;
1022 }
1023
1024 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
1025 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
1026 ctx->tesseval_shader = ctx->tesseval_shader_saved;
1027 }
1028 ctx->tesseval_shader_saved = NULL;
1029 }
1030
1031 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
1032 {
1033 assert(ctx->has_compute_shader || !handle);
1034
1035 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1036 ctx->compute_shader = handle;
1037 ctx->pipe->bind_compute_state(ctx->pipe, handle);
1038 }
1039 }
1040
1041 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
1042 {
1043 if (handle == ctx->compute_shader) {
1044 /* unbind before deleting */
1045 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
1046 ctx->compute_shader = NULL;
1047 }
1048 ctx->pipe->delete_compute_state(ctx->pipe, handle);
1049 }
1050
/**
 * Bind a vertex-elements state matching (count, states), creating and
 * caching a new driver object only on a cache miss.  When the u_vbuf
 * fallback module is installed, the call is forwarded to it instead.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   /* NOTE(review): key_size assumes cso_velems_state lays out `count`
    * immediately before `velems` with no padding — confirm against the
    * struct declaration in the header. */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create, cache and use a new driver object. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Only the first key_size bytes (count + used elements) are copied. */
      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* Bind only on change. */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
1110
1111 static void
1112 cso_save_vertex_elements(struct cso_context *ctx)
1113 {
1114 struct u_vbuf *vbuf = ctx->vbuf;
1115
1116 if (vbuf) {
1117 u_vbuf_save_vertex_elements(vbuf);
1118 return;
1119 }
1120
1121 assert(!ctx->velements_saved);
1122 ctx->velements_saved = ctx->velements;
1123 }
1124
1125 static void
1126 cso_restore_vertex_elements(struct cso_context *ctx)
1127 {
1128 struct u_vbuf *vbuf = ctx->vbuf;
1129
1130 if (vbuf) {
1131 u_vbuf_restore_vertex_elements(vbuf);
1132 return;
1133 }
1134
1135 if (ctx->velements != ctx->velements_saved) {
1136 ctx->velements = ctx->velements_saved;
1137 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1138 }
1139 ctx->velements_saved = NULL;
1140 }
1141
/* vertex buffers */
1143
1144 void cso_set_vertex_buffers(struct cso_context *ctx,
1145 unsigned start_slot, unsigned count,
1146 const struct pipe_vertex_buffer *buffers)
1147 {
1148 struct u_vbuf *vbuf = ctx->vbuf;
1149
1150 if (!count)
1151 return;
1152
1153 if (vbuf) {
1154 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1155 return;
1156 }
1157
1158 /* Save what's in the auxiliary slot, so that we can save and restore it
1159 * for meta ops. */
1160 if (start_slot == 0) {
1161 if (buffers) {
1162 pipe_vertex_buffer_reference(&ctx->vertex_buffer0_current,
1163 buffers);
1164 } else {
1165 pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
1166 }
1167 }
1168
1169 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1170 }
1171
1172 static void
1173 cso_save_vertex_buffer0(struct cso_context *ctx)
1174 {
1175 struct u_vbuf *vbuf = ctx->vbuf;
1176
1177 if (vbuf) {
1178 u_vbuf_save_vertex_buffer0(vbuf);
1179 return;
1180 }
1181
1182 pipe_vertex_buffer_reference(&ctx->vertex_buffer0_saved,
1183 &ctx->vertex_buffer0_current);
1184 }
1185
1186 static void
1187 cso_restore_vertex_buffer0(struct cso_context *ctx)
1188 {
1189 struct u_vbuf *vbuf = ctx->vbuf;
1190
1191 if (vbuf) {
1192 u_vbuf_restore_vertex_buffer0(vbuf);
1193 return;
1194 }
1195
1196 cso_set_vertex_buffers(ctx, 0, 1, &ctx->vertex_buffer0_saved);
1197 pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);
1198 }
1199
1200
/**
 * Stage one sampler state for a shader stage and slot.  The driver CSO
 * is created (and cached) on first use; the actual
 * bind_sampler_states() call is deferred to cso_single_sampler_done().
 * A NULL template is a no-op.
 */
void
cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
                   unsigned idx, const struct pipe_sampler_state *templ)
{
   if (templ) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_sampler *cso;
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* Cache miss: create and insert a new sampler CSO.  On
          * allocation failure the slot silently keeps its previous
          * contents. */
         cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;
         cso->hash_key = hash_key;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return;
         }
      }
      else {
         /* Cache hit: reuse the existing CSO. */
         cso = cso_hash_iter_data(iter);
      }

      /* Stage the handle; max_sampler_seen tracks the highest slot
       * touched since the last cso_single_sampler_done(). */
      ctx->samplers[shader_stage].cso_samplers[idx] = cso;
      ctx->samplers[shader_stage].samplers[idx] = cso->data;
      ctx->max_sampler_seen = MAX2(ctx->max_sampler_seen, (int)idx);
   }
}
1241
1242
1243 /**
1244 * Send staged sampler state to the driver.
1245 */
1246 void
1247 cso_single_sampler_done(struct cso_context *ctx,
1248 enum pipe_shader_type shader_stage)
1249 {
1250 struct sampler_info *info = &ctx->samplers[shader_stage];
1251
1252 if (ctx->max_sampler_seen == -1)
1253 return;
1254
1255 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1256 ctx->max_sampler_seen + 1,
1257 info->samplers);
1258 ctx->max_sampler_seen = -1;
1259 }
1260
1261
1262 /*
1263 * If the function encouters any errors it will return the
1264 * last one. Done to always try to set as many samplers
1265 * as possible.
1266 */
1267 void
1268 cso_set_samplers(struct cso_context *ctx,
1269 enum pipe_shader_type shader_stage,
1270 unsigned nr,
1271 const struct pipe_sampler_state **templates)
1272 {
1273 for (unsigned i = 0; i < nr; i++)
1274 cso_single_sampler(ctx, shader_stage, i, templates[i]);
1275
1276 cso_single_sampler_done(ctx, shader_stage);
1277 }
1278
1279 static void
1280 cso_save_fragment_samplers(struct cso_context *ctx)
1281 {
1282 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1283 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1284
1285 memcpy(saved->cso_samplers, info->cso_samplers,
1286 sizeof(info->cso_samplers));
1287 memcpy(saved->samplers, info->samplers, sizeof(info->samplers));
1288 }
1289
1290
/* Restore the fragment samplers saved by cso_save_fragment_samplers()
 * and rebind them via cso_single_sampler_done().
 */
static void
cso_restore_fragment_samplers(struct cso_context *ctx)
{
   struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
   struct sampler_info *saved = &ctx->fragment_samplers_saved;

   memcpy(info->cso_samplers, saved->cso_samplers,
          sizeof(info->cso_samplers));
   memcpy(info->samplers, saved->samplers, sizeof(info->samplers));

   /* Find the highest non-NULL slot so the bind below covers every
    * restored sampler.  NOTE(review): if all restored slots are NULL,
    * max_sampler_seen is left as-is, making the flush a no-op —
    * presumably intentional; confirm against callers. */
   for (int i = PIPE_MAX_SAMPLERS - 1; i >= 0; i--) {
      if (info->samplers[i]) {
         ctx->max_sampler_seen = i;
         break;
      }
   }

   cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
}
1310
1311
/**
 * Bind "count" sampler views for a shader stage.
 *
 * Fragment views are tracked in the context so meta ops can save and
 * restore them, and the driver call is skipped when nothing changed.
 * All other stages are passed straight through to the driver.
 */
void
cso_set_sampler_views(struct cso_context *ctx,
                      enum pipe_shader_type shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   if (shader_stage == PIPE_SHADER_FRAGMENT) {
      unsigned i;
      boolean any_change = FALSE;

      /* reference new views */
      for (i = 0; i < count; i++) {
         /* Compare before the reference call overwrites the old slot. */
         any_change |= ctx->fragment_views[i] != views[i];
         pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
      }
      /* unref extra old views, if any */
      for (; i < ctx->nr_fragment_views; i++) {
         any_change |= ctx->fragment_views[i] != NULL;
         pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      }

      /* bind the new sampler views; cover the larger of the old and new
       * ranges so stale trailing slots are unbound too */
      if (any_change) {
         ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
                                      MAX2(ctx->nr_fragment_views, count),
                                      ctx->fragment_views);
      }

      ctx->nr_fragment_views = count;
   }
   else
      ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
}
1345
1346
1347 static void
1348 cso_save_fragment_sampler_views(struct cso_context *ctx)
1349 {
1350 unsigned i;
1351
1352 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1353
1354 for (i = 0; i < ctx->nr_fragment_views; i++) {
1355 assert(!ctx->fragment_views_saved[i]);
1356 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1357 ctx->fragment_views[i]);
1358 }
1359 }
1360
1361
/* Restore the fragment sampler views saved by
 * cso_save_fragment_sampler_views() and rebind them.
 */
static void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i, nr_saved = ctx->nr_fragment_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      /* move the reference from one pointer to another */
      ctx->fragment_views[i] = ctx->fragment_views_saved[i];
      ctx->fragment_views_saved[i] = NULL;
   }
   /* Drop current views bound beyond the saved count. */
   for (; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }

   /* Cover both ranges so stale trailing slots get unbound. */
   num = MAX2(ctx->nr_fragment_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
                                ctx->fragment_views);

   ctx->nr_fragment_views = nr_saved;
   ctx->nr_fragment_views_saved = 0;
}
1387
1388
1389 void
1390 cso_set_shader_images(struct cso_context *ctx,
1391 enum pipe_shader_type shader_stage,
1392 unsigned start, unsigned count,
1393 struct pipe_image_view *images)
1394 {
1395 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1396 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1397 }
1398
1399 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1400 }
1401
1402
/* Snapshot the tracked fragment image in slot 0 for a later restore. */
static void
cso_save_fragment_image0(struct cso_context *ctx)
{
   util_copy_image_view(&ctx->fragment_image0_saved,
                        &ctx->fragment_image0_current);
}
1409
1410
/* Rebind the fragment image saved by cso_save_fragment_image0(). */
static void
cso_restore_fragment_image0(struct cso_context *ctx)
{
   cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
                         &ctx->fragment_image0_saved);
}
1417
1418
/**
 * Bind "num_targets" stream-output (transform feedback) targets.
 * Previously bound targets beyond the new count are unreferenced.
 * Contexts without streamout support only accept num_targets == 0.
 */
void
cso_set_stream_outputs(struct cso_context *ctx,
                       unsigned num_targets,
                       struct pipe_stream_output_target **targets,
                       const unsigned *offsets)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      assert(num_targets == 0);
      return;
   }

   /* Skip the driver call when streamout stays disabled. */
   if (ctx->nr_so_targets == 0 && num_targets == 0) {
      /* Nothing to do. */
      return;
   }

   /* reference new targets */
   for (i = 0; i < num_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
   }
   /* unref extra old targets, if any */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, num_targets, targets,
                                   offsets);
   ctx->nr_so_targets = num_targets;
}
1451
1452 static void
1453 cso_save_stream_outputs(struct cso_context *ctx)
1454 {
1455 uint i;
1456
1457 if (!ctx->has_streamout) {
1458 return;
1459 }
1460
1461 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1462
1463 for (i = 0; i < ctx->nr_so_targets; i++) {
1464 assert(!ctx->so_targets_saved[i]);
1465 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1466 }
1467 }
1468
/* Restore the stream-output targets saved by cso_save_stream_outputs()
 * and rebind them with append semantics.
 */
static void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;
   unsigned offset[PIPE_MAX_SO_BUFFERS];

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
      /* -1 means append */
      offset[i] = (unsigned)-1;
   }
   /* Unreference current targets beyond the saved count. */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, offset);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1504
/* constant buffers */
1506
1507 void
1508 cso_set_constant_buffer(struct cso_context *cso,
1509 enum pipe_shader_type shader_stage,
1510 unsigned index, struct pipe_constant_buffer *cb)
1511 {
1512 struct pipe_context *pipe = cso->pipe;
1513
1514 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1515
1516 if (index == 0) {
1517 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1518 }
1519 }
1520
1521 void
1522 cso_set_constant_buffer_resource(struct cso_context *cso,
1523 enum pipe_shader_type shader_stage,
1524 unsigned index,
1525 struct pipe_resource *buffer)
1526 {
1527 if (buffer) {
1528 struct pipe_constant_buffer cb;
1529 cb.buffer = buffer;
1530 cb.buffer_offset = 0;
1531 cb.buffer_size = buffer->width0;
1532 cb.user_buffer = NULL;
1533 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1534 } else {
1535 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1536 }
1537 }
1538
1539 void
1540 cso_set_constant_user_buffer(struct cso_context *cso,
1541 enum pipe_shader_type shader_stage,
1542 unsigned index, void *ptr, unsigned size)
1543 {
1544 if (ptr) {
1545 struct pipe_constant_buffer cb;
1546 cb.buffer = NULL;
1547 cb.buffer_offset = 0;
1548 cb.buffer_size = size;
1549 cb.user_buffer = ptr;
1550 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1551 } else {
1552 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1553 }
1554 }
1555
/* Save the tracked slot-0 ("aux") constant buffer of a shader stage. */
void
cso_save_constant_buffer_slot0(struct cso_context *cso,
                               enum pipe_shader_type shader_stage)
{
   util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
                             &cso->aux_constbuf_current[shader_stage]);
}
1563
/* Rebind the saved slot-0 constant buffer of a shader stage and drop
 * the saved buffer reference. */
void
cso_restore_constant_buffer_slot0(struct cso_context *cso,
                                  enum pipe_shader_type shader_stage)
{
   cso_set_constant_buffer(cso, shader_stage, 0,
                           &cso->aux_constbuf_saved[shader_stage]);
   pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
                           NULL);
}
1573
1574
/**
 * Save all the CSO state items specified by the state_mask bitmask
 * of CSO_BIT_x flags.  Pair each call with cso_restore_state().
 */
void
cso_save_state(struct cso_context *cso, unsigned state_mask)
{
   /* Nested saves are not supported: the previous save must have been
    * restored before saving again. */
   assert(cso->saved_state == 0);

   cso->saved_state = state_mask;

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_save_vertex_buffer0(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_save_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_save_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_save_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_save_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_save_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_save_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_save_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_save_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_save_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_save_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_save_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_save_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_save_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_save_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_save_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_save_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_save_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_save_viewport(cso);
   /* PAUSE_QUERIES has no saved data: it simply disables queries here
    * and re-enables them in cso_restore_state(). */
   if (state_mask & CSO_BIT_PAUSE_QUERIES)
      cso->pipe->set_active_query_state(cso->pipe, false);
   if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
      cso_save_fragment_image0(cso);
}
1629
1630
/**
 * Restore the state which was saved by cso_save_state().
 * The set of items restored is the mask recorded at save time.
 */
void
cso_restore_state(struct cso_context *cso)
{
   unsigned state_mask = cso->saved_state;

   /* Must be paired with a preceding cso_save_state(). */
   assert(state_mask);

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_restore_vertex_buffer0(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_restore_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_restore_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_restore_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_restore_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_restore_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_restore_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_restore_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_restore_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_restore_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_restore_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_restore_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_restore_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_restore_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_restore_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_restore_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_restore_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_restore_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_restore_viewport(cso);
   /* Re-enable queries paused in cso_save_state(). */
   if (state_mask & CSO_BIT_PAUSE_QUERIES)
      cso->pipe->set_active_query_state(cso->pipe, true);
   if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
      cso_restore_fragment_image0(cso);

   /* Allow the next cso_save_state(). */
   cso->saved_state = 0;
}
1686
1687
1688
/* drawing */
1690
1691 void
1692 cso_draw_vbo(struct cso_context *cso,
1693 const struct pipe_draw_info *info)
1694 {
1695 struct u_vbuf *vbuf = cso->vbuf;
1696
1697 /* We can't have both indirect drawing and SO-vertex-count drawing */
1698 assert(info->indirect == NULL || info->count_from_stream_output == NULL);
1699
1700 /* We can't have SO-vertex-count drawing with an index buffer */
1701 assert(info->count_from_stream_output == NULL || info->index_size == 0);
1702
1703 if (vbuf) {
1704 u_vbuf_draw_vbo(vbuf, info);
1705 } else {
1706 struct pipe_context *pipe = cso->pipe;
1707 pipe->draw_vbo(pipe, info);
1708 }
1709 }
1710
/* Helper to emit a simple non-indexed draw of [start, start+count). */
void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   /* For a non-indexed draw the index range equals the vertex range.
    * NOTE(review): count == 0 makes max_index wrap to ~0u — presumably
    * callers never pass 0; confirm. */
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}
1726
/* Helper to emit an instanced, non-indexed draw of [start, start+count)
 * for instances [start_instance, start_instance+instance_count). */
void
cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
                          uint start, uint count,
                          uint start_instance, uint instance_count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   /* For a non-indexed draw the index range equals the vertex range.
    * NOTE(review): count == 0 makes max_index wrap to ~0u — presumably
    * callers never pass 0; confirm. */
   info.min_index = start;
   info.max_index = start + count - 1;
   info.start_instance = start_instance;
   info.instance_count = instance_count;

   cso_draw_vbo(cso, &info);
}