cso: don't track the number of sampler states bound
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
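/*
 * Typical usage around a meta operation (illustrative sketch only; the
 * variables blend_templ, fs, fb, width and height are placeholders, not
 * taken from a specific caller):
 *
 *    cso_save_state(cso, CSO_BIT_BLEND |
 *                        CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_FRAMEBUFFER |
 *                        CSO_BIT_VIEWPORT);
 *
 *    cso_set_blend(cso, &blend_templ);
 *    cso_set_fragment_shader_handle(cso, fs);
 *    cso_set_framebuffer(cso, &fb);
 *    cso_set_viewport_dims(cso, width, height, FALSE);
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *
 *    cso_restore_state(cso);
 */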
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Per-shader sampler information.
55 */
56 struct sampler_info
57 {
58 struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
59 void *samplers[PIPE_MAX_SAMPLERS];
60 };
61
62
63
64 struct cso_context {
65 struct pipe_context *pipe;
66 struct cso_cache *cache;
67 struct u_vbuf *vbuf;
68
69 boolean has_geometry_shader;
70 boolean has_tessellation;
71 boolean has_compute_shader;
72 boolean has_streamout;
73
74 unsigned saved_state; /**< bitmask of CSO_BIT_x flags */
75
76 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
77 unsigned nr_fragment_views;
78
79 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
80 unsigned nr_fragment_views_saved;
81
82 struct sampler_info fragment_samplers_saved;
83 struct sampler_info samplers[PIPE_SHADER_TYPES];
84
85 /* Temporary number until cso_single_sampler_done is called.
86 * It tracks the highest sampler seen in cso_single_sampler.
87 */
88 int max_sampler_seen;
89
90 struct pipe_vertex_buffer aux_vertex_buffer_current;
91 struct pipe_vertex_buffer aux_vertex_buffer_saved;
92 unsigned aux_vertex_buffer_index;
93
94 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
95 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
96
97 struct pipe_image_view fragment_image0_current;
98 struct pipe_image_view fragment_image0_saved;
99
100 unsigned nr_so_targets;
101 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
102
103 unsigned nr_so_targets_saved;
104 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
105
106 /** Current and saved state.
107 * The saved state is used as a 1-deep stack.
108 */
109 void *blend, *blend_saved;
110 void *depth_stencil, *depth_stencil_saved;
111 void *rasterizer, *rasterizer_saved;
112 void *fragment_shader, *fragment_shader_saved;
113 void *vertex_shader, *vertex_shader_saved;
114 void *geometry_shader, *geometry_shader_saved;
115 void *tessctrl_shader, *tessctrl_shader_saved;
116 void *tesseval_shader, *tesseval_shader_saved;
117 void *compute_shader;
118 void *velements, *velements_saved;
119 struct pipe_query *render_condition, *render_condition_saved;
120 uint render_condition_mode, render_condition_mode_saved;
121 boolean render_condition_cond, render_condition_cond_saved;
122
123 struct pipe_framebuffer_state fb, fb_saved;
124 struct pipe_viewport_state vp, vp_saved;
125 struct pipe_blend_color blend_color;
126 unsigned sample_mask, sample_mask_saved;
127 unsigned min_samples, min_samples_saved;
128 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
129 };
130
131
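/*
 * Helpers for destroying cached CSOs.  They return FALSE (and leave the
 * object alone) when the state is currently bound; bound sampler states
 * are instead protected by sanitize_hash(), which temporarily pulls them
 * out of the hash table before evicting entries.
 */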
132 static boolean delete_blend_state(struct cso_context *ctx, void *state)
133 {
134 struct cso_blend *cso = (struct cso_blend *)state;
135
136 if (ctx->blend == cso->data)
137 return FALSE;
138
139 if (cso->delete_state)
140 cso->delete_state(cso->context, cso->data);
141 FREE(state);
142 return TRUE;
143 }
144
145 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
146 {
147 struct cso_depth_stencil_alpha *cso =
148 (struct cso_depth_stencil_alpha *)state;
149
150 if (ctx->depth_stencil == cso->data)
151 return FALSE;
152
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156
157 return TRUE;
158 }
159
160 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
161 {
162 struct cso_sampler *cso = (struct cso_sampler *)state;
163 if (cso->delete_state)
164 cso->delete_state(cso->context, cso->data);
165 FREE(state);
166 return TRUE;
167 }
168
169 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
170 {
171 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
172
173 if (ctx->rasterizer == cso->data)
174 return FALSE;
175 if (cso->delete_state)
176 cso->delete_state(cso->context, cso->data);
177 FREE(state);
178 return TRUE;
179 }
180
181 static boolean delete_vertex_elements(struct cso_context *ctx,
182 void *state)
183 {
184 struct cso_velements *cso = (struct cso_velements *)state;
185
186 if (ctx->velements == cso->data)
187 return FALSE;
188
189 if (cso->delete_state)
190 cso->delete_state(cso->context, cso->data);
191 FREE(state);
192 return TRUE;
193 }
194
195
196 static inline boolean delete_cso(struct cso_context *ctx,
197 void *state, enum cso_cache_type type)
198 {
199 switch (type) {
200 case CSO_BLEND:
201 return delete_blend_state(ctx, state);
202 case CSO_SAMPLER:
203 return delete_sampler_state(ctx, state);
204 case CSO_DEPTH_STENCIL_ALPHA:
205 return delete_depth_stencil_state(ctx, state);
206 case CSO_RASTERIZER:
207 return delete_rasterizer_state(ctx, state);
208 case CSO_VELEMENTS:
209 return delete_vertex_elements(ctx, state);
210 default:
211 assert(0);
212 FREE(state);
213 }
214 return FALSE;
215 }
216
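/**
 * Sanitize callback for the cso cache: when a hash table gets close to its
 * maximum size, evict roughly a quarter of the entries (skipping any state
 * that is currently bound) so that later insertions don't repeatedly hit
 * the limit.
 */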
217 static inline void
218 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
219 int max_size, void *user_data)
220 {
221 struct cso_context *ctx = (struct cso_context *)user_data;
222    /* If we're approaching the maximum size, remove a fourth of the entries;
223     * otherwise every subsequent call will go through the same thing. */
224 int hash_size = cso_hash_size(hash);
225 int max_entries = (max_size > hash_size) ? max_size : hash_size;
226 int to_remove = (max_size < max_entries) * max_entries/4;
227 struct cso_hash_iter iter;
228 struct cso_sampler **samplers_to_restore = NULL;
229 unsigned to_restore = 0;
230
231 if (hash_size > max_size)
232 to_remove += hash_size - max_size;
233
234 if (to_remove == 0)
235 return;
236
237 if (type == CSO_SAMPLER) {
238 int i, j;
239
240 samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
241 sizeof(*samplers_to_restore));
242
243 /* Temporarily remove currently bound sampler states from the hash
244 * table, to prevent them from being deleted
245 */
246 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
247 for (j = 0; j < PIPE_MAX_SAMPLERS; j++) {
248 struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
249
250 if (sampler && cso_hash_take(hash, sampler->hash_key))
251 samplers_to_restore[to_restore++] = sampler;
252 }
253 }
254 }
255
256 iter = cso_hash_first_node(hash);
257 while (to_remove) {
258       /* remove elements until we're good */
259       /* FIXME: currently we pick the nodes to remove at random */
260 void *cso = cso_hash_iter_data(iter);
261
262 if (!cso)
263 break;
264
265 if (delete_cso(ctx, cso, type)) {
266 iter = cso_hash_erase(hash, iter);
267 --to_remove;
268 } else
269 iter = cso_hash_iter_next(iter);
270 }
271
272 if (type == CSO_SAMPLER) {
273 /* Put currently bound sampler states back into the hash table */
274 while (to_restore--) {
275 struct cso_sampler *sampler = samplers_to_restore[to_restore];
276
277 cso_hash_insert(hash, sampler->hash_key, sampler);
278 }
279
280 FREE(samplers_to_restore);
281 }
282 }
283
284 static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
285 {
286 struct u_vbuf_caps caps;
287
288 /* Install u_vbuf if there is anything unsupported. */
289 if (u_vbuf_get_caps(cso->pipe->screen, &caps, flags)) {
290 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
291 cso->aux_vertex_buffer_index);
292 }
293 }
294
295 struct cso_context *
296 cso_create_context(struct pipe_context *pipe, unsigned u_vbuf_flags)
297 {
298 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
299 if (!ctx)
300 return NULL;
301
302 ctx->cache = cso_cache_create();
303 if (ctx->cache == NULL)
304 goto out;
305 cso_cache_set_sanitize_callback(ctx->cache,
306 sanitize_hash,
307 ctx);
308
309 ctx->pipe = pipe;
310 ctx->sample_mask = ~0;
311
312 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
313
314 cso_init_vbuf(ctx, u_vbuf_flags);
315
316 /* Enable for testing: */
317 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
318
319 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
320 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
321 ctx->has_geometry_shader = TRUE;
322 }
323 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
324 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
325 ctx->has_tessellation = TRUE;
326 }
327 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
328 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
329 int supported_irs =
330 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
331 PIPE_SHADER_CAP_SUPPORTED_IRS);
332 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
333 ctx->has_compute_shader = TRUE;
334 }
335 }
336 if (pipe->screen->get_param(pipe->screen,
337 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
338 ctx->has_streamout = TRUE;
339 }
340
341 ctx->max_sampler_seen = -1;
342 return ctx;
343
344 out:
345 cso_destroy_context( ctx );
346 return NULL;
347 }
348
349 /**
350 * Free the CSO context.
351 */
352 void cso_destroy_context( struct cso_context *ctx )
353 {
354 unsigned i;
355
356 if (ctx->pipe) {
357 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
358 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
359
360 {
361 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
362 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
363 struct pipe_screen *scr = ctx->pipe->screen;
364 enum pipe_shader_type sh;
365 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
366 int maxsam = scr->get_shader_param(scr, sh,
367 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
368 int maxview = scr->get_shader_param(scr, sh,
369 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
370 assert(maxsam <= PIPE_MAX_SAMPLERS);
371 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
372 if (maxsam > 0) {
373 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
374 }
375 if (maxview > 0) {
376 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
377 }
378 }
379 }
380
381 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
382 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
383 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
384 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
385 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
386 if (ctx->has_geometry_shader) {
387 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
388 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
389 }
390 if (ctx->has_tessellation) {
391 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
392 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
393 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
394 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
395 }
396 if (ctx->has_compute_shader) {
397 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
398 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
399 }
400 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
401
402 if (ctx->has_streamout)
403 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
404 }
405
406 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
407 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
408 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
409 }
410
411 util_unreference_framebuffer_state(&ctx->fb);
412 util_unreference_framebuffer_state(&ctx->fb_saved);
413
414 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_current);
415 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_saved);
416
417 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
418 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
419 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
420 }
421
422 pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
423 pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);
424
425 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
426 pipe_so_target_reference(&ctx->so_targets[i], NULL);
427 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
428 }
429
430 if (ctx->cache) {
431 cso_cache_delete( ctx->cache );
432 ctx->cache = NULL;
433 }
434
435 if (ctx->vbuf)
436 u_vbuf_destroy(ctx->vbuf);
437 FREE( ctx );
438 }
439
440
441 /* These functions will either find the state matching the given template
442  * in the cache, or they will create a new state from the template,
443  * insert it into the cache and return it.
444  */
445
446 /*
447  * If the driver returns 0 from the create method, the data member of the
448  * cso is assigned to be the template itself.
449  */
450
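/**
 * Find (or create and cache) a blend state matching the template and bind
 * it, if it differs from the currently bound blend state.
 */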
451 enum pipe_error cso_set_blend(struct cso_context *ctx,
452 const struct pipe_blend_state *templ)
453 {
454 unsigned key_size, hash_key;
455 struct cso_hash_iter iter;
456 void *handle;
457
458 key_size = templ->independent_blend_enable ?
459 sizeof(struct pipe_blend_state) :
460 (char *)&(templ->rt[1]) - (char *)templ;
461 hash_key = cso_construct_key((void*)templ, key_size);
462 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
463 (void*)templ, key_size);
464
465 if (cso_hash_iter_is_null(iter)) {
466 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
467 if (!cso)
468 return PIPE_ERROR_OUT_OF_MEMORY;
469
470 memset(&cso->state, 0, sizeof cso->state);
471 memcpy(&cso->state, templ, key_size);
472 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
473 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
474 cso->context = ctx->pipe;
475
476 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
477 if (cso_hash_iter_is_null(iter)) {
478 FREE(cso);
479 return PIPE_ERROR_OUT_OF_MEMORY;
480 }
481
482 handle = cso->data;
483 }
484 else {
485 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
486 }
487
488 if (ctx->blend != handle) {
489 ctx->blend = handle;
490 ctx->pipe->bind_blend_state(ctx->pipe, handle);
491 }
492 return PIPE_OK;
493 }
494
495 static void
496 cso_save_blend(struct cso_context *ctx)
497 {
498 assert(!ctx->blend_saved);
499 ctx->blend_saved = ctx->blend;
500 }
501
502 static void
503 cso_restore_blend(struct cso_context *ctx)
504 {
505 if (ctx->blend != ctx->blend_saved) {
506 ctx->blend = ctx->blend_saved;
507 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
508 }
509 ctx->blend_saved = NULL;
510 }
511
512
513
514 enum pipe_error
515 cso_set_depth_stencil_alpha(struct cso_context *ctx,
516 const struct pipe_depth_stencil_alpha_state *templ)
517 {
518 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
519 unsigned hash_key = cso_construct_key((void*)templ, key_size);
520 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
521 hash_key,
522 CSO_DEPTH_STENCIL_ALPHA,
523 (void*)templ, key_size);
524 void *handle;
525
526 if (cso_hash_iter_is_null(iter)) {
527 struct cso_depth_stencil_alpha *cso =
528 MALLOC(sizeof(struct cso_depth_stencil_alpha));
529 if (!cso)
530 return PIPE_ERROR_OUT_OF_MEMORY;
531
532 memcpy(&cso->state, templ, sizeof(*templ));
533 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
534 &cso->state);
535 cso->delete_state =
536 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
537 cso->context = ctx->pipe;
538
539 iter = cso_insert_state(ctx->cache, hash_key,
540 CSO_DEPTH_STENCIL_ALPHA, cso);
541 if (cso_hash_iter_is_null(iter)) {
542 FREE(cso);
543 return PIPE_ERROR_OUT_OF_MEMORY;
544 }
545
546 handle = cso->data;
547 }
548 else {
549 handle = ((struct cso_depth_stencil_alpha *)
550 cso_hash_iter_data(iter))->data;
551 }
552
553 if (ctx->depth_stencil != handle) {
554 ctx->depth_stencil = handle;
555 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
556 }
557 return PIPE_OK;
558 }
559
560 static void
561 cso_save_depth_stencil_alpha(struct cso_context *ctx)
562 {
563 assert(!ctx->depth_stencil_saved);
564 ctx->depth_stencil_saved = ctx->depth_stencil;
565 }
566
567 static void
568 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
569 {
570 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
571 ctx->depth_stencil = ctx->depth_stencil_saved;
572 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
573 ctx->depth_stencil_saved);
574 }
575 ctx->depth_stencil_saved = NULL;
576 }
577
578
579
580 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
581 const struct pipe_rasterizer_state *templ)
582 {
583 unsigned key_size = sizeof(struct pipe_rasterizer_state);
584 unsigned hash_key = cso_construct_key((void*)templ, key_size);
585 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
586 hash_key,
587 CSO_RASTERIZER,
588 (void*)templ, key_size);
589 void *handle = NULL;
590
591 if (cso_hash_iter_is_null(iter)) {
592 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
593 if (!cso)
594 return PIPE_ERROR_OUT_OF_MEMORY;
595
596 memcpy(&cso->state, templ, sizeof(*templ));
597 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
598 cso->delete_state =
599 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
600 cso->context = ctx->pipe;
601
602 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
603 if (cso_hash_iter_is_null(iter)) {
604 FREE(cso);
605 return PIPE_ERROR_OUT_OF_MEMORY;
606 }
607
608 handle = cso->data;
609 }
610 else {
611 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
612 }
613
614 if (ctx->rasterizer != handle) {
615 ctx->rasterizer = handle;
616 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
617 }
618 return PIPE_OK;
619 }
620
621 static void
622 cso_save_rasterizer(struct cso_context *ctx)
623 {
624 assert(!ctx->rasterizer_saved);
625 ctx->rasterizer_saved = ctx->rasterizer;
626 }
627
628 static void
629 cso_restore_rasterizer(struct cso_context *ctx)
630 {
631 if (ctx->rasterizer != ctx->rasterizer_saved) {
632 ctx->rasterizer = ctx->rasterizer_saved;
633 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
634 }
635 ctx->rasterizer_saved = NULL;
636 }
637
638
639 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
640 {
641 if (ctx->fragment_shader != handle) {
642 ctx->fragment_shader = handle;
643 ctx->pipe->bind_fs_state(ctx->pipe, handle);
644 }
645 }
646
647 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
648 {
649 if (handle == ctx->fragment_shader) {
650 /* unbind before deleting */
651 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
652 ctx->fragment_shader = NULL;
653 }
654 ctx->pipe->delete_fs_state(ctx->pipe, handle);
655 }
656
657 static void
658 cso_save_fragment_shader(struct cso_context *ctx)
659 {
660 assert(!ctx->fragment_shader_saved);
661 ctx->fragment_shader_saved = ctx->fragment_shader;
662 }
663
664 static void
665 cso_restore_fragment_shader(struct cso_context *ctx)
666 {
667 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
668 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
669 ctx->fragment_shader = ctx->fragment_shader_saved;
670 }
671 ctx->fragment_shader_saved = NULL;
672 }
673
674
675 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
676 {
677 if (ctx->vertex_shader != handle) {
678 ctx->vertex_shader = handle;
679 ctx->pipe->bind_vs_state(ctx->pipe, handle);
680 }
681 }
682
683 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
684 {
685 if (handle == ctx->vertex_shader) {
686 /* unbind before deleting */
687 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
688 ctx->vertex_shader = NULL;
689 }
690 ctx->pipe->delete_vs_state(ctx->pipe, handle);
691 }
692
693 static void
694 cso_save_vertex_shader(struct cso_context *ctx)
695 {
696 assert(!ctx->vertex_shader_saved);
697 ctx->vertex_shader_saved = ctx->vertex_shader;
698 }
699
700 static void
701 cso_restore_vertex_shader(struct cso_context *ctx)
702 {
703 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
704 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
705 ctx->vertex_shader = ctx->vertex_shader_saved;
706 }
707 ctx->vertex_shader_saved = NULL;
708 }
709
710
711 void cso_set_framebuffer(struct cso_context *ctx,
712 const struct pipe_framebuffer_state *fb)
713 {
714 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
715 util_copy_framebuffer_state(&ctx->fb, fb);
716 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
717 }
718 }
719
720 static void
721 cso_save_framebuffer(struct cso_context *ctx)
722 {
723 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
724 }
725
726 static void
727 cso_restore_framebuffer(struct cso_context *ctx)
728 {
729 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
730 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
731 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
732 util_unreference_framebuffer_state(&ctx->fb_saved);
733 }
734 }
735
736
737 void cso_set_viewport(struct cso_context *ctx,
738 const struct pipe_viewport_state *vp)
739 {
740 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
741 ctx->vp = *vp;
742 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
743 }
744 }
745
746 /**
747  * Set up viewport state for the given width and height (position is always (0,0)).
748 * Invert the Y axis if 'invert' is true.
749 */
750 void
751 cso_set_viewport_dims(struct cso_context *ctx,
752 float width, float height, boolean invert)
753 {
754 struct pipe_viewport_state vp;
755 vp.scale[0] = width * 0.5f;
756 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
757 vp.scale[2] = 0.5f;
758 vp.translate[0] = 0.5f * width;
759 vp.translate[1] = 0.5f * height;
760 vp.translate[2] = 0.5f;
761 cso_set_viewport(ctx, &vp);
762 }
763
764 static void
765 cso_save_viewport(struct cso_context *ctx)
766 {
767 ctx->vp_saved = ctx->vp;
768 }
769
770
771 static void
772 cso_restore_viewport(struct cso_context *ctx)
773 {
774 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
775 ctx->vp = ctx->vp_saved;
776 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
777 }
778 }
779
780
781 void cso_set_blend_color(struct cso_context *ctx,
782 const struct pipe_blend_color *bc)
783 {
784 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
785 ctx->blend_color = *bc;
786 ctx->pipe->set_blend_color(ctx->pipe, bc);
787 }
788 }
789
790 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
791 {
792 if (ctx->sample_mask != sample_mask) {
793 ctx->sample_mask = sample_mask;
794 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
795 }
796 }
797
798 static void
799 cso_save_sample_mask(struct cso_context *ctx)
800 {
801 ctx->sample_mask_saved = ctx->sample_mask;
802 }
803
804 static void
805 cso_restore_sample_mask(struct cso_context *ctx)
806 {
807 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
808 }
809
810 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
811 {
812 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
813 ctx->min_samples = min_samples;
814 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
815 }
816 }
817
818 static void
819 cso_save_min_samples(struct cso_context *ctx)
820 {
821 ctx->min_samples_saved = ctx->min_samples;
822 }
823
824 static void
825 cso_restore_min_samples(struct cso_context *ctx)
826 {
827 cso_set_min_samples(ctx, ctx->min_samples_saved);
828 }
829
830 void cso_set_stencil_ref(struct cso_context *ctx,
831 const struct pipe_stencil_ref *sr)
832 {
833 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
834 ctx->stencil_ref = *sr;
835 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
836 }
837 }
838
839 static void
840 cso_save_stencil_ref(struct cso_context *ctx)
841 {
842 ctx->stencil_ref_saved = ctx->stencil_ref;
843 }
844
845
846 static void
847 cso_restore_stencil_ref(struct cso_context *ctx)
848 {
849 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
850 sizeof(ctx->stencil_ref))) {
851 ctx->stencil_ref = ctx->stencil_ref_saved;
852 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
853 }
854 }
855
856 void cso_set_render_condition(struct cso_context *ctx,
857 struct pipe_query *query,
858 boolean condition,
859 enum pipe_render_cond_flag mode)
860 {
861 struct pipe_context *pipe = ctx->pipe;
862
863 if (ctx->render_condition != query ||
864 ctx->render_condition_mode != mode ||
865 ctx->render_condition_cond != condition) {
866 pipe->render_condition(pipe, query, condition, mode);
867 ctx->render_condition = query;
868 ctx->render_condition_cond = condition;
869 ctx->render_condition_mode = mode;
870 }
871 }
872
873 static void
874 cso_save_render_condition(struct cso_context *ctx)
875 {
876 ctx->render_condition_saved = ctx->render_condition;
877 ctx->render_condition_cond_saved = ctx->render_condition_cond;
878 ctx->render_condition_mode_saved = ctx->render_condition_mode;
879 }
880
881 static void
882 cso_restore_render_condition(struct cso_context *ctx)
883 {
884 cso_set_render_condition(ctx, ctx->render_condition_saved,
885 ctx->render_condition_cond_saved,
886 ctx->render_condition_mode_saved);
887 }
888
889 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
890 {
891 assert(ctx->has_geometry_shader || !handle);
892
893 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
894 ctx->geometry_shader = handle;
895 ctx->pipe->bind_gs_state(ctx->pipe, handle);
896 }
897 }
898
899 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
900 {
901 if (handle == ctx->geometry_shader) {
902 /* unbind before deleting */
903 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
904 ctx->geometry_shader = NULL;
905 }
906 ctx->pipe->delete_gs_state(ctx->pipe, handle);
907 }
908
909 static void
910 cso_save_geometry_shader(struct cso_context *ctx)
911 {
912 if (!ctx->has_geometry_shader) {
913 return;
914 }
915
916 assert(!ctx->geometry_shader_saved);
917 ctx->geometry_shader_saved = ctx->geometry_shader;
918 }
919
920 static void
921 cso_restore_geometry_shader(struct cso_context *ctx)
922 {
923 if (!ctx->has_geometry_shader) {
924 return;
925 }
926
927 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
928 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
929 ctx->geometry_shader = ctx->geometry_shader_saved;
930 }
931 ctx->geometry_shader_saved = NULL;
932 }
933
934 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
935 {
936 assert(ctx->has_tessellation || !handle);
937
938 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
939 ctx->tessctrl_shader = handle;
940 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
941 }
942 }
943
944 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
945 {
946 if (handle == ctx->tessctrl_shader) {
947 /* unbind before deleting */
948 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
949 ctx->tessctrl_shader = NULL;
950 }
951 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
952 }
953
954 static void
955 cso_save_tessctrl_shader(struct cso_context *ctx)
956 {
957 if (!ctx->has_tessellation) {
958 return;
959 }
960
961 assert(!ctx->tessctrl_shader_saved);
962 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
963 }
964
965 static void
966 cso_restore_tessctrl_shader(struct cso_context *ctx)
967 {
968 if (!ctx->has_tessellation) {
969 return;
970 }
971
972 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
973 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
974 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
975 }
976 ctx->tessctrl_shader_saved = NULL;
977 }
978
979 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
980 {
981 assert(ctx->has_tessellation || !handle);
982
983 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
984 ctx->tesseval_shader = handle;
985 ctx->pipe->bind_tes_state(ctx->pipe, handle);
986 }
987 }
988
989 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
990 {
991 if (handle == ctx->tesseval_shader) {
992 /* unbind before deleting */
993 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
994 ctx->tesseval_shader = NULL;
995 }
996 ctx->pipe->delete_tes_state(ctx->pipe, handle);
997 }
998
999 static void
1000 cso_save_tesseval_shader(struct cso_context *ctx)
1001 {
1002 if (!ctx->has_tessellation) {
1003 return;
1004 }
1005
1006 assert(!ctx->tesseval_shader_saved);
1007 ctx->tesseval_shader_saved = ctx->tesseval_shader;
1008 }
1009
1010 static void
1011 cso_restore_tesseval_shader(struct cso_context *ctx)
1012 {
1013 if (!ctx->has_tessellation) {
1014 return;
1015 }
1016
1017 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
1018 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
1019 ctx->tesseval_shader = ctx->tesseval_shader_saved;
1020 }
1021 ctx->tesseval_shader_saved = NULL;
1022 }
1023
1024 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
1025 {
1026 assert(ctx->has_compute_shader || !handle);
1027
1028 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1029 ctx->compute_shader = handle;
1030 ctx->pipe->bind_compute_state(ctx->pipe, handle);
1031 }
1032 }
1033
1034 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
1035 {
1036 if (handle == ctx->compute_shader) {
1037 /* unbind before deleting */
1038 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
1039 ctx->compute_shader = NULL;
1040 }
1041 ctx->pipe->delete_compute_state(ctx->pipe, handle);
1042 }
1043
1044 enum pipe_error
1045 cso_set_vertex_elements(struct cso_context *ctx,
1046 unsigned count,
1047 const struct pipe_vertex_element *states)
1048 {
1049 struct u_vbuf *vbuf = ctx->vbuf;
1050 unsigned key_size, hash_key;
1051 struct cso_hash_iter iter;
1052 void *handle;
1053 struct cso_velems_state velems_state;
1054
1055 if (vbuf) {
1056 u_vbuf_set_vertex_elements(vbuf, count, states);
1057 return PIPE_OK;
1058 }
1059
1060    /* Need to include the count in the stored state data too.
1061     * Otherwise the first 'count' pipe_vertex_elements could be identical
1062     * even if the counts differ, and there's no guarantee the hash would
1063     * be different in that case either.
1064     */
1065 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
1066 velems_state.count = count;
1067 memcpy(velems_state.velems, states,
1068 sizeof(struct pipe_vertex_element) * count);
1069 hash_key = cso_construct_key((void*)&velems_state, key_size);
1070 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
1071 (void*)&velems_state, key_size);
1072
1073 if (cso_hash_iter_is_null(iter)) {
1074 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
1075 if (!cso)
1076 return PIPE_ERROR_OUT_OF_MEMORY;
1077
1078 memcpy(&cso->state, &velems_state, key_size);
1079 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
1080 &cso->state.velems[0]);
1081 cso->delete_state =
1082 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
1083 cso->context = ctx->pipe;
1084
1085 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1086 if (cso_hash_iter_is_null(iter)) {
1087 FREE(cso);
1088 return PIPE_ERROR_OUT_OF_MEMORY;
1089 }
1090
1091 handle = cso->data;
1092 }
1093 else {
1094 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1095 }
1096
1097 if (ctx->velements != handle) {
1098 ctx->velements = handle;
1099 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1100 }
1101 return PIPE_OK;
1102 }
1103
1104 static void
1105 cso_save_vertex_elements(struct cso_context *ctx)
1106 {
1107 struct u_vbuf *vbuf = ctx->vbuf;
1108
1109 if (vbuf) {
1110 u_vbuf_save_vertex_elements(vbuf);
1111 return;
1112 }
1113
1114 assert(!ctx->velements_saved);
1115 ctx->velements_saved = ctx->velements;
1116 }
1117
1118 static void
1119 cso_restore_vertex_elements(struct cso_context *ctx)
1120 {
1121 struct u_vbuf *vbuf = ctx->vbuf;
1122
1123 if (vbuf) {
1124 u_vbuf_restore_vertex_elements(vbuf);
1125 return;
1126 }
1127
1128 if (ctx->velements != ctx->velements_saved) {
1129 ctx->velements = ctx->velements_saved;
1130 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1131 }
1132 ctx->velements_saved = NULL;
1133 }
1134
1135 /* vertex buffers */
1136
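/**
 * Set vertex buffers, forwarding to u_vbuf when it is installed.  Whatever
 * lands in the auxiliary slot is also referenced here so that it can be
 * saved and restored around meta ops.
 */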
1137 void cso_set_vertex_buffers(struct cso_context *ctx,
1138 unsigned start_slot, unsigned count,
1139 const struct pipe_vertex_buffer *buffers)
1140 {
1141 struct u_vbuf *vbuf = ctx->vbuf;
1142
1143 if (vbuf) {
1144 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1145 return;
1146 }
1147
1148    /* Keep track of what's in the auxiliary slot, so that we can save and
1149     * restore it for meta ops. */
1150 if (start_slot <= ctx->aux_vertex_buffer_index &&
1151 start_slot+count > ctx->aux_vertex_buffer_index) {
1152 if (buffers) {
1153 const struct pipe_vertex_buffer *vb =
1154 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1155
1156 pipe_vertex_buffer_reference(&ctx->aux_vertex_buffer_current, vb);
1157 } else {
1158 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_current);
1159 }
1160 }
1161
1162 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1163 }
1164
1165 static void
1166 cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1167 {
1168 struct u_vbuf *vbuf = ctx->vbuf;
1169
1170 if (vbuf) {
1171 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1172 return;
1173 }
1174
1175 pipe_vertex_buffer_reference(&ctx->aux_vertex_buffer_saved,
1176 &ctx->aux_vertex_buffer_current);
1177 }
1178
1179 static void
1180 cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1181 {
1182 struct u_vbuf *vbuf = ctx->vbuf;
1183
1184 if (vbuf) {
1185 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1186 return;
1187 }
1188
1189 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1190 &ctx->aux_vertex_buffer_saved);
1191 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_saved);
1192 }
1193
1194 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1195 {
1196 return ctx->aux_vertex_buffer_index;
1197 }
1198
1199
1200
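/**
 * Stage a single sampler state for the given shader stage and sampler slot.
 * The state is not sent to the driver until cso_single_sampler_done() is
 * called for that stage.
 */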
1201 enum pipe_error
1202 cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
1203 unsigned idx, const struct pipe_sampler_state *templ)
1204 {
1205 if (templ) {
1206 unsigned key_size = sizeof(struct pipe_sampler_state);
1207 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1208 struct cso_sampler *cso;
1209 struct cso_hash_iter iter =
1210 cso_find_state_template(ctx->cache,
1211 hash_key, CSO_SAMPLER,
1212 (void *) templ, key_size);
1213
1214 if (cso_hash_iter_is_null(iter)) {
1215 cso = MALLOC(sizeof(struct cso_sampler));
1216 if (!cso)
1217 return PIPE_ERROR_OUT_OF_MEMORY;
1218
1219 memcpy(&cso->state, templ, sizeof(*templ));
1220 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1221 cso->delete_state =
1222 (cso_state_callback) ctx->pipe->delete_sampler_state;
1223 cso->context = ctx->pipe;
1224 cso->hash_key = hash_key;
1225
1226 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1227 if (cso_hash_iter_is_null(iter)) {
1228 FREE(cso);
1229 return PIPE_ERROR_OUT_OF_MEMORY;
1230 }
1231 }
1232 else {
1233 cso = cso_hash_iter_data(iter);
1234 }
1235
1236 ctx->samplers[shader_stage].cso_samplers[idx] = cso;
1237 ctx->samplers[shader_stage].samplers[idx] = cso->data;
1238 ctx->max_sampler_seen = MAX2(ctx->max_sampler_seen, (int)idx);
1239 }
1240
1241 return PIPE_OK;
1242 }
1243
1244
1245 /**
1246 * Send staged sampler state to the driver.
1247 */
1248 void
1249 cso_single_sampler_done(struct cso_context *ctx,
1250 enum pipe_shader_type shader_stage)
1251 {
1252 struct sampler_info *info = &ctx->samplers[shader_stage];
1253
1254 if (ctx->max_sampler_seen == -1)
1255 return;
1256
1257 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1258 ctx->max_sampler_seen + 1,
1259 info->samplers);
1260 ctx->max_sampler_seen = -1;
1261 }
1262
1263
1264 /*
1265  * If the function encounters any errors, it will return the last one.
1266  * This is done so that we always try to set as many samplers
1267  * as possible.
1268 */
1269 enum pipe_error
1270 cso_set_samplers(struct cso_context *ctx,
1271 enum pipe_shader_type shader_stage,
1272 unsigned nr,
1273 const struct pipe_sampler_state **templates)
1274 {
1275 unsigned i;
1276 enum pipe_error temp, error = PIPE_OK;
1277
1278 for (i = 0; i < nr; i++) {
1279 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1280 if (temp != PIPE_OK)
1281 error = temp;
1282 }
1283
1284 cso_single_sampler_done(ctx, shader_stage);
1285
1286 return error;
1287 }
1288
1289 static void
1290 cso_save_fragment_samplers(struct cso_context *ctx)
1291 {
1292 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1293 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1294
1295 memcpy(saved->cso_samplers, info->cso_samplers,
1296 sizeof(info->cso_samplers));
1297 memcpy(saved->samplers, info->samplers, sizeof(info->samplers));
1298 }
1299
1300
1301 static void
1302 cso_restore_fragment_samplers(struct cso_context *ctx)
1303 {
1304 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1305 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1306
1307 memcpy(info->cso_samplers, saved->cso_samplers,
1308 sizeof(info->cso_samplers));
1309 memcpy(info->samplers, saved->samplers, sizeof(info->samplers));
1310
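   /* Find the highest used sampler slot so that cso_single_sampler_done()
    * rebinds the whole range of restored samplers.
    */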
1311 for (int i = PIPE_MAX_SAMPLERS - 1; i >= 0; i--) {
1312 if (info->samplers[i]) {
1313 ctx->max_sampler_seen = i;
1314 break;
1315 }
1316 }
1317
1318 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1319 }
1320
1321
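/**
 * Bind sampler views.  Fragment-stage views are additionally referenced and
 * tracked so they can be saved and restored; other stages are passed
 * straight through to the driver.
 */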
1322 void
1323 cso_set_sampler_views(struct cso_context *ctx,
1324 enum pipe_shader_type shader_stage,
1325 unsigned count,
1326 struct pipe_sampler_view **views)
1327 {
1328 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1329 unsigned i;
1330 boolean any_change = FALSE;
1331
1332 /* reference new views */
1333 for (i = 0; i < count; i++) {
1334 any_change |= ctx->fragment_views[i] != views[i];
1335 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1336 }
1337 /* unref extra old views, if any */
1338 for (; i < ctx->nr_fragment_views; i++) {
1339 any_change |= ctx->fragment_views[i] != NULL;
1340 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1341 }
1342
1343 /* bind the new sampler views */
1344 if (any_change) {
1345 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1346 MAX2(ctx->nr_fragment_views, count),
1347 ctx->fragment_views);
1348 }
1349
1350 ctx->nr_fragment_views = count;
1351 }
1352 else
1353 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1354 }
1355
1356
1357 static void
1358 cso_save_fragment_sampler_views(struct cso_context *ctx)
1359 {
1360 unsigned i;
1361
1362 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1363
1364 for (i = 0; i < ctx->nr_fragment_views; i++) {
1365 assert(!ctx->fragment_views_saved[i]);
1366 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1367 ctx->fragment_views[i]);
1368 }
1369 }
1370
1371
1372 static void
1373 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1374 {
1375 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1376 unsigned num;
1377
1378 for (i = 0; i < nr_saved; i++) {
1379 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1380 /* move the reference from one pointer to another */
1381 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1382 ctx->fragment_views_saved[i] = NULL;
1383 }
1384 for (; i < ctx->nr_fragment_views; i++) {
1385 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1386 }
1387
1388 num = MAX2(ctx->nr_fragment_views, nr_saved);
1389
1390 /* bind the old/saved sampler views */
1391 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1392 ctx->fragment_views);
1393
1394 ctx->nr_fragment_views = nr_saved;
1395 ctx->nr_fragment_views_saved = 0;
1396 }
1397
1398
1399 void
1400 cso_set_shader_images(struct cso_context *ctx,
1401 enum pipe_shader_type shader_stage,
1402 unsigned start, unsigned count,
1403 struct pipe_image_view *images)
1404 {
1405 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1406 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1407 }
1408
1409 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1410 }
1411
1412
1413 static void
1414 cso_save_fragment_image0(struct cso_context *ctx)
1415 {
1416 util_copy_image_view(&ctx->fragment_image0_saved,
1417 &ctx->fragment_image0_current);
1418 }
1419
1420
1421 static void
1422 cso_restore_fragment_image0(struct cso_context *ctx)
1423 {
1424 cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1425 &ctx->fragment_image0_saved);
1426 }
1427
1428
1429 void
1430 cso_set_stream_outputs(struct cso_context *ctx,
1431 unsigned num_targets,
1432 struct pipe_stream_output_target **targets,
1433 const unsigned *offsets)
1434 {
1435 struct pipe_context *pipe = ctx->pipe;
1436 uint i;
1437
1438 if (!ctx->has_streamout) {
1439 assert(num_targets == 0);
1440 return;
1441 }
1442
1443 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1444 /* Nothing to do. */
1445 return;
1446 }
1447
1448 /* reference new targets */
1449 for (i = 0; i < num_targets; i++) {
1450 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1451 }
1452 /* unref extra old targets, if any */
1453 for (; i < ctx->nr_so_targets; i++) {
1454 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1455 }
1456
1457 pipe->set_stream_output_targets(pipe, num_targets, targets,
1458 offsets);
1459 ctx->nr_so_targets = num_targets;
1460 }
1461
1462 static void
1463 cso_save_stream_outputs(struct cso_context *ctx)
1464 {
1465 uint i;
1466
1467 if (!ctx->has_streamout) {
1468 return;
1469 }
1470
1471 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1472
1473 for (i = 0; i < ctx->nr_so_targets; i++) {
1474 assert(!ctx->so_targets_saved[i]);
1475 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1476 }
1477 }
1478
1479 static void
1480 cso_restore_stream_outputs(struct cso_context *ctx)
1481 {
1482 struct pipe_context *pipe = ctx->pipe;
1483 uint i;
1484 unsigned offset[PIPE_MAX_SO_BUFFERS];
1485
1486 if (!ctx->has_streamout) {
1487 return;
1488 }
1489
1490 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1491 /* Nothing to do. */
1492 return;
1493 }
1494
1495 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1496 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1497 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1498 /* move the reference from one pointer to another */
1499 ctx->so_targets[i] = ctx->so_targets_saved[i];
1500 ctx->so_targets_saved[i] = NULL;
1501 /* -1 means append */
1502 offset[i] = (unsigned)-1;
1503 }
1504 for (; i < ctx->nr_so_targets; i++) {
1505 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1506 }
1507
1508 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1509 ctx->so_targets, offset);
1510
1511 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1512 ctx->nr_so_targets_saved = 0;
1513 }
1514
1515 /* constant buffers */
1516
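/**
 * Bind a constant buffer.  Slot 0 is additionally tracked so it can be
 * saved and restored around meta ops.
 */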
1517 void
1518 cso_set_constant_buffer(struct cso_context *cso,
1519 enum pipe_shader_type shader_stage,
1520 unsigned index, struct pipe_constant_buffer *cb)
1521 {
1522 struct pipe_context *pipe = cso->pipe;
1523
1524 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1525
1526 if (index == 0) {
1527 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1528 }
1529 }
1530
1531 void
1532 cso_set_constant_buffer_resource(struct cso_context *cso,
1533 enum pipe_shader_type shader_stage,
1534 unsigned index,
1535 struct pipe_resource *buffer)
1536 {
1537 if (buffer) {
1538 struct pipe_constant_buffer cb;
1539 cb.buffer = buffer;
1540 cb.buffer_offset = 0;
1541 cb.buffer_size = buffer->width0;
1542 cb.user_buffer = NULL;
1543 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1544 } else {
1545 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1546 }
1547 }
1548
1549 void
1550 cso_save_constant_buffer_slot0(struct cso_context *cso,
1551 enum pipe_shader_type shader_stage)
1552 {
1553 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1554 &cso->aux_constbuf_current[shader_stage]);
1555 }
1556
1557 void
1558 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1559 enum pipe_shader_type shader_stage)
1560 {
1561 cso_set_constant_buffer(cso, shader_stage, 0,
1562 &cso->aux_constbuf_saved[shader_stage]);
1563 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1564 NULL);
1565 }
1566
1567
1568 /**
1569 * Save all the CSO state items specified by the state_mask bitmask
1570 * of CSO_BIT_x flags.
1571 */
1572 void
1573 cso_save_state(struct cso_context *cso, unsigned state_mask)
1574 {
1575 assert(cso->saved_state == 0);
1576
1577 cso->saved_state = state_mask;
1578
1579 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1580 cso_save_aux_vertex_buffer_slot(cso);
1581 if (state_mask & CSO_BIT_BLEND)
1582 cso_save_blend(cso);
1583 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1584 cso_save_depth_stencil_alpha(cso);
1585 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1586 cso_save_fragment_samplers(cso);
1587 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1588 cso_save_fragment_sampler_views(cso);
1589 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1590 cso_save_fragment_shader(cso);
1591 if (state_mask & CSO_BIT_FRAMEBUFFER)
1592 cso_save_framebuffer(cso);
1593 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1594 cso_save_geometry_shader(cso);
1595 if (state_mask & CSO_BIT_MIN_SAMPLES)
1596 cso_save_min_samples(cso);
1597 if (state_mask & CSO_BIT_RASTERIZER)
1598 cso_save_rasterizer(cso);
1599 if (state_mask & CSO_BIT_RENDER_CONDITION)
1600 cso_save_render_condition(cso);
1601 if (state_mask & CSO_BIT_SAMPLE_MASK)
1602 cso_save_sample_mask(cso);
1603 if (state_mask & CSO_BIT_STENCIL_REF)
1604 cso_save_stencil_ref(cso);
1605 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1606 cso_save_stream_outputs(cso);
1607 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1608 cso_save_tessctrl_shader(cso);
1609 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1610 cso_save_tesseval_shader(cso);
1611 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1612 cso_save_vertex_elements(cso);
1613 if (state_mask & CSO_BIT_VERTEX_SHADER)
1614 cso_save_vertex_shader(cso);
1615 if (state_mask & CSO_BIT_VIEWPORT)
1616 cso_save_viewport(cso);
1617 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1618 cso->pipe->set_active_query_state(cso->pipe, false);
1619 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1620 cso_save_fragment_image0(cso);
1621 }
1622
1623
1624 /**
1625 * Restore the state which was saved by cso_save_state().
1626 */
1627 void
1628 cso_restore_state(struct cso_context *cso)
1629 {
1630 unsigned state_mask = cso->saved_state;
1631
1632 assert(state_mask);
1633
1634 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1635 cso_restore_aux_vertex_buffer_slot(cso);
1636 if (state_mask & CSO_BIT_BLEND)
1637 cso_restore_blend(cso);
1638 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1639 cso_restore_depth_stencil_alpha(cso);
1640 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1641 cso_restore_fragment_samplers(cso);
1642 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1643 cso_restore_fragment_sampler_views(cso);
1644 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1645 cso_restore_fragment_shader(cso);
1646 if (state_mask & CSO_BIT_FRAMEBUFFER)
1647 cso_restore_framebuffer(cso);
1648 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1649 cso_restore_geometry_shader(cso);
1650 if (state_mask & CSO_BIT_MIN_SAMPLES)
1651 cso_restore_min_samples(cso);
1652 if (state_mask & CSO_BIT_RASTERIZER)
1653 cso_restore_rasterizer(cso);
1654 if (state_mask & CSO_BIT_RENDER_CONDITION)
1655 cso_restore_render_condition(cso);
1656 if (state_mask & CSO_BIT_SAMPLE_MASK)
1657 cso_restore_sample_mask(cso);
1658 if (state_mask & CSO_BIT_STENCIL_REF)
1659 cso_restore_stencil_ref(cso);
1660 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1661 cso_restore_stream_outputs(cso);
1662 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1663 cso_restore_tessctrl_shader(cso);
1664 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1665 cso_restore_tesseval_shader(cso);
1666 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1667 cso_restore_vertex_elements(cso);
1668 if (state_mask & CSO_BIT_VERTEX_SHADER)
1669 cso_restore_vertex_shader(cso);
1670 if (state_mask & CSO_BIT_VIEWPORT)
1671 cso_restore_viewport(cso);
1672 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1673 cso->pipe->set_active_query_state(cso->pipe, true);
1674 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1675 cso_restore_fragment_image0(cso);
1676
1677 cso->saved_state = 0;
1678 }
1679
1680
1681
1682 /* drawing */
1683
1684 void
1685 cso_draw_vbo(struct cso_context *cso,
1686 const struct pipe_draw_info *info)
1687 {
1688 struct u_vbuf *vbuf = cso->vbuf;
1689
1690 if (vbuf) {
1691 u_vbuf_draw_vbo(vbuf, info);
1692 } else {
1693 struct pipe_context *pipe = cso->pipe;
1694 pipe->draw_vbo(pipe, info);
1695 }
1696 }
1697
1698 void
1699 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1700 {
1701 struct pipe_draw_info info;
1702
1703 util_draw_init_info(&info);
1704
1705 info.mode = mode;
1706 info.start = start;
1707 info.count = count;
1708 info.min_index = start;
1709 info.max_index = start + count - 1;
1710
1711 cso_draw_vbo(cso, &info);
1712 }
1713
1714 void
1715 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1716 uint start, uint count,
1717 uint start_instance, uint instance_count)
1718 {
1719 struct pipe_draw_info info;
1720
1721 util_draw_init_info(&info);
1722
1723 info.mode = mode;
1724 info.start = start;
1725 info.count = count;
1726 info.min_index = start;
1727 info.max_index = start + count - 1;
1728 info.start_instance = start_instance;
1729 info.instance_count = instance_count;
1730
1731 cso_draw_vbo(cso, &info);
1732 }