cso: add new cso_save/restore_state() functions
[mesa.git] src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
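
/*
 * Typical usage (a rough sketch; the pipe context and the state templates
 * are assumed to be set up by the state tracker):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *    cso_set_blend(cso, &blend_templ);
 *    cso_set_rasterizer(cso, &rast_templ);
 *    cso_set_depth_stencil_alpha(cso, &dsa_templ);
 *    ...
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *    ...
 *    cso_destroy_context(cso);
 */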
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Per-shader-stage sampler info.
55 * We have one of these for each shader stage (vertex, fragment, geometry, etc.).
56 */
57 struct sampler_info
58 {
59 void *samplers[PIPE_MAX_SAMPLERS];
60 unsigned nr_samplers;
61 };
62
63
64
65 struct cso_context {
66 struct pipe_context *pipe;
67 struct cso_cache *cache;
68 struct u_vbuf *vbuf;
69
70 boolean has_geometry_shader;
71 boolean has_tessellation;
72 boolean has_compute_shader;
73 boolean has_streamout;
74
75 unsigned saved_state; /**< bitmask of CSO_BIT_x flags */
76
77 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
78 unsigned nr_fragment_views;
79
80 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
81 unsigned nr_fragment_views_saved;
82
83 void *fragment_samplers_saved[PIPE_MAX_SAMPLERS];
84 unsigned nr_fragment_samplers_saved;
85
86 struct sampler_info samplers[PIPE_SHADER_TYPES];
87
88 struct pipe_vertex_buffer aux_vertex_buffer_current;
89 struct pipe_vertex_buffer aux_vertex_buffer_saved;
90 unsigned aux_vertex_buffer_index;
91
92 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
93 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
94
95 unsigned nr_so_targets;
96 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
97
98 unsigned nr_so_targets_saved;
99 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
100
101 /** Current and saved state.
102 * The saved state is used as a 1-deep stack.
103 */
104 void *blend, *blend_saved;
105 void *depth_stencil, *depth_stencil_saved;
106 void *rasterizer, *rasterizer_saved;
107 void *fragment_shader, *fragment_shader_saved;
108 void *vertex_shader, *vertex_shader_saved;
109 void *geometry_shader, *geometry_shader_saved;
110 void *tessctrl_shader, *tessctrl_shader_saved;
111 void *tesseval_shader, *tesseval_shader_saved;
112 void *compute_shader;
113 void *velements, *velements_saved;
114 struct pipe_query *render_condition, *render_condition_saved;
115 uint render_condition_mode, render_condition_mode_saved;
116 boolean render_condition_cond, render_condition_cond_saved;
117
118 struct pipe_framebuffer_state fb, fb_saved;
119 struct pipe_viewport_state vp, vp_saved;
120 struct pipe_blend_color blend_color;
121 unsigned sample_mask, sample_mask_saved;
122 unsigned min_samples, min_samples_saved;
123 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
124 };
125
126
127 static boolean delete_blend_state(struct cso_context *ctx, void *state)
128 {
129 struct cso_blend *cso = (struct cso_blend *)state;
130
131 if (ctx->blend == cso->data)
132 return FALSE;
133
134 if (cso->delete_state)
135 cso->delete_state(cso->context, cso->data);
136 FREE(state);
137 return TRUE;
138 }
139
140 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
141 {
142 struct cso_depth_stencil_alpha *cso =
143 (struct cso_depth_stencil_alpha *)state;
144
145 if (ctx->depth_stencil == cso->data)
146 return FALSE;
147
148 if (cso->delete_state)
149 cso->delete_state(cso->context, cso->data);
150 FREE(state);
151
152 return TRUE;
153 }
154
155 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
156 {
157 struct cso_sampler *cso = (struct cso_sampler *)state;
158 if (cso->delete_state)
159 cso->delete_state(cso->context, cso->data);
160 FREE(state);
161 return TRUE;
162 }
163
164 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
165 {
166 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
167
168 if (ctx->rasterizer == cso->data)
169 return FALSE;
170 if (cso->delete_state)
171 cso->delete_state(cso->context, cso->data);
172 FREE(state);
173 return TRUE;
174 }
175
176 static boolean delete_vertex_elements(struct cso_context *ctx,
177 void *state)
178 {
179 struct cso_velements *cso = (struct cso_velements *)state;
180
181 if (ctx->velements == cso->data)
182 return FALSE;
183
184 if (cso->delete_state)
185 cso->delete_state(cso->context, cso->data);
186 FREE(state);
187 return TRUE;
188 }
189
190
191 static inline boolean delete_cso(struct cso_context *ctx,
192 void *state, enum cso_cache_type type)
193 {
194 switch (type) {
195 case CSO_BLEND:
196 return delete_blend_state(ctx, state);
197 case CSO_SAMPLER:
198 return delete_sampler_state(ctx, state);
199 case CSO_DEPTH_STENCIL_ALPHA:
200 return delete_depth_stencil_state(ctx, state);
201 case CSO_RASTERIZER:
202 return delete_rasterizer_state(ctx, state);
203 case CSO_VELEMENTS:
204 return delete_vertex_elements(ctx, state);
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
212 static inline void
213 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
214 int max_size, void *user_data)
215 {
216 struct cso_context *ctx = (struct cso_context *)user_data;
217 /* if we're approaching the maximum size, remove a fourth of the entries;
218 * otherwise every subsequent call will go through the same eviction */
219 int hash_size = cso_hash_size(hash);
220 int max_entries = (max_size > hash_size) ? max_size : hash_size;
221 int to_remove = (max_size < max_entries) * max_entries/4;
222 struct cso_hash_iter iter = cso_hash_first_node(hash);
223 if (hash_size > max_size)
224 to_remove += hash_size - max_size;
225 while (to_remove) {
226 /* remove elements until we're good */
227 /* FIXME: currently we pick the nodes to remove at random */
228 void *cso = cso_hash_iter_data(iter);
229 if (delete_cso(ctx, cso, type)) {
230 iter = cso_hash_erase(hash, iter);
231 --to_remove;
232 } else
233 iter = cso_hash_iter_next(iter);
234 }
235 }
236
237 static void cso_init_vbuf(struct cso_context *cso)
238 {
239 struct u_vbuf_caps caps;
240
241 /* Install u_vbuf if there is anything unsupported. */
242 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
243 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
244 cso->aux_vertex_buffer_index);
245 }
246 }
247
248 struct cso_context *cso_create_context( struct pipe_context *pipe )
249 {
250 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
251 if (!ctx)
252 goto out;
253
254 ctx->cache = cso_cache_create();
255 if (ctx->cache == NULL)
256 goto out;
257 cso_cache_set_sanitize_callback(ctx->cache,
258 sanitize_hash,
259 ctx);
260
261 ctx->pipe = pipe;
262 ctx->sample_mask = ~0;
263
264 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
265
266 cso_init_vbuf(ctx);
267
268 /* Enable for testing: */
269 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
270
271 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
272 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
273 ctx->has_geometry_shader = TRUE;
274 }
275 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
276 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
277 ctx->has_tessellation = TRUE;
278 }
279 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
280 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
281 ctx->has_compute_shader = TRUE;
282 }
283 if (pipe->screen->get_param(pipe->screen,
284 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
285 ctx->has_streamout = TRUE;
286 }
287
288 return ctx;
289
290 out:
291 cso_destroy_context( ctx );
292 return NULL;
293 }
294
295 /**
296 * Free the CSO context.
297 */
298 void cso_destroy_context( struct cso_context *ctx )
299 {
300 unsigned i;
301
302 if (ctx->pipe) {
303 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
304
305 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
306 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
307
308 {
309 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
310 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
311 struct pipe_screen *scr = ctx->pipe->screen;
312 unsigned sh;
313 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
314 int maxsam = scr->get_shader_param(scr, sh,
315 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
316 int maxview = scr->get_shader_param(scr, sh,
317 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
318 assert(maxsam <= PIPE_MAX_SAMPLERS);
319 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
320 if (maxsam > 0) {
321 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
322 }
323 if (maxview > 0) {
324 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
325 }
326 }
327 }
328
329 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
330 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
331 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
332 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
333 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
334 if (ctx->has_geometry_shader) {
335 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
336 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
337 }
338 if (ctx->has_tessellation) {
339 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
340 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
341 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
342 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
343 }
344 if (ctx->has_compute_shader) {
345 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
346 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
347 }
348 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
349
350 if (ctx->has_streamout)
351 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
352 }
353
354 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
355 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
356 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
357 }
358
359 util_unreference_framebuffer_state(&ctx->fb);
360 util_unreference_framebuffer_state(&ctx->fb_saved);
361
362 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
363 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
364
365 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
366 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
367 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
368 }
369
370 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
371 pipe_so_target_reference(&ctx->so_targets[i], NULL);
372 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
373 }
374
375 if (ctx->cache) {
376 cso_cache_delete( ctx->cache );
377 ctx->cache = NULL;
378 }
379
380 if (ctx->vbuf)
381 u_vbuf_destroy(ctx->vbuf);
382 FREE( ctx );
383 }
384
385
386 /* These functions will either find the state matching the given template
387 * in the cache, or create a new state from the template, insert it into
388 * the cache and return it.
389 */
390
391 /*
392 * If the driver returns 0 from the create method, these functions will
393 * assign the data member of the cso to be the template itself.
394 */
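
/*
 * For example, a caller would typically fill in a template on the stack and
 * let the cache deduplicate it (an illustrative sketch; the exact blend
 * settings are arbitrary):
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].blend_enable = 1;
 *    blend.rt[0].rgb_func = PIPE_BLEND_ADD;
 *    blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].alpha_func = PIPE_BLEND_ADD;
 *    blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(ctx, &blend);
 */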
395
396 enum pipe_error cso_set_blend(struct cso_context *ctx,
397 const struct pipe_blend_state *templ)
398 {
399 unsigned key_size, hash_key;
400 struct cso_hash_iter iter;
401 void *handle;
402
403 key_size = templ->independent_blend_enable ?
404 sizeof(struct pipe_blend_state) :
405 (char *)&(templ->rt[1]) - (char *)templ;
406 hash_key = cso_construct_key((void*)templ, key_size);
407 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
408 (void*)templ, key_size);
409
410 if (cso_hash_iter_is_null(iter)) {
411 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
412 if (!cso)
413 return PIPE_ERROR_OUT_OF_MEMORY;
414
415 memset(&cso->state, 0, sizeof cso->state);
416 memcpy(&cso->state, templ, key_size);
417 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
418 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
419 cso->context = ctx->pipe;
420
421 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
422 if (cso_hash_iter_is_null(iter)) {
423 FREE(cso);
424 return PIPE_ERROR_OUT_OF_MEMORY;
425 }
426
427 handle = cso->data;
428 }
429 else {
430 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
431 }
432
433 if (ctx->blend != handle) {
434 ctx->blend = handle;
435 ctx->pipe->bind_blend_state(ctx->pipe, handle);
436 }
437 return PIPE_OK;
438 }
439
440 void cso_save_blend(struct cso_context *ctx)
441 {
442 assert(!ctx->blend_saved);
443 ctx->blend_saved = ctx->blend;
444 }
445
446 void cso_restore_blend(struct cso_context *ctx)
447 {
448 if (ctx->blend != ctx->blend_saved) {
449 ctx->blend = ctx->blend_saved;
450 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
451 }
452 ctx->blend_saved = NULL;
453 }
454
455
456
457 enum pipe_error
458 cso_set_depth_stencil_alpha(struct cso_context *ctx,
459 const struct pipe_depth_stencil_alpha_state *templ)
460 {
461 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
462 unsigned hash_key = cso_construct_key((void*)templ, key_size);
463 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
464 hash_key,
465 CSO_DEPTH_STENCIL_ALPHA,
466 (void*)templ, key_size);
467 void *handle;
468
469 if (cso_hash_iter_is_null(iter)) {
470 struct cso_depth_stencil_alpha *cso =
471 MALLOC(sizeof(struct cso_depth_stencil_alpha));
472 if (!cso)
473 return PIPE_ERROR_OUT_OF_MEMORY;
474
475 memcpy(&cso->state, templ, sizeof(*templ));
476 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
477 &cso->state);
478 cso->delete_state =
479 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
480 cso->context = ctx->pipe;
481
482 iter = cso_insert_state(ctx->cache, hash_key,
483 CSO_DEPTH_STENCIL_ALPHA, cso);
484 if (cso_hash_iter_is_null(iter)) {
485 FREE(cso);
486 return PIPE_ERROR_OUT_OF_MEMORY;
487 }
488
489 handle = cso->data;
490 }
491 else {
492 handle = ((struct cso_depth_stencil_alpha *)
493 cso_hash_iter_data(iter))->data;
494 }
495
496 if (ctx->depth_stencil != handle) {
497 ctx->depth_stencil = handle;
498 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
499 }
500 return PIPE_OK;
501 }
502
503 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
504 {
505 assert(!ctx->depth_stencil_saved);
506 ctx->depth_stencil_saved = ctx->depth_stencil;
507 }
508
509 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
510 {
511 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
512 ctx->depth_stencil = ctx->depth_stencil_saved;
513 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
514 ctx->depth_stencil_saved);
515 }
516 ctx->depth_stencil_saved = NULL;
517 }
518
519
520
521 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
522 const struct pipe_rasterizer_state *templ)
523 {
524 unsigned key_size = sizeof(struct pipe_rasterizer_state);
525 unsigned hash_key = cso_construct_key((void*)templ, key_size);
526 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
527 hash_key,
528 CSO_RASTERIZER,
529 (void*)templ, key_size);
530 void *handle = NULL;
531
532 if (cso_hash_iter_is_null(iter)) {
533 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
534 if (!cso)
535 return PIPE_ERROR_OUT_OF_MEMORY;
536
537 memcpy(&cso->state, templ, sizeof(*templ));
538 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
539 cso->delete_state =
540 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
541 cso->context = ctx->pipe;
542
543 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
544 if (cso_hash_iter_is_null(iter)) {
545 FREE(cso);
546 return PIPE_ERROR_OUT_OF_MEMORY;
547 }
548
549 handle = cso->data;
550 }
551 else {
552 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
553 }
554
555 if (ctx->rasterizer != handle) {
556 ctx->rasterizer = handle;
557 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
558 }
559 return PIPE_OK;
560 }
561
562 void cso_save_rasterizer(struct cso_context *ctx)
563 {
564 assert(!ctx->rasterizer_saved);
565 ctx->rasterizer_saved = ctx->rasterizer;
566 }
567
568 void cso_restore_rasterizer(struct cso_context *ctx)
569 {
570 if (ctx->rasterizer != ctx->rasterizer_saved) {
571 ctx->rasterizer = ctx->rasterizer_saved;
572 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
573 }
574 ctx->rasterizer_saved = NULL;
575 }
576
577
578 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
579 {
580 if (ctx->fragment_shader != handle) {
581 ctx->fragment_shader = handle;
582 ctx->pipe->bind_fs_state(ctx->pipe, handle);
583 }
584 }
585
586 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
587 {
588 if (handle == ctx->fragment_shader) {
589 /* unbind before deleting */
590 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
591 ctx->fragment_shader = NULL;
592 }
593 ctx->pipe->delete_fs_state(ctx->pipe, handle);
594 }
595
596 void cso_save_fragment_shader(struct cso_context *ctx)
597 {
598 assert(!ctx->fragment_shader_saved);
599 ctx->fragment_shader_saved = ctx->fragment_shader;
600 }
601
602 void cso_restore_fragment_shader(struct cso_context *ctx)
603 {
604 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
605 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
606 ctx->fragment_shader = ctx->fragment_shader_saved;
607 }
608 ctx->fragment_shader_saved = NULL;
609 }
610
611
612 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
613 {
614 if (ctx->vertex_shader != handle) {
615 ctx->vertex_shader = handle;
616 ctx->pipe->bind_vs_state(ctx->pipe, handle);
617 }
618 }
619
620 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
621 {
622 if (handle == ctx->vertex_shader) {
623 /* unbind before deleting */
624 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
625 ctx->vertex_shader = NULL;
626 }
627 ctx->pipe->delete_vs_state(ctx->pipe, handle);
628 }
629
630 void cso_save_vertex_shader(struct cso_context *ctx)
631 {
632 assert(!ctx->vertex_shader_saved);
633 ctx->vertex_shader_saved = ctx->vertex_shader;
634 }
635
636 void cso_restore_vertex_shader(struct cso_context *ctx)
637 {
638 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
639 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
640 ctx->vertex_shader = ctx->vertex_shader_saved;
641 }
642 ctx->vertex_shader_saved = NULL;
643 }
644
645
646 void cso_set_framebuffer(struct cso_context *ctx,
647 const struct pipe_framebuffer_state *fb)
648 {
649 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
650 util_copy_framebuffer_state(&ctx->fb, fb);
651 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
652 }
653 }
654
655 void cso_save_framebuffer(struct cso_context *ctx)
656 {
657 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
658 }
659
660 void cso_restore_framebuffer(struct cso_context *ctx)
661 {
662 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
663 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
664 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
665 util_unreference_framebuffer_state(&ctx->fb_saved);
666 }
667 }
668
669
670 void cso_set_viewport(struct cso_context *ctx,
671 const struct pipe_viewport_state *vp)
672 {
673 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
674 ctx->vp = *vp;
675 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
676 }
677 }
678
679 /**
680 * Set up viewport state for the given width and height (position is always (0,0)).
681 * Invert the Y axis if 'invert' is true.
682 */
683 void
684 cso_set_viewport_dims(struct cso_context *ctx,
685 float width, float height, boolean invert)
686 {
687 struct pipe_viewport_state vp;
688 vp.scale[0] = width * 0.5f;
689 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
690 vp.scale[2] = 0.5f;
691 vp.translate[0] = 0.5f * width;
692 vp.translate[1] = 0.5f * height;
693 vp.translate[2] = 0.5f;
694 cso_set_viewport(ctx, &vp);
695 }
696
697 void cso_save_viewport(struct cso_context *ctx)
698 {
699 ctx->vp_saved = ctx->vp;
700 }
701
702
703 void cso_restore_viewport(struct cso_context *ctx)
704 {
705 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
706 ctx->vp = ctx->vp_saved;
707 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
708 }
709 }
710
711
712 void cso_set_blend_color(struct cso_context *ctx,
713 const struct pipe_blend_color *bc)
714 {
715 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
716 ctx->blend_color = *bc;
717 ctx->pipe->set_blend_color(ctx->pipe, bc);
718 }
719 }
720
721 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
722 {
723 if (ctx->sample_mask != sample_mask) {
724 ctx->sample_mask = sample_mask;
725 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
726 }
727 }
728
729 void cso_save_sample_mask(struct cso_context *ctx)
730 {
731 ctx->sample_mask_saved = ctx->sample_mask;
732 }
733
734 void cso_restore_sample_mask(struct cso_context *ctx)
735 {
736 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
737 }
738
739 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
740 {
741 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
742 ctx->min_samples = min_samples;
743 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
744 }
745 }
746
747 void cso_save_min_samples(struct cso_context *ctx)
748 {
749 ctx->min_samples_saved = ctx->min_samples;
750 }
751
752 void cso_restore_min_samples(struct cso_context *ctx)
753 {
754 cso_set_min_samples(ctx, ctx->min_samples_saved);
755 }
756
757 void cso_set_stencil_ref(struct cso_context *ctx,
758 const struct pipe_stencil_ref *sr)
759 {
760 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
761 ctx->stencil_ref = *sr;
762 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
763 }
764 }
765
766 void cso_save_stencil_ref(struct cso_context *ctx)
767 {
768 ctx->stencil_ref_saved = ctx->stencil_ref;
769 }
770
771
772 void cso_restore_stencil_ref(struct cso_context *ctx)
773 {
774 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
775 sizeof(ctx->stencil_ref))) {
776 ctx->stencil_ref = ctx->stencil_ref_saved;
777 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
778 }
779 }
780
781 void cso_set_render_condition(struct cso_context *ctx,
782 struct pipe_query *query,
783 boolean condition, uint mode)
784 {
785 struct pipe_context *pipe = ctx->pipe;
786
787 if (ctx->render_condition != query ||
788 ctx->render_condition_mode != mode ||
789 ctx->render_condition_cond != condition) {
790 pipe->render_condition(pipe, query, condition, mode);
791 ctx->render_condition = query;
792 ctx->render_condition_cond = condition;
793 ctx->render_condition_mode = mode;
794 }
795 }
796
797 void cso_save_render_condition(struct cso_context *ctx)
798 {
799 ctx->render_condition_saved = ctx->render_condition;
800 ctx->render_condition_cond_saved = ctx->render_condition_cond;
801 ctx->render_condition_mode_saved = ctx->render_condition_mode;
802 }
803
804 void cso_restore_render_condition(struct cso_context *ctx)
805 {
806 cso_set_render_condition(ctx, ctx->render_condition_saved,
807 ctx->render_condition_cond_saved,
808 ctx->render_condition_mode_saved);
809 }
810
811 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
812 {
813 assert(ctx->has_geometry_shader || !handle);
814
815 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
816 ctx->geometry_shader = handle;
817 ctx->pipe->bind_gs_state(ctx->pipe, handle);
818 }
819 }
820
821 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
822 {
823 if (handle == ctx->geometry_shader) {
824 /* unbind before deleting */
825 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
826 ctx->geometry_shader = NULL;
827 }
828 ctx->pipe->delete_gs_state(ctx->pipe, handle);
829 }
830
831 void cso_save_geometry_shader(struct cso_context *ctx)
832 {
833 if (!ctx->has_geometry_shader) {
834 return;
835 }
836
837 assert(!ctx->geometry_shader_saved);
838 ctx->geometry_shader_saved = ctx->geometry_shader;
839 }
840
841 void cso_restore_geometry_shader(struct cso_context *ctx)
842 {
843 if (!ctx->has_geometry_shader) {
844 return;
845 }
846
847 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
848 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
849 ctx->geometry_shader = ctx->geometry_shader_saved;
850 }
851 ctx->geometry_shader_saved = NULL;
852 }
853
854 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
855 {
856 assert(ctx->has_tessellation || !handle);
857
858 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
859 ctx->tessctrl_shader = handle;
860 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
861 }
862 }
863
864 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
865 {
866 if (handle == ctx->tessctrl_shader) {
867 /* unbind before deleting */
868 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
869 ctx->tessctrl_shader = NULL;
870 }
871 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
872 }
873
874 void cso_save_tessctrl_shader(struct cso_context *ctx)
875 {
876 if (!ctx->has_tessellation) {
877 return;
878 }
879
880 assert(!ctx->tessctrl_shader_saved);
881 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
882 }
883
884 void cso_restore_tessctrl_shader(struct cso_context *ctx)
885 {
886 if (!ctx->has_tessellation) {
887 return;
888 }
889
890 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
891 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
892 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
893 }
894 ctx->tessctrl_shader_saved = NULL;
895 }
896
897 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
898 {
899 assert(ctx->has_tessellation || !handle);
900
901 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
902 ctx->tesseval_shader = handle;
903 ctx->pipe->bind_tes_state(ctx->pipe, handle);
904 }
905 }
906
907 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
908 {
909 if (handle == ctx->tesseval_shader) {
910 /* unbind before deleting */
911 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
912 ctx->tesseval_shader = NULL;
913 }
914 ctx->pipe->delete_tes_state(ctx->pipe, handle);
915 }
916
917 void cso_save_tesseval_shader(struct cso_context *ctx)
918 {
919 if (!ctx->has_tessellation) {
920 return;
921 }
922
923 assert(!ctx->tesseval_shader_saved);
924 ctx->tesseval_shader_saved = ctx->tesseval_shader;
925 }
926
927 void cso_restore_tesseval_shader(struct cso_context *ctx)
928 {
929 if (!ctx->has_tessellation) {
930 return;
931 }
932
933 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
934 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
935 ctx->tesseval_shader = ctx->tesseval_shader_saved;
936 }
937 ctx->tesseval_shader_saved = NULL;
938 }
939
940 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
941 {
942 assert(ctx->has_compute_shader || !handle);
943
944 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
945 ctx->compute_shader = handle;
946 ctx->pipe->bind_compute_state(ctx->pipe, handle);
947 }
948 }
949
950 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
951 {
952 if (handle == ctx->compute_shader) {
953 /* unbind before deleting */
954 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
955 ctx->compute_shader = NULL;
956 }
957 ctx->pipe->delete_compute_state(ctx->pipe, handle);
958 }
959
960 enum pipe_error
961 cso_set_vertex_elements(struct cso_context *ctx,
962 unsigned count,
963 const struct pipe_vertex_element *states)
964 {
965 struct u_vbuf *vbuf = ctx->vbuf;
966 unsigned key_size, hash_key;
967 struct cso_hash_iter iter;
968 void *handle;
969 struct cso_velems_state velems_state;
970
971 if (vbuf) {
972 u_vbuf_set_vertex_elements(vbuf, count, states);
973 return PIPE_OK;
974 }
975
976 /* Need to include the count in the stored state data too.
977 * Otherwise the first 'count' pipe_vertex_elements could be identical
978 * even if the counts differ, and there's no guarantee the hash would
979 * be different in that case either.
980 */
981 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
982 velems_state.count = count;
983 memcpy(velems_state.velems, states,
984 sizeof(struct pipe_vertex_element) * count);
985 hash_key = cso_construct_key((void*)&velems_state, key_size);
986 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
987 (void*)&velems_state, key_size);
988
989 if (cso_hash_iter_is_null(iter)) {
990 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
991 if (!cso)
992 return PIPE_ERROR_OUT_OF_MEMORY;
993
994 memcpy(&cso->state, &velems_state, key_size);
995 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
996 &cso->state.velems[0]);
997 cso->delete_state =
998 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
999 cso->context = ctx->pipe;
1000
1001 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1002 if (cso_hash_iter_is_null(iter)) {
1003 FREE(cso);
1004 return PIPE_ERROR_OUT_OF_MEMORY;
1005 }
1006
1007 handle = cso->data;
1008 }
1009 else {
1010 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1011 }
1012
1013 if (ctx->velements != handle) {
1014 ctx->velements = handle;
1015 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1016 }
1017 return PIPE_OK;
1018 }
1019
1020 void cso_save_vertex_elements(struct cso_context *ctx)
1021 {
1022 struct u_vbuf *vbuf = ctx->vbuf;
1023
1024 if (vbuf) {
1025 u_vbuf_save_vertex_elements(vbuf);
1026 return;
1027 }
1028
1029 assert(!ctx->velements_saved);
1030 ctx->velements_saved = ctx->velements;
1031 }
1032
1033 void cso_restore_vertex_elements(struct cso_context *ctx)
1034 {
1035 struct u_vbuf *vbuf = ctx->vbuf;
1036
1037 if (vbuf) {
1038 u_vbuf_restore_vertex_elements(vbuf);
1039 return;
1040 }
1041
1042 if (ctx->velements != ctx->velements_saved) {
1043 ctx->velements = ctx->velements_saved;
1044 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1045 }
1046 ctx->velements_saved = NULL;
1047 }
1048
1049 /* vertex buffers */
1050
1051 void cso_set_vertex_buffers(struct cso_context *ctx,
1052 unsigned start_slot, unsigned count,
1053 const struct pipe_vertex_buffer *buffers)
1054 {
1055 struct u_vbuf *vbuf = ctx->vbuf;
1056
1057 if (vbuf) {
1058 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1059 return;
1060 }
1061
1062 /* Track what's in the auxiliary slot, so that meta ops can save and
1063 * restore it. */
1064 if (start_slot <= ctx->aux_vertex_buffer_index &&
1065 start_slot+count > ctx->aux_vertex_buffer_index) {
1066 if (buffers) {
1067 const struct pipe_vertex_buffer *vb =
1068 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1069
1070 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1071 vb->buffer);
1072 memcpy(&ctx->aux_vertex_buffer_current, vb,
1073 sizeof(struct pipe_vertex_buffer));
1074 }
1075 else {
1076 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1077 NULL);
1078 ctx->aux_vertex_buffer_current.user_buffer = NULL;
1079 }
1080 }
1081
1082 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1083 }
1084
1085 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1086 {
1087 struct u_vbuf *vbuf = ctx->vbuf;
1088
1089 if (vbuf) {
1090 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1091 return;
1092 }
1093
1094 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1095 ctx->aux_vertex_buffer_current.buffer);
1096 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1097 sizeof(struct pipe_vertex_buffer));
1098 }
1099
1100 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1101 {
1102 struct u_vbuf *vbuf = ctx->vbuf;
1103
1104 if (vbuf) {
1105 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1106 return;
1107 }
1108
1109 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1110 &ctx->aux_vertex_buffer_saved);
1111 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1112 }
1113
1114 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1115 {
1116 return ctx->aux_vertex_buffer_index;
1117 }
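
/*
 * A meta op would typically use the auxiliary slot roughly like this
 * (an illustrative sketch; 'velems' and 'vbuf' are assumed to be filled in
 * by the caller):
 *
 *    cso_save_aux_vertex_buffer_slot(cso);
 *    cso_set_vertex_buffers(cso, cso_get_aux_vertex_buffer_slot(cso),
 *                           1, &vbuf);
 *    cso_set_vertex_elements(cso, 2, velems);
 *    ...draw...
 *    cso_restore_aux_vertex_buffer_slot(cso);
 */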
1118
1119
1120 /**************** fragment/vertex sampler view state *************************/
1121
1122 enum pipe_error
1123 cso_single_sampler(struct cso_context *ctx, unsigned shader_stage,
1124 unsigned idx, const struct pipe_sampler_state *templ)
1125 {
1126 void *handle = NULL;
1127
1128 if (templ) {
1129 unsigned key_size = sizeof(struct pipe_sampler_state);
1130 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1131 struct cso_hash_iter iter =
1132 cso_find_state_template(ctx->cache,
1133 hash_key, CSO_SAMPLER,
1134 (void *) templ, key_size);
1135
1136 if (cso_hash_iter_is_null(iter)) {
1137 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1138 if (!cso)
1139 return PIPE_ERROR_OUT_OF_MEMORY;
1140
1141 memcpy(&cso->state, templ, sizeof(*templ));
1142 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1143 cso->delete_state =
1144 (cso_state_callback) ctx->pipe->delete_sampler_state;
1145 cso->context = ctx->pipe;
1146
1147 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1148 if (cso_hash_iter_is_null(iter)) {
1149 FREE(cso);
1150 return PIPE_ERROR_OUT_OF_MEMORY;
1151 }
1152
1153 handle = cso->data;
1154 }
1155 else {
1156 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1157 }
1158 }
1159
1160 ctx->samplers[shader_stage].samplers[idx] = handle;
1161 return PIPE_OK;
1162 }
1163
1164
1165 void
1166 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1167 {
1168 struct sampler_info *info = &ctx->samplers[shader_stage];
1169 unsigned i;
1170
1171 /* find highest non-null sampler */
1172 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1173 if (info->samplers[i - 1] != NULL)
1174 break;
1175 }
1176
1177 info->nr_samplers = i;
1178 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0, i,
1179 info->samplers);
1180 }
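
/*
 * Callers typically update individual samplers and then commit them with a
 * single bind, e.g. (sketch; 'stage', 'nr' and 'templates' are assumed to
 * exist in the caller):
 *
 *    for (i = 0; i < nr; i++)
 *       cso_single_sampler(ctx, stage, i, &templates[i]);
 *    cso_single_sampler_done(ctx, stage);
 */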
1181
1182
1183 /*
1184 * If the function encounters any errors it will return the
1185 * last one, so that we always try to set as many samplers
1186 * as possible.
1187 */
1188 enum pipe_error
1189 cso_set_samplers(struct cso_context *ctx,
1190 unsigned shader_stage,
1191 unsigned nr,
1192 const struct pipe_sampler_state **templates)
1193 {
1194 struct sampler_info *info = &ctx->samplers[shader_stage];
1195 unsigned i;
1196 enum pipe_error temp, error = PIPE_OK;
1197
1198 /* TODO: fastpath
1199 */
1200
1201 for (i = 0; i < nr; i++) {
1202 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1203 if (temp != PIPE_OK)
1204 error = temp;
1205 }
1206
1207 for ( ; i < info->nr_samplers; i++) {
1208 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1209 if (temp != PIPE_OK)
1210 error = temp;
1211 }
1212
1213 cso_single_sampler_done(ctx, shader_stage);
1214
1215 return error;
1216 }
1217
1218 void
1219 cso_save_fragment_samplers(struct cso_context *ctx)
1220 {
1221 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1222
1223 ctx->nr_fragment_samplers_saved = info->nr_samplers;
1224 memcpy(ctx->fragment_samplers_saved, info->samplers,
1225 sizeof(info->samplers));
1226 }
1227
1228
1229 void
1230 cso_restore_fragment_samplers(struct cso_context *ctx)
1231 {
1232 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1233
1234 info->nr_samplers = ctx->nr_fragment_samplers_saved;
1235 memcpy(info->samplers, ctx->fragment_samplers_saved,
1236 sizeof(info->samplers));
1237 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1238 }
1239
1240
1241 void
1242 cso_set_sampler_views(struct cso_context *ctx,
1243 unsigned shader_stage,
1244 unsigned count,
1245 struct pipe_sampler_view **views)
1246 {
1247 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1248 unsigned i;
1249 boolean any_change = FALSE;
1250
1251 /* reference new views */
1252 for (i = 0; i < count; i++) {
1253 any_change |= ctx->fragment_views[i] != views[i];
1254 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1255 }
1256 /* unref extra old views, if any */
1257 for (; i < ctx->nr_fragment_views; i++) {
1258 any_change |= ctx->fragment_views[i] != NULL;
1259 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1260 }
1261
1262 /* bind the new sampler views */
1263 if (any_change) {
1264 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1265 MAX2(ctx->nr_fragment_views, count),
1266 ctx->fragment_views);
1267 }
1268
1269 ctx->nr_fragment_views = count;
1270 }
1271 else
1272 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1273 }
1274
1275
1276 void
1277 cso_save_fragment_sampler_views(struct cso_context *ctx)
1278 {
1279 unsigned i;
1280
1281 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1282
1283 for (i = 0; i < ctx->nr_fragment_views; i++) {
1284 assert(!ctx->fragment_views_saved[i]);
1285 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1286 ctx->fragment_views[i]);
1287 }
1288 }
1289
1290
1291 void
1292 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1293 {
1294 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1295 unsigned num;
1296
1297 for (i = 0; i < nr_saved; i++) {
1298 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1299 /* move the reference from one pointer to another */
1300 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1301 ctx->fragment_views_saved[i] = NULL;
1302 }
1303 for (; i < ctx->nr_fragment_views; i++) {
1304 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1305 }
1306
1307 num = MAX2(ctx->nr_fragment_views, nr_saved);
1308
1309 /* bind the old/saved sampler views */
1310 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1311 ctx->fragment_views);
1312
1313 ctx->nr_fragment_views = nr_saved;
1314 ctx->nr_fragment_views_saved = 0;
1315 }
1316
1317
1318 void
1319 cso_set_stream_outputs(struct cso_context *ctx,
1320 unsigned num_targets,
1321 struct pipe_stream_output_target **targets,
1322 const unsigned *offsets)
1323 {
1324 struct pipe_context *pipe = ctx->pipe;
1325 uint i;
1326
1327 if (!ctx->has_streamout) {
1328 assert(num_targets == 0);
1329 return;
1330 }
1331
1332 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1333 /* Nothing to do. */
1334 return;
1335 }
1336
1337 /* reference new targets */
1338 for (i = 0; i < num_targets; i++) {
1339 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1340 }
1341 /* unref extra old targets, if any */
1342 for (; i < ctx->nr_so_targets; i++) {
1343 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1344 }
1345
1346 pipe->set_stream_output_targets(pipe, num_targets, targets,
1347 offsets);
1348 ctx->nr_so_targets = num_targets;
1349 }
1350
1351 void
1352 cso_save_stream_outputs(struct cso_context *ctx)
1353 {
1354 uint i;
1355
1356 if (!ctx->has_streamout) {
1357 return;
1358 }
1359
1360 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1361
1362 for (i = 0; i < ctx->nr_so_targets; i++) {
1363 assert(!ctx->so_targets_saved[i]);
1364 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1365 }
1366 }
1367
1368 void
1369 cso_restore_stream_outputs(struct cso_context *ctx)
1370 {
1371 struct pipe_context *pipe = ctx->pipe;
1372 uint i;
1373 unsigned offset[PIPE_MAX_SO_BUFFERS];
1374
1375 if (!ctx->has_streamout) {
1376 return;
1377 }
1378
1379 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1380 /* Nothing to do. */
1381 return;
1382 }
1383
1384 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1385 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1386 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1387 /* move the reference from one pointer to another */
1388 ctx->so_targets[i] = ctx->so_targets_saved[i];
1389 ctx->so_targets_saved[i] = NULL;
1390 /* -1 means append */
1391 offset[i] = (unsigned)-1;
1392 }
1393 for (; i < ctx->nr_so_targets; i++) {
1394 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1395 }
1396
1397 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1398 ctx->so_targets, offset);
1399
1400 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1401 ctx->nr_so_targets_saved = 0;
1402 }
1403
1404 /* constant buffers */
1405
1406 void
1407 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1408 unsigned index, struct pipe_constant_buffer *cb)
1409 {
1410 struct pipe_context *pipe = cso->pipe;
1411
1412 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1413
1414 if (index == 0) {
1415 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1416 }
1417 }
1418
1419 void
1420 cso_set_constant_buffer_resource(struct cso_context *cso,
1421 unsigned shader_stage,
1422 unsigned index,
1423 struct pipe_resource *buffer)
1424 {
1425 if (buffer) {
1426 struct pipe_constant_buffer cb;
1427 cb.buffer = buffer;
1428 cb.buffer_offset = 0;
1429 cb.buffer_size = buffer->width0;
1430 cb.user_buffer = NULL;
1431 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1432 } else {
1433 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1434 }
1435 }
1436
1437 void
1438 cso_save_constant_buffer_slot0(struct cso_context *cso,
1439 unsigned shader_stage)
1440 {
1441 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1442 &cso->aux_constbuf_current[shader_stage]);
1443 }
1444
1445 void
1446 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1447 unsigned shader_stage)
1448 {
1449 cso_set_constant_buffer(cso, shader_stage, 0,
1450 &cso->aux_constbuf_saved[shader_stage]);
1451 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1452 NULL);
1453 }
1454
1455
1456 /**
1457 * Save all the CSO state items specified by the state_mask bitmask
1458 * of CSO_BIT_x flags.
1459 */
1460 void
1461 cso_save_state(struct cso_context *cso, unsigned state_mask)
1462 {
1463 assert(cso->saved_state == 0);
1464
1465 cso->saved_state = state_mask;
1466
1467 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1468 cso_save_aux_vertex_buffer_slot(cso);
1469 if (state_mask & CSO_BIT_BLEND)
1470 cso_save_blend(cso);
1471 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1472 cso_save_depth_stencil_alpha(cso);
1473 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1474 cso_save_fragment_samplers(cso);
1475 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1476 cso_save_fragment_sampler_views(cso);
1477 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1478 cso_save_fragment_shader(cso);
1479 if (state_mask & CSO_BIT_FRAMEBUFFER)
1480 cso_save_framebuffer(cso);
1481 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1482 cso_save_geometry_shader(cso);
1483 if (state_mask & CSO_BIT_MIN_SAMPLES)
1484 cso_save_min_samples(cso);
1485 if (state_mask & CSO_BIT_RASTERIZER)
1486 cso_save_rasterizer(cso);
1487 if (state_mask & CSO_BIT_RENDER_CONDITION)
1488 cso_save_render_condition(cso);
1489 if (state_mask & CSO_BIT_SAMPLE_MASK)
1490 cso_save_sample_mask(cso);
1491 if (state_mask & CSO_BIT_STENCIL_REF)
1492 cso_save_stencil_ref(cso);
1493 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1494 cso_save_stream_outputs(cso);
1495 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1496 cso_save_tessctrl_shader(cso);
1497 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1498 cso_save_tesseval_shader(cso);
1499 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1500 cso_save_vertex_elements(cso);
1501 if (state_mask & CSO_BIT_VERTEX_SHADER)
1502 cso_save_vertex_shader(cso);
1503 if (state_mask & CSO_BIT_VIEWPORT)
1504 cso_save_viewport(cso);
1505 }
1506
1507
1508 /**
1509 * Restore the state which was saved by cso_save_state().
1510 */
1511 void
1512 cso_restore_state(struct cso_context *cso)
1513 {
1514 unsigned state_mask = cso->saved_state;
1515
1516 assert(state_mask);
1517
1518 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1519 cso_restore_aux_vertex_buffer_slot(cso);
1520 if (state_mask & CSO_BIT_BLEND)
1521 cso_restore_blend(cso);
1522 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1523 cso_restore_depth_stencil_alpha(cso);
1524 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1525 cso_restore_fragment_samplers(cso);
1526 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1527 cso_restore_fragment_sampler_views(cso);
1528 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1529 cso_restore_fragment_shader(cso);
1530 if (state_mask & CSO_BIT_FRAMEBUFFER)
1531 cso_restore_framebuffer(cso);
1532 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1533 cso_restore_geometry_shader(cso);
1534 if (state_mask & CSO_BIT_MIN_SAMPLES)
1535 cso_restore_min_samples(cso);
1536 if (state_mask & CSO_BIT_RASTERIZER)
1537 cso_restore_rasterizer(cso);
1538 if (state_mask & CSO_BIT_RENDER_CONDITION)
1539 cso_restore_render_condition(cso);
1540 if (state_mask & CSO_BIT_SAMPLE_MASK)
1541 cso_restore_sample_mask(cso);
1542 if (state_mask & CSO_BIT_STENCIL_REF)
1543 cso_restore_stencil_ref(cso);
1544 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1545 cso_restore_stream_outputs(cso);
1546 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1547 cso_restore_tessctrl_shader(cso);
1548 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1549 cso_restore_tesseval_shader(cso);
1550 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1551 cso_restore_vertex_elements(cso);
1552 if (state_mask & CSO_BIT_VERTEX_SHADER)
1553 cso_restore_vertex_shader(cso);
1554 if (state_mask & CSO_BIT_VIEWPORT)
1555 cso_restore_viewport(cso);
1556
1557 cso->saved_state = 0;
1558 }
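
/*
 * Example: a meta operation (e.g. a blit or clear helper) would typically
 * bracket its temporary state changes like this (an illustrative sketch;
 * the exact set of bits depends on what the meta op touches):
 *
 *    cso_save_state(cso, CSO_BIT_BLEND |
 *                        CSO_BIT_DEPTH_STENCIL_ALPHA |
 *                        CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_FRAMEBUFFER |
 *                        CSO_BIT_VERTEX_SHADER |
 *                        CSO_BIT_VIEWPORT);
 *    ...bind temporary CSOs and draw...
 *    cso_restore_state(cso);
 */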
1559
1560
1561
1562 /* drawing */
1563
1564 void
1565 cso_set_index_buffer(struct cso_context *cso,
1566 const struct pipe_index_buffer *ib)
1567 {
1568 struct u_vbuf *vbuf = cso->vbuf;
1569
1570 if (vbuf) {
1571 u_vbuf_set_index_buffer(vbuf, ib);
1572 } else {
1573 struct pipe_context *pipe = cso->pipe;
1574 pipe->set_index_buffer(pipe, ib);
1575 }
1576 }
1577
1578 void
1579 cso_draw_vbo(struct cso_context *cso,
1580 const struct pipe_draw_info *info)
1581 {
1582 struct u_vbuf *vbuf = cso->vbuf;
1583
1584 if (vbuf) {
1585 u_vbuf_draw_vbo(vbuf, info);
1586 } else {
1587 struct pipe_context *pipe = cso->pipe;
1588 pipe->draw_vbo(pipe, info);
1589 }
1590 }
1591
1592 void
1593 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1594 {
1595 struct pipe_draw_info info;
1596
1597 util_draw_init_info(&info);
1598
1599 info.mode = mode;
1600 info.start = start;
1601 info.count = count;
1602 info.min_index = start;
1603 info.max_index = start + count - 1;
1604
1605 cso_draw_vbo(cso, &info);
1606 }
1607
1608 void
1609 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1610 uint start, uint count,
1611 uint start_instance, uint instance_count)
1612 {
1613 struct pipe_draw_info info;
1614
1615 util_draw_init_info(&info);
1616
1617 info.mode = mode;
1618 info.start = start;
1619 info.count = count;
1620 info.min_index = start;
1621 info.max_index = start + count - 1;
1622 info.start_instance = start_instance;
1623 info.instance_count = instance_count;
1624
1625 cso_draw_vbo(cso, &info);
1626 }