cso: only allow saving and restoring fragment sampler states
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
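
/*
 * Example (illustrative sketch): the typical pattern a state tracker uses
 * around a "meta" operation such as a blit: save the pieces of currently
 * bound state it is about to clobber, set its own state, draw, then restore.
 * Saves are 1-deep, so each cso_save_*() must be paired with exactly one
 * cso_restore_*() before the next save.  The function name, the blit_*
 * templates and the draw itself are hypothetical placeholders; the cso_*
 * calls are the ones defined in this file.
 */
#if 0
static void
example_meta_blit(struct cso_context *cso)
{
   struct pipe_blend_state blit_blend;                 /* hypothetical */
   struct pipe_depth_stencil_alpha_state blit_dsa;     /* hypothetical */

   /* save the state we will clobber */
   cso_save_blend(cso);
   cso_save_depth_stencil_alpha(cso);
   cso_save_fragment_shader(cso);
   cso_save_vertex_shader(cso);
   cso_save_fragment_samplers(cso);
   cso_save_fragment_sampler_views(cso);

   /* set our own state; templates are hashed and cached internally */
   memset(&blit_blend, 0, sizeof(blit_blend));
   blit_blend.rt[0].colormask = PIPE_MASK_RGBA;
   cso_set_blend(cso, &blit_blend);

   memset(&blit_dsa, 0, sizeof(blit_dsa));
   cso_set_depth_stencil_alpha(cso, &blit_dsa);

   /* ... bind blit shaders, samplers, views and draw ... */

   /* put everything back the way we found it */
   cso_restore_blend(cso);
   cso_restore_depth_stencil_alpha(cso);
   cso_restore_fragment_shader(cso);
   cso_restore_vertex_shader(cso);
   cso_restore_fragment_samplers(cso);
   cso_restore_fragment_sampler_views(cso);
}
#endif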
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54  * Info related to samplers.
55  * We have one of these per shader stage.
56 */
57 struct sampler_info
58 {
59 void *samplers[PIPE_MAX_SAMPLERS];
60 unsigned nr_samplers;
61 };
62
63
64
65 struct cso_context {
66 struct pipe_context *pipe;
67 struct cso_cache *cache;
68 struct u_vbuf *vbuf;
69
70 boolean has_geometry_shader;
71 boolean has_tessellation;
72 boolean has_streamout;
73
74 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
75 unsigned nr_fragment_views;
76
77 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
78 unsigned nr_fragment_views_saved;
79
80 void *fragment_samplers_saved[PIPE_MAX_SAMPLERS];
81 unsigned nr_fragment_samplers_saved;
82
83 struct sampler_info samplers[PIPE_SHADER_TYPES];
84
85 struct pipe_vertex_buffer aux_vertex_buffer_current;
86 struct pipe_vertex_buffer aux_vertex_buffer_saved;
87 unsigned aux_vertex_buffer_index;
88
89 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
90 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
91
92 unsigned nr_so_targets;
93 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
94
95 unsigned nr_so_targets_saved;
96 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
97
98 /** Current and saved state.
99 * The saved state is used as a 1-deep stack.
100 */
101 void *blend, *blend_saved;
102 void *depth_stencil, *depth_stencil_saved;
103 void *rasterizer, *rasterizer_saved;
104 void *fragment_shader, *fragment_shader_saved;
105 void *vertex_shader, *vertex_shader_saved;
106 void *geometry_shader, *geometry_shader_saved;
107 void *tessctrl_shader, *tessctrl_shader_saved;
108 void *tesseval_shader, *tesseval_shader_saved;
109 void *velements, *velements_saved;
110 struct pipe_query *render_condition, *render_condition_saved;
111 uint render_condition_mode, render_condition_mode_saved;
112 boolean render_condition_cond, render_condition_cond_saved;
113
114 struct pipe_clip_state clip;
115 struct pipe_clip_state clip_saved;
116
117 struct pipe_framebuffer_state fb, fb_saved;
118 struct pipe_viewport_state vp, vp_saved;
119 struct pipe_blend_color blend_color;
120 unsigned sample_mask, sample_mask_saved;
121 unsigned min_samples, min_samples_saved;
122 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
123 };
124
125
126 static boolean delete_blend_state(struct cso_context *ctx, void *state)
127 {
128 struct cso_blend *cso = (struct cso_blend *)state;
129
130 if (ctx->blend == cso->data)
131 return FALSE;
132
133 if (cso->delete_state)
134 cso->delete_state(cso->context, cso->data);
135 FREE(state);
136 return TRUE;
137 }
138
139 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
140 {
141 struct cso_depth_stencil_alpha *cso =
142 (struct cso_depth_stencil_alpha *)state;
143
144 if (ctx->depth_stencil == cso->data)
145 return FALSE;
146
147 if (cso->delete_state)
148 cso->delete_state(cso->context, cso->data);
149 FREE(state);
150
151 return TRUE;
152 }
153
154 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
155 {
156 struct cso_sampler *cso = (struct cso_sampler *)state;
157 if (cso->delete_state)
158 cso->delete_state(cso->context, cso->data);
159 FREE(state);
160 return TRUE;
161 }
162
163 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
164 {
165 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
166
167 if (ctx->rasterizer == cso->data)
168 return FALSE;
169 if (cso->delete_state)
170 cso->delete_state(cso->context, cso->data);
171 FREE(state);
172 return TRUE;
173 }
174
175 static boolean delete_vertex_elements(struct cso_context *ctx,
176 void *state)
177 {
178 struct cso_velements *cso = (struct cso_velements *)state;
179
180 if (ctx->velements == cso->data)
181 return FALSE;
182
183 if (cso->delete_state)
184 cso->delete_state(cso->context, cso->data);
185 FREE(state);
186 return TRUE;
187 }
188
189
190 static inline boolean delete_cso(struct cso_context *ctx,
191 void *state, enum cso_cache_type type)
192 {
193 switch (type) {
194 case CSO_BLEND:
195 return delete_blend_state(ctx, state);
196 case CSO_SAMPLER:
197 return delete_sampler_state(ctx, state);
198 case CSO_DEPTH_STENCIL_ALPHA:
199 return delete_depth_stencil_state(ctx, state);
200 case CSO_RASTERIZER:
201 return delete_rasterizer_state(ctx, state);
202 case CSO_VELEMENTS:
203 return delete_vertex_elements(ctx, state);
204 default:
205 assert(0);
206 FREE(state);
207 }
208 return FALSE;
209 }
210
211 static inline void
212 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
213 int max_size, void *user_data)
214 {
215 struct cso_context *ctx = (struct cso_context *)user_data;
216    /* If we've exceeded the maximum size, remove a fourth of the entries;
217     * otherwise every subsequent call would trigger this same eviction. */
218 int hash_size = cso_hash_size(hash);
219 int max_entries = (max_size > hash_size) ? max_size : hash_size;
220 int to_remove = (max_size < max_entries) * max_entries/4;
221 struct cso_hash_iter iter = cso_hash_first_node(hash);
222 if (hash_size > max_size)
223 to_remove += hash_size - max_size;
224 while (to_remove) {
225       /* remove elements until we're good */
226       /* FIXME: currently we pick the nodes to remove at random */
227 void *cso = cso_hash_iter_data(iter);
228 if (delete_cso(ctx, cso, type)) {
229 iter = cso_hash_erase(hash, iter);
230 --to_remove;
231 } else
232 iter = cso_hash_iter_next(iter);
233 }
234 }
235
236 static void cso_init_vbuf(struct cso_context *cso)
237 {
238 struct u_vbuf_caps caps;
239
240 /* Install u_vbuf if there is anything unsupported. */
241 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
242 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
243 cso->aux_vertex_buffer_index);
244 }
245 }
246
247 struct cso_context *cso_create_context( struct pipe_context *pipe )
248 {
249 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
250 if (ctx == NULL)
251 goto out;
252
253 ctx->cache = cso_cache_create();
254 if (ctx->cache == NULL)
255 goto out;
256 cso_cache_set_sanitize_callback(ctx->cache,
257 sanitize_hash,
258 ctx);
259
260 ctx->pipe = pipe;
261 ctx->sample_mask = ~0;
262
263 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
264
265 cso_init_vbuf(ctx);
266
267 /* Enable for testing: */
268 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
269
270 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
271 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
272 ctx->has_geometry_shader = TRUE;
273 }
274 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
275 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
276 ctx->has_tessellation = TRUE;
277 }
278 if (pipe->screen->get_param(pipe->screen,
279 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
280 ctx->has_streamout = TRUE;
281 }
282
283 return ctx;
284
285 out:
286 cso_destroy_context( ctx );
287 return NULL;
288 }
289
290 /**
291 * Free the CSO context.
292 */
293 void cso_destroy_context( struct cso_context *ctx )
294 {
295 unsigned i;
296
297 if (ctx->pipe) {
298 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
299
300 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
301 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
302
303 {
304 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
305 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
306 struct pipe_screen *scr = ctx->pipe->screen;
307 unsigned sh;
308 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
309 int maxsam = scr->get_shader_param(scr, sh,
310 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
311 int maxview = scr->get_shader_param(scr, sh,
312 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
313 assert(maxsam <= PIPE_MAX_SAMPLERS);
314 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
315 if (maxsam > 0) {
316 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
317 }
318 if (maxview > 0) {
319 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
320 }
321 }
322 }
323
324 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
325 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
326 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
327 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
328 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
329 if (ctx->has_geometry_shader) {
330 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
331 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
332 }
333 if (ctx->has_tessellation) {
334 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
335 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
336 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
337 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
338 }
339 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
340
341 if (ctx->has_streamout)
342 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
343 }
344
345 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
346 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
347 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
348 }
349
350 util_unreference_framebuffer_state(&ctx->fb);
351 util_unreference_framebuffer_state(&ctx->fb_saved);
352
353 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
354 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
355
356 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
357 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
358 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
359 }
360
361 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
362 pipe_so_target_reference(&ctx->so_targets[i], NULL);
363 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
364 }
365
366 if (ctx->cache) {
367 cso_cache_delete( ctx->cache );
368 ctx->cache = NULL;
369 }
370
371 if (ctx->vbuf)
372 u_vbuf_destroy(ctx->vbuf);
373 FREE( ctx );
374 }
375
376
377 /* These functions will either find the state matching the given
378  * template in the cache, or create a new state from the template,
379  * insert it in the cache and return it.
380  */
381
382 /*
383  * If the driver returns 0 from the create method, the data member of
384  * the cso is set to the template itself.
385  */
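
/*
 * Example (illustrative sketch, given a struct cso_context *cso): repeated
 * calls with byte-for-byte identical templates hash to the same cached CSO,
 * so the driver object is created only once and redundant bind calls are
 * skipped.  The blend template values here are hypothetical.
 */
#if 0
   struct pipe_blend_state blend;

   memset(&blend, 0, sizeof(blend));
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].colormask = PIPE_MASK_RGBA;

   cso_set_blend(cso, &blend);  /* cache miss: create_blend_state + bind */
   cso_set_blend(cso, &blend);  /* cache hit: same handle, no re-bind */
#endif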
386
387 enum pipe_error cso_set_blend(struct cso_context *ctx,
388 const struct pipe_blend_state *templ)
389 {
390 unsigned key_size, hash_key;
391 struct cso_hash_iter iter;
392 void *handle;
393
394 key_size = templ->independent_blend_enable ?
395 sizeof(struct pipe_blend_state) :
396 (char *)&(templ->rt[1]) - (char *)templ;
397 hash_key = cso_construct_key((void*)templ, key_size);
398 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
399 (void*)templ, key_size);
400
401 if (cso_hash_iter_is_null(iter)) {
402 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
403 if (!cso)
404 return PIPE_ERROR_OUT_OF_MEMORY;
405
406 memset(&cso->state, 0, sizeof cso->state);
407 memcpy(&cso->state, templ, key_size);
408 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
409 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
410 cso->context = ctx->pipe;
411
412 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
413 if (cso_hash_iter_is_null(iter)) {
414 FREE(cso);
415 return PIPE_ERROR_OUT_OF_MEMORY;
416 }
417
418 handle = cso->data;
419 }
420 else {
421 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
422 }
423
424 if (ctx->blend != handle) {
425 ctx->blend = handle;
426 ctx->pipe->bind_blend_state(ctx->pipe, handle);
427 }
428 return PIPE_OK;
429 }
430
431 void cso_save_blend(struct cso_context *ctx)
432 {
433 assert(!ctx->blend_saved);
434 ctx->blend_saved = ctx->blend;
435 }
436
437 void cso_restore_blend(struct cso_context *ctx)
438 {
439 if (ctx->blend != ctx->blend_saved) {
440 ctx->blend = ctx->blend_saved;
441 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
442 }
443 ctx->blend_saved = NULL;
444 }
445
446
447
448 enum pipe_error
449 cso_set_depth_stencil_alpha(struct cso_context *ctx,
450 const struct pipe_depth_stencil_alpha_state *templ)
451 {
452 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
453 unsigned hash_key = cso_construct_key((void*)templ, key_size);
454 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
455 hash_key,
456 CSO_DEPTH_STENCIL_ALPHA,
457 (void*)templ, key_size);
458 void *handle;
459
460 if (cso_hash_iter_is_null(iter)) {
461 struct cso_depth_stencil_alpha *cso =
462 MALLOC(sizeof(struct cso_depth_stencil_alpha));
463 if (!cso)
464 return PIPE_ERROR_OUT_OF_MEMORY;
465
466 memcpy(&cso->state, templ, sizeof(*templ));
467 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
468 &cso->state);
469 cso->delete_state =
470 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
471 cso->context = ctx->pipe;
472
473 iter = cso_insert_state(ctx->cache, hash_key,
474 CSO_DEPTH_STENCIL_ALPHA, cso);
475 if (cso_hash_iter_is_null(iter)) {
476 FREE(cso);
477 return PIPE_ERROR_OUT_OF_MEMORY;
478 }
479
480 handle = cso->data;
481 }
482 else {
483 handle = ((struct cso_depth_stencil_alpha *)
484 cso_hash_iter_data(iter))->data;
485 }
486
487 if (ctx->depth_stencil != handle) {
488 ctx->depth_stencil = handle;
489 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
490 }
491 return PIPE_OK;
492 }
493
494 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
495 {
496 assert(!ctx->depth_stencil_saved);
497 ctx->depth_stencil_saved = ctx->depth_stencil;
498 }
499
500 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
501 {
502 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
503 ctx->depth_stencil = ctx->depth_stencil_saved;
504 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
505 ctx->depth_stencil_saved);
506 }
507 ctx->depth_stencil_saved = NULL;
508 }
509
510
511
512 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
513 const struct pipe_rasterizer_state *templ)
514 {
515 unsigned key_size = sizeof(struct pipe_rasterizer_state);
516 unsigned hash_key = cso_construct_key((void*)templ, key_size);
517 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
518 hash_key,
519 CSO_RASTERIZER,
520 (void*)templ, key_size);
521 void *handle = NULL;
522
523 if (cso_hash_iter_is_null(iter)) {
524 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
525 if (!cso)
526 return PIPE_ERROR_OUT_OF_MEMORY;
527
528 memcpy(&cso->state, templ, sizeof(*templ));
529 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
530 cso->delete_state =
531 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
532 cso->context = ctx->pipe;
533
534 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
535 if (cso_hash_iter_is_null(iter)) {
536 FREE(cso);
537 return PIPE_ERROR_OUT_OF_MEMORY;
538 }
539
540 handle = cso->data;
541 }
542 else {
543 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
544 }
545
546 if (ctx->rasterizer != handle) {
547 ctx->rasterizer = handle;
548 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
549 }
550 return PIPE_OK;
551 }
552
553 void cso_save_rasterizer(struct cso_context *ctx)
554 {
555 assert(!ctx->rasterizer_saved);
556 ctx->rasterizer_saved = ctx->rasterizer;
557 }
558
559 void cso_restore_rasterizer(struct cso_context *ctx)
560 {
561 if (ctx->rasterizer != ctx->rasterizer_saved) {
562 ctx->rasterizer = ctx->rasterizer_saved;
563 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
564 }
565 ctx->rasterizer_saved = NULL;
566 }
567
568
569 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
570 {
571 if (ctx->fragment_shader != handle) {
572 ctx->fragment_shader = handle;
573 ctx->pipe->bind_fs_state(ctx->pipe, handle);
574 }
575 }
576
577 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
578 {
579 if (handle == ctx->fragment_shader) {
580 /* unbind before deleting */
581 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
582 ctx->fragment_shader = NULL;
583 }
584 ctx->pipe->delete_fs_state(ctx->pipe, handle);
585 }
586
587 void cso_save_fragment_shader(struct cso_context *ctx)
588 {
589 assert(!ctx->fragment_shader_saved);
590 ctx->fragment_shader_saved = ctx->fragment_shader;
591 }
592
593 void cso_restore_fragment_shader(struct cso_context *ctx)
594 {
595 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
596 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
597 ctx->fragment_shader = ctx->fragment_shader_saved;
598 }
599 ctx->fragment_shader_saved = NULL;
600 }
601
602
603 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
604 {
605 if (ctx->vertex_shader != handle) {
606 ctx->vertex_shader = handle;
607 ctx->pipe->bind_vs_state(ctx->pipe, handle);
608 }
609 }
610
611 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
612 {
613 if (handle == ctx->vertex_shader) {
614 /* unbind before deleting */
615 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
616 ctx->vertex_shader = NULL;
617 }
618 ctx->pipe->delete_vs_state(ctx->pipe, handle);
619 }
620
621 void cso_save_vertex_shader(struct cso_context *ctx)
622 {
623 assert(!ctx->vertex_shader_saved);
624 ctx->vertex_shader_saved = ctx->vertex_shader;
625 }
626
627 void cso_restore_vertex_shader(struct cso_context *ctx)
628 {
629 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
630 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
631 ctx->vertex_shader = ctx->vertex_shader_saved;
632 }
633 ctx->vertex_shader_saved = NULL;
634 }
635
636
637 void cso_set_framebuffer(struct cso_context *ctx,
638 const struct pipe_framebuffer_state *fb)
639 {
640 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
641 util_copy_framebuffer_state(&ctx->fb, fb);
642 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
643 }
644 }
645
646 void cso_save_framebuffer(struct cso_context *ctx)
647 {
648 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
649 }
650
651 void cso_restore_framebuffer(struct cso_context *ctx)
652 {
653 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
654 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
655 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
656 util_unreference_framebuffer_state(&ctx->fb_saved);
657 }
658 }
659
660
661 void cso_set_viewport(struct cso_context *ctx,
662 const struct pipe_viewport_state *vp)
663 {
664 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
665 ctx->vp = *vp;
666 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
667 }
668 }
669
670 void cso_save_viewport(struct cso_context *ctx)
671 {
672 ctx->vp_saved = ctx->vp;
673 }
674
675
676 void cso_restore_viewport(struct cso_context *ctx)
677 {
678 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
679 ctx->vp = ctx->vp_saved;
680 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
681 }
682 }
683
684
685 void cso_set_blend_color(struct cso_context *ctx,
686 const struct pipe_blend_color *bc)
687 {
688 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
689 ctx->blend_color = *bc;
690 ctx->pipe->set_blend_color(ctx->pipe, bc);
691 }
692 }
693
694 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
695 {
696 if (ctx->sample_mask != sample_mask) {
697 ctx->sample_mask = sample_mask;
698 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
699 }
700 }
701
702 void cso_save_sample_mask(struct cso_context *ctx)
703 {
704 ctx->sample_mask_saved = ctx->sample_mask;
705 }
706
707 void cso_restore_sample_mask(struct cso_context *ctx)
708 {
709 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
710 }
711
712 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
713 {
714 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
715 ctx->min_samples = min_samples;
716 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
717 }
718 }
719
720 void cso_save_min_samples(struct cso_context *ctx)
721 {
722 ctx->min_samples_saved = ctx->min_samples;
723 }
724
725 void cso_restore_min_samples(struct cso_context *ctx)
726 {
727 cso_set_min_samples(ctx, ctx->min_samples_saved);
728 }
729
730 void cso_set_stencil_ref(struct cso_context *ctx,
731 const struct pipe_stencil_ref *sr)
732 {
733 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
734 ctx->stencil_ref = *sr;
735 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
736 }
737 }
738
739 void cso_save_stencil_ref(struct cso_context *ctx)
740 {
741 ctx->stencil_ref_saved = ctx->stencil_ref;
742 }
743
744
745 void cso_restore_stencil_ref(struct cso_context *ctx)
746 {
747 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
748 sizeof(ctx->stencil_ref))) {
749 ctx->stencil_ref = ctx->stencil_ref_saved;
750 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
751 }
752 }
753
754 void cso_set_render_condition(struct cso_context *ctx,
755 struct pipe_query *query,
756 boolean condition, uint mode)
757 {
758 struct pipe_context *pipe = ctx->pipe;
759
760 if (ctx->render_condition != query ||
761 ctx->render_condition_mode != mode ||
762 ctx->render_condition_cond != condition) {
763 pipe->render_condition(pipe, query, condition, mode);
764 ctx->render_condition = query;
765 ctx->render_condition_cond = condition;
766 ctx->render_condition_mode = mode;
767 }
768 }
769
770 void cso_save_render_condition(struct cso_context *ctx)
771 {
772 ctx->render_condition_saved = ctx->render_condition;
773 ctx->render_condition_cond_saved = ctx->render_condition_cond;
774 ctx->render_condition_mode_saved = ctx->render_condition_mode;
775 }
776
777 void cso_restore_render_condition(struct cso_context *ctx)
778 {
779 cso_set_render_condition(ctx, ctx->render_condition_saved,
780 ctx->render_condition_cond_saved,
781 ctx->render_condition_mode_saved);
782 }
783
784 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
785 {
786 assert(ctx->has_geometry_shader || !handle);
787
788 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
789 ctx->geometry_shader = handle;
790 ctx->pipe->bind_gs_state(ctx->pipe, handle);
791 }
792 }
793
794 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
795 {
796 if (handle == ctx->geometry_shader) {
797 /* unbind before deleting */
798 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
799 ctx->geometry_shader = NULL;
800 }
801 ctx->pipe->delete_gs_state(ctx->pipe, handle);
802 }
803
804 void cso_save_geometry_shader(struct cso_context *ctx)
805 {
806 if (!ctx->has_geometry_shader) {
807 return;
808 }
809
810 assert(!ctx->geometry_shader_saved);
811 ctx->geometry_shader_saved = ctx->geometry_shader;
812 }
813
814 void cso_restore_geometry_shader(struct cso_context *ctx)
815 {
816 if (!ctx->has_geometry_shader) {
817 return;
818 }
819
820 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
821 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
822 ctx->geometry_shader = ctx->geometry_shader_saved;
823 }
824 ctx->geometry_shader_saved = NULL;
825 }
826
827 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
828 {
829 assert(ctx->has_tessellation || !handle);
830
831 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
832 ctx->tessctrl_shader = handle;
833 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
834 }
835 }
836
837 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
838 {
839 if (handle == ctx->tessctrl_shader) {
840 /* unbind before deleting */
841 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
842 ctx->tessctrl_shader = NULL;
843 }
844 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
845 }
846
847 void cso_save_tessctrl_shader(struct cso_context *ctx)
848 {
849 if (!ctx->has_tessellation) {
850 return;
851 }
852
853 assert(!ctx->tessctrl_shader_saved);
854 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
855 }
856
857 void cso_restore_tessctrl_shader(struct cso_context *ctx)
858 {
859 if (!ctx->has_tessellation) {
860 return;
861 }
862
863 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
864 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
865 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
866 }
867 ctx->tessctrl_shader_saved = NULL;
868 }
869
870 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
871 {
872 assert(ctx->has_tessellation || !handle);
873
874 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
875 ctx->tesseval_shader = handle;
876 ctx->pipe->bind_tes_state(ctx->pipe, handle);
877 }
878 }
879
880 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
881 {
882 if (handle == ctx->tesseval_shader) {
883 /* unbind before deleting */
884 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
885 ctx->tesseval_shader = NULL;
886 }
887 ctx->pipe->delete_tes_state(ctx->pipe, handle);
888 }
889
890 void cso_save_tesseval_shader(struct cso_context *ctx)
891 {
892 if (!ctx->has_tessellation) {
893 return;
894 }
895
896 assert(!ctx->tesseval_shader_saved);
897 ctx->tesseval_shader_saved = ctx->tesseval_shader;
898 }
899
900 void cso_restore_tesseval_shader(struct cso_context *ctx)
901 {
902 if (!ctx->has_tessellation) {
903 return;
904 }
905
906 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
907 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
908 ctx->tesseval_shader = ctx->tesseval_shader_saved;
909 }
910 ctx->tesseval_shader_saved = NULL;
911 }
912
913 /* clip state */
914
915 static inline void
916 clip_state_cpy(struct pipe_clip_state *dst,
917 const struct pipe_clip_state *src)
918 {
919 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
920 }
921
922 static inline int
923 clip_state_cmp(const struct pipe_clip_state *a,
924 const struct pipe_clip_state *b)
925 {
926 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
927 }
928
929 void
930 cso_set_clip(struct cso_context *ctx,
931 const struct pipe_clip_state *clip)
932 {
933 if (clip_state_cmp(&ctx->clip, clip)) {
934 clip_state_cpy(&ctx->clip, clip);
935 ctx->pipe->set_clip_state(ctx->pipe, clip);
936 }
937 }
938
939 void
940 cso_save_clip(struct cso_context *ctx)
941 {
942 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
943 }
944
945 void
946 cso_restore_clip(struct cso_context *ctx)
947 {
948 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
949 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
950 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
951 }
952 }
953
954 enum pipe_error
955 cso_set_vertex_elements(struct cso_context *ctx,
956 unsigned count,
957 const struct pipe_vertex_element *states)
958 {
959 struct u_vbuf *vbuf = ctx->vbuf;
960 unsigned key_size, hash_key;
961 struct cso_hash_iter iter;
962 void *handle;
963 struct cso_velems_state velems_state;
964
965 if (vbuf) {
966 u_vbuf_set_vertex_elements(vbuf, count, states);
967 return PIPE_OK;
968 }
969
970    /* Need to include the count in the stored state data too.  Otherwise
971     * two element arrays with different counts could have identical leading
972     * elements, and there's no guarantee the hash would differ in that case
973     * either.  (See the usage sketch after this function.)
974     */
975 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
976 velems_state.count = count;
977 memcpy(velems_state.velems, states,
978 sizeof(struct pipe_vertex_element) * count);
979 hash_key = cso_construct_key((void*)&velems_state, key_size);
980 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
981 (void*)&velems_state, key_size);
982
983 if (cso_hash_iter_is_null(iter)) {
984 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
985 if (!cso)
986 return PIPE_ERROR_OUT_OF_MEMORY;
987
988 memcpy(&cso->state, &velems_state, key_size);
989 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
990 &cso->state.velems[0]);
991 cso->delete_state =
992 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
993 cso->context = ctx->pipe;
994
995 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
996 if (cso_hash_iter_is_null(iter)) {
997 FREE(cso);
998 return PIPE_ERROR_OUT_OF_MEMORY;
999 }
1000
1001 handle = cso->data;
1002 }
1003 else {
1004 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1005 }
1006
1007 if (ctx->velements != handle) {
1008 ctx->velements = handle;
1009 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1010 }
1011 return PIPE_OK;
1012 }
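
/*
 * Example (illustrative sketch, given a struct cso_context *cso): two
 * interleaved attributes, position and texcoord, pulled from vertex buffer
 * slot 0.  The formats and offsets are hypothetical; the count is passed
 * alongside the array and becomes part of the cache key as described above.
 */
#if 0
   struct pipe_vertex_element velems[2];

   memset(velems, 0, sizeof(velems));
   velems[0].src_offset = 0;                      /* position: 3 floats */
   velems[0].src_format = PIPE_FORMAT_R32G32B32_FLOAT;
   velems[0].vertex_buffer_index = 0;
   velems[1].src_offset = 3 * sizeof(float);      /* texcoord: 2 floats */
   velems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
   velems[1].vertex_buffer_index = 0;

   cso_set_vertex_elements(cso, 2, velems);
#endif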
1013
1014 void cso_save_vertex_elements(struct cso_context *ctx)
1015 {
1016 struct u_vbuf *vbuf = ctx->vbuf;
1017
1018 if (vbuf) {
1019 u_vbuf_save_vertex_elements(vbuf);
1020 return;
1021 }
1022
1023 assert(!ctx->velements_saved);
1024 ctx->velements_saved = ctx->velements;
1025 }
1026
1027 void cso_restore_vertex_elements(struct cso_context *ctx)
1028 {
1029 struct u_vbuf *vbuf = ctx->vbuf;
1030
1031 if (vbuf) {
1032 u_vbuf_restore_vertex_elements(vbuf);
1033 return;
1034 }
1035
1036 if (ctx->velements != ctx->velements_saved) {
1037 ctx->velements = ctx->velements_saved;
1038 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1039 }
1040 ctx->velements_saved = NULL;
1041 }
1042
1043 /* vertex buffers */
1044
1045 void cso_set_vertex_buffers(struct cso_context *ctx,
1046 unsigned start_slot, unsigned count,
1047 const struct pipe_vertex_buffer *buffers)
1048 {
1049 struct u_vbuf *vbuf = ctx->vbuf;
1050
1051 if (vbuf) {
1052 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1053 return;
1054 }
1055
1056    /* Keep track of what's in the auxiliary slot, so that we can save and
1057     * restore it around meta ops (see the sketch after this function). */
1058 if (start_slot <= ctx->aux_vertex_buffer_index &&
1059 start_slot+count > ctx->aux_vertex_buffer_index) {
1060 if (buffers) {
1061 const struct pipe_vertex_buffer *vb =
1062 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1063
1064 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1065 vb->buffer);
1066 memcpy(&ctx->aux_vertex_buffer_current, vb,
1067 sizeof(struct pipe_vertex_buffer));
1068 }
1069 else {
1070 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1071 NULL);
1072 ctx->aux_vertex_buffer_current.user_buffer = NULL;
1073 }
1074 }
1075
1076 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1077 }
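
/*
 * Example (illustrative sketch, given a struct cso_context *cso): how a meta
 * op typically borrows the auxiliary vertex buffer slot tracked above: stash
 * the current contents, bind its own vertex data into that slot, draw, then
 * restore.  'quad_vb' is a hypothetical placeholder resource.
 */
#if 0
   struct pipe_vertex_buffer vb;
   unsigned slot = cso_get_aux_vertex_buffer_slot(cso);

   cso_save_aux_vertex_buffer_slot(cso);

   memset(&vb, 0, sizeof(vb));
   vb.buffer = quad_vb;                     /* hypothetical placeholder */
   vb.stride = 4 * sizeof(float);
   cso_set_vertex_buffers(cso, slot, 1, &vb);

   /* ... draw ... */

   cso_restore_aux_vertex_buffer_slot(cso);
#endif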
1078
1079 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1080 {
1081 struct u_vbuf *vbuf = ctx->vbuf;
1082
1083 if (vbuf) {
1084 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1085 return;
1086 }
1087
1088 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1089 ctx->aux_vertex_buffer_current.buffer);
1090 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1091 sizeof(struct pipe_vertex_buffer));
1092 }
1093
1094 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1095 {
1096 struct u_vbuf *vbuf = ctx->vbuf;
1097
1098 if (vbuf) {
1099 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1100 return;
1101 }
1102
1103 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1104 &ctx->aux_vertex_buffer_saved);
1105 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1106 }
1107
1108 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1109 {
1110 return ctx->aux_vertex_buffer_index;
1111 }
1112
1113
1114 /**************** sampler and sampler view state *****************************/
1115
1116 static enum pipe_error
1117 single_sampler(struct cso_context *ctx,
1118 struct sampler_info *info,
1119 unsigned idx,
1120 const struct pipe_sampler_state *templ)
1121 {
1122 void *handle = NULL;
1123
1124 if (templ != NULL) {
1125 unsigned key_size = sizeof(struct pipe_sampler_state);
1126 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1127 struct cso_hash_iter iter =
1128 cso_find_state_template(ctx->cache,
1129 hash_key, CSO_SAMPLER,
1130 (void *) templ, key_size);
1131
1132 if (cso_hash_iter_is_null(iter)) {
1133 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1134 if (!cso)
1135 return PIPE_ERROR_OUT_OF_MEMORY;
1136
1137 memcpy(&cso->state, templ, sizeof(*templ));
1138 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1139 cso->delete_state =
1140 (cso_state_callback) ctx->pipe->delete_sampler_state;
1141 cso->context = ctx->pipe;
1142
1143 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1144 if (cso_hash_iter_is_null(iter)) {
1145 FREE(cso);
1146 return PIPE_ERROR_OUT_OF_MEMORY;
1147 }
1148
1149 handle = cso->data;
1150 }
1151 else {
1152 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1153 }
1154 }
1155
1156 info->samplers[idx] = handle;
1157
1158 return PIPE_OK;
1159 }
1160
1161 enum pipe_error
1162 cso_single_sampler(struct cso_context *ctx,
1163 unsigned shader_stage,
1164 unsigned idx,
1165 const struct pipe_sampler_state *templ)
1166 {
1167 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1168 }
1169
1170
1171
1172 static void
1173 single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1174 {
1175 struct sampler_info *info = &ctx->samplers[shader_stage];
1176 unsigned i;
1177
1178 /* find highest non-null sampler */
1179 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1180 if (info->samplers[i - 1] != NULL)
1181 break;
1182 }
1183
1184 info->nr_samplers = i;
1185 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0, i,
1186 info->samplers);
1187 }
1188
1189 void
1190 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1191 {
1192 single_sampler_done(ctx, shader_stage);
1193 }
1194
1195
1196 /*
1197  * If the function encounters any errors, it returns the last one.
1198  * This is done so that we always try to set as many samplers
1199  * as possible.
1200  */
1201 enum pipe_error
1202 cso_set_samplers(struct cso_context *ctx,
1203 unsigned shader_stage,
1204 unsigned nr,
1205 const struct pipe_sampler_state **templates)
1206 {
1207 struct sampler_info *info = &ctx->samplers[shader_stage];
1208 unsigned i;
1209 enum pipe_error temp, error = PIPE_OK;
1210
1211 /* TODO: fastpath
1212 */
1213
1214 for (i = 0; i < nr; i++) {
1215 temp = single_sampler(ctx, info, i, templates[i]);
1216 if (temp != PIPE_OK)
1217 error = temp;
1218 }
1219
1220 for ( ; i < info->nr_samplers; i++) {
1221 temp = single_sampler(ctx, info, i, NULL);
1222 if (temp != PIPE_OK)
1223 error = temp;
1224 }
1225
1226 single_sampler_done(ctx, shader_stage);
1227
1228 return error;
1229 }
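
/*
 * Example (illustrative sketch, given a struct cso_context *cso): binding a
 * single nearest-filtering, edge-clamping fragment sampler, as a blit might.
 * The template values are hypothetical; identical templates are deduplicated
 * through the CSO cache just like the other state objects above.
 */
#if 0
   struct pipe_sampler_state sampler;
   const struct pipe_sampler_state *samplers[1] = { &sampler };

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.normalized_coords = 1;

   cso_set_samplers(cso, PIPE_SHADER_FRAGMENT, 1, samplers);
#endif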
1230
1231 void
1232 cso_save_fragment_samplers(struct cso_context *ctx)
1233 {
1234 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1235
1236 ctx->nr_fragment_samplers_saved = info->nr_samplers;
1237 memcpy(ctx->fragment_samplers_saved, info->samplers,
1238 sizeof(info->samplers));
1239 }
1240
1241
1242 void
1243 cso_restore_fragment_samplers(struct cso_context *ctx)
1244 {
1245 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1246
1247 info->nr_samplers = ctx->nr_fragment_samplers_saved;
1248 memcpy(info->samplers, ctx->fragment_samplers_saved,
1249 sizeof(info->samplers));
1250 single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1251 }
1252
1253
1254 void
1255 cso_set_sampler_views(struct cso_context *ctx,
1256 unsigned shader_stage,
1257 unsigned count,
1258 struct pipe_sampler_view **views)
1259 {
1260 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1261 unsigned i;
1262 boolean any_change = FALSE;
1263
1264 /* reference new views */
1265 for (i = 0; i < count; i++) {
1266 any_change |= ctx->fragment_views[i] != views[i];
1267 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1268 }
1269 /* unref extra old views, if any */
1270 for (; i < ctx->nr_fragment_views; i++) {
1271 any_change |= ctx->fragment_views[i] != NULL;
1272 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1273 }
1274
1275 /* bind the new sampler views */
1276 if (any_change) {
1277 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1278 MAX2(ctx->nr_fragment_views, count),
1279 ctx->fragment_views);
1280 }
1281
1282 ctx->nr_fragment_views = count;
1283 }
1284 else
1285 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1286 }
1287
1288
1289 void
1290 cso_save_fragment_sampler_views(struct cso_context *ctx)
1291 {
1292 unsigned i;
1293
1294 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1295
1296 for (i = 0; i < ctx->nr_fragment_views; i++) {
1297 assert(!ctx->fragment_views_saved[i]);
1298 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1299 ctx->fragment_views[i]);
1300 }
1301 }
1302
1303
1304 void
1305 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1306 {
1307 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1308 unsigned num;
1309
1310 for (i = 0; i < nr_saved; i++) {
1311 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1312 /* move the reference from one pointer to another */
1313 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1314 ctx->fragment_views_saved[i] = NULL;
1315 }
1316 for (; i < ctx->nr_fragment_views; i++) {
1317 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1318 }
1319
1320 num = MAX2(ctx->nr_fragment_views, nr_saved);
1321
1322 /* bind the old/saved sampler views */
1323 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1324 ctx->fragment_views);
1325
1326 ctx->nr_fragment_views = nr_saved;
1327 ctx->nr_fragment_views_saved = 0;
1328 }
1329
1330
1331 void
1332 cso_set_stream_outputs(struct cso_context *ctx,
1333 unsigned num_targets,
1334 struct pipe_stream_output_target **targets,
1335 const unsigned *offsets)
1336 {
1337 struct pipe_context *pipe = ctx->pipe;
1338 uint i;
1339
1340 if (!ctx->has_streamout) {
1341 assert(num_targets == 0);
1342 return;
1343 }
1344
1345 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1346 /* Nothing to do. */
1347 return;
1348 }
1349
1350 /* reference new targets */
1351 for (i = 0; i < num_targets; i++) {
1352 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1353 }
1354 /* unref extra old targets, if any */
1355 for (; i < ctx->nr_so_targets; i++) {
1356 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1357 }
1358
1359 pipe->set_stream_output_targets(pipe, num_targets, targets,
1360 offsets);
1361 ctx->nr_so_targets = num_targets;
1362 }
1363
1364 void
1365 cso_save_stream_outputs(struct cso_context *ctx)
1366 {
1367 uint i;
1368
1369 if (!ctx->has_streamout) {
1370 return;
1371 }
1372
1373 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1374
1375 for (i = 0; i < ctx->nr_so_targets; i++) {
1376 assert(!ctx->so_targets_saved[i]);
1377 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1378 }
1379 }
1380
1381 void
1382 cso_restore_stream_outputs(struct cso_context *ctx)
1383 {
1384 struct pipe_context *pipe = ctx->pipe;
1385 uint i;
1386 unsigned offset[PIPE_MAX_SO_BUFFERS];
1387
1388 if (!ctx->has_streamout) {
1389 return;
1390 }
1391
1392 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1393 /* Nothing to do. */
1394 return;
1395 }
1396
1397 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1398 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1399 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1400 /* move the reference from one pointer to another */
1401 ctx->so_targets[i] = ctx->so_targets_saved[i];
1402 ctx->so_targets_saved[i] = NULL;
1403 /* -1 means append */
1404 offset[i] = (unsigned)-1;
1405 }
1406 for (; i < ctx->nr_so_targets; i++) {
1407 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1408 }
1409
1410 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1411 ctx->so_targets, offset);
1412
1413 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1414 ctx->nr_so_targets_saved = 0;
1415 }
1416
1417 /* constant buffers */
1418
1419 void
1420 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1421 unsigned index, struct pipe_constant_buffer *cb)
1422 {
1423 struct pipe_context *pipe = cso->pipe;
1424
1425 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1426
1427 if (index == 0) {
1428 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1429 }
1430 }
1431
1432 void
1433 cso_set_constant_buffer_resource(struct cso_context *cso,
1434 unsigned shader_stage,
1435 unsigned index,
1436 struct pipe_resource *buffer)
1437 {
1438 if (buffer) {
1439 struct pipe_constant_buffer cb;
1440 cb.buffer = buffer;
1441 cb.buffer_offset = 0;
1442 cb.buffer_size = buffer->width0;
1443 cb.user_buffer = NULL;
1444 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1445 } else {
1446 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1447 }
1448 }
1449
1450 void
1451 cso_save_constant_buffer_slot0(struct cso_context *cso,
1452 unsigned shader_stage)
1453 {
1454 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1455 &cso->aux_constbuf_current[shader_stage]);
1456 }
1457
1458 void
1459 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1460 unsigned shader_stage)
1461 {
1462 cso_set_constant_buffer(cso, shader_stage, 0,
1463 &cso->aux_constbuf_saved[shader_stage]);
1464 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1465 NULL);
1466 }
1467
1468 /* drawing */
1469
1470 void
1471 cso_set_index_buffer(struct cso_context *cso,
1472 const struct pipe_index_buffer *ib)
1473 {
1474 struct u_vbuf *vbuf = cso->vbuf;
1475
1476 if (vbuf) {
1477 u_vbuf_set_index_buffer(vbuf, ib);
1478 } else {
1479 struct pipe_context *pipe = cso->pipe;
1480 pipe->set_index_buffer(pipe, ib);
1481 }
1482 }
1483
1484 void
1485 cso_draw_vbo(struct cso_context *cso,
1486 const struct pipe_draw_info *info)
1487 {
1488 struct u_vbuf *vbuf = cso->vbuf;
1489
1490 if (vbuf) {
1491 u_vbuf_draw_vbo(vbuf, info);
1492 } else {
1493 struct pipe_context *pipe = cso->pipe;
1494 pipe->draw_vbo(pipe, info);
1495 }
1496 }
1497
1498 void
1499 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1500 {
1501 struct pipe_draw_info info;
1502
1503 util_draw_init_info(&info);
1504
1505 info.mode = mode;
1506 info.start = start;
1507 info.count = count;
1508 info.min_index = start;
1509 info.max_index = start + count - 1;
1510
1511 cso_draw_vbo(cso, &info);
1512 }
1513
1514 void
1515 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1516 uint start, uint count,
1517 uint start_instance, uint instance_count)
1518 {
1519 struct pipe_draw_info info;
1520
1521 util_draw_init_info(&info);
1522
1523 info.mode = mode;
1524 info.start = start;
1525 info.count = count;
1526 info.min_index = start;
1527 info.max_index = start + count - 1;
1528 info.start_instance = start_instance;
1529 info.instance_count = instance_count;
1530
1531 cso_draw_vbo(cso, &info);
1532 }