/**************************************************************************
 *
 * Copyright 2007 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/**
 * @file
 *
 * Wrap the cso cache & hash mechanisms in a simplified
 * pipe-driver-specific interface.
 *
 * @author Zack Rusin <zackr@vmware.com>
 * @author Keith Whitwell <keithw@vmware.com>
 */
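
/*
 * Illustrative usage sketch (not part of the original file): a state
 * tracker typically wraps its pipe_context in a cso_context once, routes
 * state changes through the cso_* setters so that driver CSOs are created
 * and cached transparently, and tears everything down in the order shown.
 * The pipe_blend_state fields below are just an example template.
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(cso, &blend);     // repeated calls with the same
 *                                    // template reuse one cached CSO
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *
 *    cso_release_all(cso);           // unbind + unreference everything
 *    cso_destroy_context(cso);
 */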

#include "pipe/p_state.h"
#include "util/u_draw.h"
#include "util/u_framebuffer.h"
#include "util/u_inlines.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_vbuf.h"
#include "tgsi/tgsi_parse.h"

#include "cso_cache/cso_context.h"
#include "cso_cache/cso_cache.h"
#include "cso_cache/cso_hash.h"
#include "cso_context.h"

/**
 * Info related to samplers and sampler views.
 * We have one of these for each shader stage.
 */
struct sampler_info
{
   struct {
      void *samplers[PIPE_MAX_SAMPLERS];
      unsigned nr_samplers;
   } hw;

   void *samplers[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers;

   void *samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers_saved;

   struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_views;

   struct pipe_sampler_view *views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_views_saved;
};



struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;
   struct u_vbuf *vbuf;

   boolean has_geometry_shader;
   boolean has_streamout;

   struct sampler_info samplers[PIPE_SHADER_TYPES];

   struct pipe_vertex_buffer aux_vertex_buffer_current;
   struct pipe_vertex_buffer aux_vertex_buffer_saved;
   unsigned aux_vertex_buffer_index;

   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
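
/*
 * Illustrative sketch (not in the original file) of the 1-deep
 * save/restore stack mentioned above, as a meta operation (e.g. a blit)
 * would use it: save the pieces of state about to be clobbered, set the
 * meta state, draw, then restore.  'blit_blend' and 'blit_fs' are assumed
 * caller-provided objects.
 *
 *    cso_save_blend(cso);
 *    cso_save_fragment_shader(cso);
 *
 *    cso_set_blend(cso, &blit_blend);
 *    cso_set_fragment_shader_handle(cso, blit_fs);
 *    ... draw ...
 *
 *    cso_restore_blend(cso);
 *    cso_restore_fragment_shader(cso);
 */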


static boolean delete_blend_state(struct cso_context *ctx, void *state)
{
   struct cso_blend *cso = (struct cso_blend *)state;

   if (ctx->blend == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
{
   struct cso_depth_stencil_alpha *cso =
      (struct cso_depth_stencil_alpha *)state;

   if (ctx->depth_stencil == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);

   return TRUE;
}

static boolean delete_sampler_state(struct cso_context *ctx, void *state)
{
   struct cso_sampler *cso = (struct cso_sampler *)state;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
{
   struct cso_rasterizer *cso = (struct cso_rasterizer *)state;

   if (ctx->rasterizer == cso->data)
      return FALSE;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_vertex_elements(struct cso_context *ctx,
                                      void *state)
{
   struct cso_velements *cso = (struct cso_velements *)state;

   if (ctx->velements == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}


static INLINE boolean delete_cso(struct cso_context *ctx,
                                 void *state, enum cso_cache_type type)
{
   switch (type) {
   case CSO_BLEND:
      return delete_blend_state(ctx, state);
   case CSO_SAMPLER:
      return delete_sampler_state(ctx, state);
   case CSO_DEPTH_STENCIL_ALPHA:
      return delete_depth_stencil_state(ctx, state);
   case CSO_RASTERIZER:
      return delete_rasterizer_state(ctx, state);
   case CSO_VELEMENTS:
      return delete_vertex_elements(ctx, state);
   default:
      assert(0);
      FREE(state);
   }
   return FALSE;
}

static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the entries.
    * Otherwise every subsequent call would have to do the same work again.
    * E.g. with max_size == 100 and hash_size == 102 we remove 25 + 2 = 27
    * entries, so the next few insertions don't immediately re-trigger this.
    */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove =  (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* FIXME: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}

static void cso_init_vbuf(struct cso_context *cso)
{
   struct u_vbuf_caps caps;

   u_vbuf_get_caps(cso->pipe->screen, &caps);

   /* Install u_vbuf if there is anything unsupported. */
   if (!caps.buffer_offset_unaligned ||
       !caps.buffer_stride_unaligned ||
       !caps.velem_src_offset_unaligned ||
       !caps.format_fixed32 ||
       !caps.format_float16 ||
       !caps.format_float64 ||
       !caps.format_norm32 ||
       !caps.format_scaled32 ||
       !caps.user_vertex_buffers) {
      cso->vbuf = u_vbuf_create(cso->pipe, &caps,
                                cso->aux_vertex_buffer_index);
   }
}

struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (ctx == NULL)
      goto out;

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask = ~0;

   ctx->aux_vertex_buffer_index = 0; /* 0 for now */

   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}

/**
 * Prior to context destruction, this function unbinds all state objects.
 */
void cso_release_all( struct cso_context *ctx )
{
   unsigned i, shader;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );

      {
         static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
         static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
         struct pipe_screen *scr = ctx->pipe->screen;
         unsigned sh;
         for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
            int maxsam = scr->get_shader_param(scr, sh,
                                               PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
            int maxview = scr->get_shader_param(scr, sh,
                                                PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
            assert(maxsam <= PIPE_MAX_SAMPLERS);
            assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
            if (maxsam > 0) {
               ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
            }
            if (maxview > 0) {
               ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
            }
         }
      }

      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );

      if (ctx->pipe->set_stream_output_targets)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
   }

   /* free the sampler views for each shader stage */
   for (shader = 0; shader < Elements(ctx->samplers); shader++) {
      struct sampler_info *info = &ctx->samplers[shader];
      for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
         pipe_sampler_view_reference(&info->views[i], NULL);
         pipe_sampler_view_reference(&info->views_saved[i], NULL);
      }
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);

   for (i = 0; i < PIPE_SHADER_TYPES; i++) {
      pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
      pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
   }

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }
}


/**
 * Free the CSO context.  NOTE: the state tracker should have previously
 * called cso_release_all().
 */
void cso_destroy_context( struct cso_context *ctx )
{
   if (ctx) {
      if (ctx->vbuf)
         u_vbuf_destroy(ctx->vbuf);
      FREE( ctx );
   }
}


/* These functions will either find the state for the given template
 * in the cache, or create a new state from the template, insert it
 * into the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of
 * the cso is set to the template itself.
 */

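/*
 * Illustrative sketch (not in the original file): how a caller might use
 * cso_set_blend().  The template below is an assumed example; it is
 * zeroed first so padding and unused fields hash consistently, and
 * repeated calls with an identical template reuse the cached driver CSO
 * instead of calling create_blend_state() again.
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].blend_enable = 1;
 *    blend.rt[0].rgb_func = PIPE_BLEND_ADD;
 *    blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].alpha_func = PIPE_BLEND_ADD;
 *    blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(cso, &blend);
 */
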
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_blend(struct cso_context *ctx)
{
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}

void cso_restore_blend(struct cso_context *ctx)
{
   if (ctx->blend != ctx->blend_saved) {
      ctx->blend = ctx->blend_saved;
      ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
   }
   ctx->blend_saved = NULL;
}



enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}

void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
{
   if (ctx->depth_stencil != ctx->depth_stencil_saved) {
      ctx->depth_stencil = ctx->depth_stencil_saved;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
                                                ctx->depth_stencil_saved);
   }
   ctx->depth_stencil_saved = NULL;
}



enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_rasterizer(struct cso_context *ctx)
{
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}

void cso_restore_rasterizer(struct cso_context *ctx)
{
   if (ctx->rasterizer != ctx->rasterizer_saved) {
      ctx->rasterizer = ctx->rasterizer_saved;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
   }
   ctx->rasterizer_saved = NULL;
}


void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
{
   if (ctx->fragment_shader != handle) {
      ctx->fragment_shader = handle;
      ctx->pipe->bind_fs_state(ctx->pipe, handle);
   }
}

void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->fragment_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_fs_state(ctx->pipe, NULL);
      ctx->fragment_shader = NULL;
   }
   ctx->pipe->delete_fs_state(ctx->pipe, handle);
}

void cso_save_fragment_shader(struct cso_context *ctx)
{
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}

void cso_restore_fragment_shader(struct cso_context *ctx)
{
   if (ctx->fragment_shader_saved != ctx->fragment_shader) {
      ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
      ctx->fragment_shader = ctx->fragment_shader_saved;
   }
   ctx->fragment_shader_saved = NULL;
}


void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
{
   if (ctx->vertex_shader != handle) {
      ctx->vertex_shader = handle;
      ctx->pipe->bind_vs_state(ctx->pipe, handle);
   }
}

void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->vertex_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_vs_state(ctx->pipe, NULL);
      ctx->vertex_shader = NULL;
   }
   ctx->pipe->delete_vs_state(ctx->pipe, handle);
}

void cso_save_vertex_shader(struct cso_context *ctx)
{
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}

void cso_restore_vertex_shader(struct cso_context *ctx)
{
   if (ctx->vertex_shader_saved != ctx->vertex_shader) {
      ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
      ctx->vertex_shader = ctx->vertex_shader_saved;
   }
   ctx->vertex_shader_saved = NULL;
}


void cso_set_framebuffer(struct cso_context *ctx,
                         const struct pipe_framebuffer_state *fb)
{
   if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
      util_copy_framebuffer_state(&ctx->fb, fb);
      ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
   }
}

void cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}

void cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
}


void cso_set_viewport(struct cso_context *ctx,
                      const struct pipe_viewport_state *vp)
{
   if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
      ctx->vp = *vp;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
   }
}

void cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}


void cso_restore_viewport(struct cso_context *ctx)
{
   if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
      ctx->vp = ctx->vp_saved;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
   }
}


void cso_set_blend_color(struct cso_context *ctx,
                         const struct pipe_blend_color *bc)
{
   if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
      ctx->blend_color = *bc;
      ctx->pipe->set_blend_color(ctx->pipe, bc);
   }
}

void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
{
   if (ctx->sample_mask != sample_mask) {
      ctx->sample_mask = sample_mask;
      ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
   }
}

void cso_save_sample_mask(struct cso_context *ctx)
{
   ctx->sample_mask_saved = ctx->sample_mask;
}

void cso_restore_sample_mask(struct cso_context *ctx)
{
   cso_set_sample_mask(ctx, ctx->sample_mask_saved);
}

void cso_set_stencil_ref(struct cso_context *ctx,
                         const struct pipe_stencil_ref *sr)
{
   if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = *sr;
      ctx->pipe->set_stencil_ref(ctx->pipe, sr);
   }
}

void cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}


void cso_restore_stencil_ref(struct cso_context *ctx)
{
   if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
              sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = ctx->stencil_ref_saved;
      ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
   }
}

void cso_set_render_condition(struct cso_context *ctx,
                              struct pipe_query *query,
                              boolean condition, uint mode)
{
   struct pipe_context *pipe = ctx->pipe;

   if (ctx->render_condition != query ||
       ctx->render_condition_mode != mode ||
       ctx->render_condition_cond != condition) {
      pipe->render_condition(pipe, query, condition, mode);
      ctx->render_condition = query;
      ctx->render_condition_cond = condition;
      ctx->render_condition_mode = mode;
   }
}

void cso_save_render_condition(struct cso_context *ctx)
{
   ctx->render_condition_saved = ctx->render_condition;
   ctx->render_condition_cond_saved = ctx->render_condition_cond;
   ctx->render_condition_mode_saved = ctx->render_condition_mode;
}

void cso_restore_render_condition(struct cso_context *ctx)
{
   cso_set_render_condition(ctx, ctx->render_condition_saved,
                            ctx->render_condition_cond_saved,
                            ctx->render_condition_mode_saved);
}

void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_geometry_shader || !handle);

   if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
      ctx->geometry_shader = handle;
      ctx->pipe->bind_gs_state(ctx->pipe, handle);
   }
}

void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->geometry_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_gs_state(ctx->pipe, NULL);
      ctx->geometry_shader = NULL;
   }
   ctx->pipe->delete_gs_state(ctx->pipe, handle);
}

void cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}

void cso_restore_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   if (ctx->geometry_shader_saved != ctx->geometry_shader) {
      ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
      ctx->geometry_shader = ctx->geometry_shader_saved;
   }
   ctx->geometry_shader_saved = NULL;
}

/* clip state */

static INLINE void
clip_state_cpy(struct pipe_clip_state *dst,
               const struct pipe_clip_state *src)
{
   memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
}

static INLINE int
clip_state_cmp(const struct pipe_clip_state *a,
               const struct pipe_clip_state *b)
{
   return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
}

void
cso_set_clip(struct cso_context *ctx,
             const struct pipe_clip_state *clip)
{
   if (clip_state_cmp(&ctx->clip, clip)) {
      clip_state_cpy(&ctx->clip, clip);
      ctx->pipe->set_clip_state(ctx->pipe, clip);
   }
}

void
cso_save_clip(struct cso_context *ctx)
{
   clip_state_cpy(&ctx->clip_saved, &ctx->clip);
}

void
cso_restore_clip(struct cso_context *ctx)
{
   if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
      clip_state_cpy(&ctx->clip, &ctx->clip_saved);
      ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
   }
}

enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count in the stored state data too.
    * Otherwise the first 'count' pipe_vertex_elements could be identical
    * even though the counts differ, and there's no guarantee the hash
    * would be different in that case either.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_elements(vbuf);
      return;
   }

   assert(!ctx->velements_saved);
   ctx->velements_saved = ctx->velements;
}

void cso_restore_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_elements(vbuf);
      return;
   }

   if (ctx->velements != ctx->velements_saved) {
      ctx->velements = ctx->velements_saved;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
   }
   ctx->velements_saved = NULL;
}

/* vertex buffers */

void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned start_slot, unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
      return;
   }

   /* Save what's in the auxiliary slot, so that we can save and restore it
    * for meta ops. */
   if (start_slot <= ctx->aux_vertex_buffer_index &&
       start_slot+count > ctx->aux_vertex_buffer_index) {
      if (buffers) {
         const struct pipe_vertex_buffer *vb =
               buffers + (ctx->aux_vertex_buffer_index - start_slot);

         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 vb->buffer);
         memcpy(&ctx->aux_vertex_buffer_current, vb,
                sizeof(struct pipe_vertex_buffer));
      }
      else {
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 NULL);
         ctx->aux_vertex_buffer_current.user_buffer = NULL;
      }
   }

   ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
}

void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_aux_vertex_buffer_slot(vbuf);
      return;
   }

   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
                           ctx->aux_vertex_buffer_current.buffer);
   memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
          sizeof(struct pipe_vertex_buffer));
}

void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
      return;
   }

   cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
                          &ctx->aux_vertex_buffer_saved);
   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
}

unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   return ctx->aux_vertex_buffer_index;
}


/**************** fragment/vertex sampler view state *************************/

static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}

enum pipe_error
cso_single_sampler(struct cso_context *ctx,
                   unsigned shader_stage,
                   unsigned idx,
                   const struct pipe_sampler_state *templ)
{
   return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
}



static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));

      /* set remaining slots/pointers to null */
      for (i = info->nr_samplers; i < info->hw.nr_samplers; i++)
         info->samplers[i] = NULL;

      ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
                                     MAX2(info->nr_samplers,
                                          info->hw.nr_samplers),
                                     info->samplers);

      info->hw.nr_samplers = info->nr_samplers;
   }
}

void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}
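
/*
 * Illustrative sketch (not in the original file): the per-slot sampler
 * interface is used by setting each slot and then committing the whole
 * set with one bind call.  'nearest_templ' is an assumed caller-provided
 * pipe_sampler_state template.
 *
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &nearest_templ);
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 1, NULL);  // clear slot 1
 *    cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);      // binds the set
 */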


/*
 * If the function encounters any errors it will return the
 * last one.  This is done so that we always try to set as many
 * samplers as possible.
 */
enum pipe_error
cso_set_samplers(struct cso_context *ctx,
                 unsigned shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;
   enum pipe_error temp, error = PIPE_OK;

   /* TODO: fastpath
    */

   for (i = 0; i < nr; i++) {
      temp = single_sampler(ctx, info, i, templates[i]);
      if (temp != PIPE_OK)
         error = temp;
   }

   for ( ; i < info->nr_samplers; i++) {
      temp = single_sampler(ctx, info, i, NULL);
      if (temp != PIPE_OK)
         error = temp;
   }

   single_sampler_done(ctx, shader_stage);

   return error;
}

void
cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers_saved = info->nr_samplers;
   memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
}


void
cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers = info->nr_samplers_saved;
   memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
   single_sampler_done(ctx, shader_stage);
}


void
cso_set_sampler_views(struct cso_context *ctx,
                      unsigned shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* reference new views */
   for (i = 0; i < count; i++) {
      pipe_sampler_view_reference(&info->views[i], views[i]);
   }
   /* unref extra old views, if any */
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   info->nr_views = count;

   /* bind the new sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count,
                                info->views);
}


void
cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   info->nr_views_saved = info->nr_views;

   for (i = 0; i < info->nr_views; i++) {
      assert(!info->views_saved[i]);
      pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
   }
}


void
cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i, nr_saved = info->nr_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
      /* move the reference from one pointer to another */
      info->views[i] = info->views_saved[i];
      info->views_saved[i] = NULL;
   }
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   num = MAX2(info->nr_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, num, info->views);

   info->nr_views = nr_saved;
   info->nr_views_saved = 0;
}


void
cso_set_stream_outputs(struct cso_context *ctx,
                       unsigned num_targets,
                       struct pipe_stream_output_target **targets,
                       unsigned append_bitmask)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      assert(num_targets == 0);
      return;
   }

   if (ctx->nr_so_targets == 0 && num_targets == 0) {
      /* Nothing to do. */
      return;
   }

   /* reference new targets */
   for (i = 0; i < num_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
   }
   /* unref extra old targets, if any */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, num_targets, targets,
                                   append_bitmask);
   ctx->nr_so_targets = num_targets;
}

void
cso_save_stream_outputs(struct cso_context *ctx)
{
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   ctx->nr_so_targets_saved = ctx->nr_so_targets;

   for (i = 0; i < ctx->nr_so_targets; i++) {
      assert(!ctx->so_targets_saved[i]);
      pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
   }
}

void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
   }
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   /* ~0 means append */
   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, ~0);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}

/* constant buffers */

void
cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
                        unsigned index, struct pipe_constant_buffer *cb)
{
   struct pipe_context *pipe = cso->pipe;

   pipe->set_constant_buffer(pipe, shader_stage, index, cb);

   if (index == 0) {
      util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
   }
}

void
cso_set_constant_buffer_resource(struct cso_context *cso,
                                 unsigned shader_stage,
                                 unsigned index,
                                 struct pipe_resource *buffer)
{
   if (buffer) {
      struct pipe_constant_buffer cb;
      cb.buffer = buffer;
      cb.buffer_offset = 0;
      cb.buffer_size = buffer->width0;
      cb.user_buffer = NULL;
      cso_set_constant_buffer(cso, shader_stage, index, &cb);
   } else {
      cso_set_constant_buffer(cso, shader_stage, index, NULL);
   }
}

void
cso_save_constant_buffer_slot0(struct cso_context *cso,
                               unsigned shader_stage)
{
   util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
                             &cso->aux_constbuf_current[shader_stage]);
}

void
cso_restore_constant_buffer_slot0(struct cso_context *cso,
                                  unsigned shader_stage)
{
   cso_set_constant_buffer(cso, shader_stage, 0,
                           &cso->aux_constbuf_saved[shader_stage]);
   pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
                           NULL);
}

/* drawing */

void
cso_set_index_buffer(struct cso_context *cso,
                     const struct pipe_index_buffer *ib)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_set_index_buffer(vbuf, ib);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->set_index_buffer(pipe, ib);
   }
}

void
cso_draw_vbo(struct cso_context *cso,
             const struct pipe_draw_info *info)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_draw_vbo(vbuf, info);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->draw_vbo(pipe, info);
   }
}

void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}

void
cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
                          uint start, uint count,
                          uint start_instance, uint instance_count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;
   info.start_instance = start_instance;
   info.instance_count = instance_count;

   cso_draw_vbo(cso, &info);
}