cso: minor comment fix
[mesa.git] src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
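
/*
 * A minimal usage sketch (assuming a state tracker that already has a
 * pipe_context and fully-initialized state templates; the "my_*" names
 * below are hypothetical):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *
 *    // around a meta op: push the state we will clobber (1-deep stack),
 *    // bind our own, draw, then pop the application state back
 *    cso_save_blend(cso);
 *    cso_save_rasterizer(cso);
 *    cso_set_blend(cso, &my_blend_templ);
 *    cso_set_rasterizer(cso, &my_rast_templ);
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *    cso_restore_rasterizer(cso);
 *    cso_restore_blend(cso);
 *
 *    cso_destroy_context(cso);
 *
 * Identical templates hit the cso_cache and reuse the driver object
 * instead of re-creating it.
 */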
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these for each shader stage.
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info samplers[PIPE_SHADER_TYPES];
88
89 struct pipe_vertex_buffer aux_vertex_buffer_current;
90 struct pipe_vertex_buffer aux_vertex_buffer_saved;
91 unsigned aux_vertex_buffer_index;
92
93 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
94 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved;
109 void *vertex_shader, *vertex_shader_saved;
110 void *geometry_shader, *geometry_shader_saved;
111 void *velements, *velements_saved;
112 struct pipe_query *render_condition, *render_condition_saved;
113 uint render_condition_mode, render_condition_mode_saved;
114 boolean render_condition_cond, render_condition_cond_saved;
115
116 struct pipe_clip_state clip;
117 struct pipe_clip_state clip_saved;
118
119 struct pipe_framebuffer_state fb, fb_saved;
120 struct pipe_viewport_state vp, vp_saved;
121 struct pipe_blend_color blend_color;
122 unsigned sample_mask, sample_mask_saved;
123 unsigned min_samples, min_samples_saved;
124 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
125 };
126
127
128 static boolean delete_blend_state(struct cso_context *ctx, void *state)
129 {
130 struct cso_blend *cso = (struct cso_blend *)state;
131
132 if (ctx->blend == cso->data)
133 return FALSE;
134
135 if (cso->delete_state)
136 cso->delete_state(cso->context, cso->data);
137 FREE(state);
138 return TRUE;
139 }
140
141 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
142 {
143 struct cso_depth_stencil_alpha *cso =
144 (struct cso_depth_stencil_alpha *)state;
145
146 if (ctx->depth_stencil == cso->data)
147 return FALSE;
148
149 if (cso->delete_state)
150 cso->delete_state(cso->context, cso->data);
151 FREE(state);
152
153 return TRUE;
154 }
155
156 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
157 {
158 struct cso_sampler *cso = (struct cso_sampler *)state;
159 if (cso->delete_state)
160 cso->delete_state(cso->context, cso->data);
161 FREE(state);
162 return TRUE;
163 }
164
165 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
166 {
167 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
168
169 if (ctx->rasterizer == cso->data)
170 return FALSE;
171 if (cso->delete_state)
172 cso->delete_state(cso->context, cso->data);
173 FREE(state);
174 return TRUE;
175 }
176
177 static boolean delete_vertex_elements(struct cso_context *ctx,
178 void *state)
179 {
180 struct cso_velements *cso = (struct cso_velements *)state;
181
182 if (ctx->velements == cso->data)
183 return FALSE;
184
185 if (cso->delete_state)
186 cso->delete_state(cso->context, cso->data);
187 FREE(state);
188 return TRUE;
189 }
190
191
192 static INLINE boolean delete_cso(struct cso_context *ctx,
193 void *state, enum cso_cache_type type)
194 {
195 switch (type) {
196 case CSO_BLEND:
197 return delete_blend_state(ctx, state);
198 case CSO_SAMPLER:
199 return delete_sampler_state(ctx, state);
200 case CSO_DEPTH_STENCIL_ALPHA:
201 return delete_depth_stencil_state(ctx, state);
202 case CSO_RASTERIZER:
203 return delete_rasterizer_state(ctx, state);
204 case CSO_VELEMENTS:
205 return delete_vertex_elements(ctx, state);
206 default:
207 assert(0);
208 FREE(state);
209 }
210 return FALSE;
211 }
212
213 static INLINE void
214 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
215 int max_size, void *user_data)
216 {
217 struct cso_context *ctx = (struct cso_context *)user_data;
218 /* If we're approaching the maximum size, remove a fourth of the entries;
219 * otherwise every subsequent call would go through the same thing. */
220 int hash_size = cso_hash_size(hash);
221 int max_entries = (max_size > hash_size) ? max_size : hash_size;
222 int to_remove = (max_size < max_entries) * max_entries/4;
223 struct cso_hash_iter iter = cso_hash_first_node(hash);
224 if (hash_size > max_size)
225 to_remove += hash_size - max_size;
226 while (to_remove) {
227 /* remove elements until we're good */
228 /* FIXME: currently we pick the nodes to remove at random */
229 void *cso = cso_hash_iter_data(iter);
230 if (delete_cso(ctx, cso, type)) {
231 iter = cso_hash_erase(hash, iter);
232 --to_remove;
233 } else
234 iter = cso_hash_iter_next(iter);
235 }
236 }
237
238 static void cso_init_vbuf(struct cso_context *cso)
239 {
240 struct u_vbuf_caps caps;
241
242 /* Install u_vbuf if there is anything unsupported. */
243 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
244 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
245 cso->aux_vertex_buffer_index);
246 }
247 }
248
249 struct cso_context *cso_create_context( struct pipe_context *pipe )
250 {
251 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
252 if (ctx == NULL)
253 goto out;
254
255 ctx->cache = cso_cache_create();
256 if (ctx->cache == NULL)
257 goto out;
258 cso_cache_set_sanitize_callback(ctx->cache,
259 sanitize_hash,
260 ctx);
261
262 ctx->pipe = pipe;
263 ctx->sample_mask = ~0;
264
265 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
266
267 cso_init_vbuf(ctx);
268
269 /* Enable for testing: */
270 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
271
272 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
273 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
274 ctx->has_geometry_shader = TRUE;
275 }
276 if (pipe->screen->get_param(pipe->screen,
277 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
278 ctx->has_streamout = TRUE;
279 }
280
281 return ctx;
282
283 out:
284 cso_destroy_context( ctx );
285 return NULL;
286 }
287
288 /**
289 * Free the CSO context.
290 */
291 void cso_destroy_context( struct cso_context *ctx )
292 {
293 unsigned i, shader;
294
295 if (ctx->pipe) {
296 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
297 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
298
299 {
300 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
301 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
302 struct pipe_screen *scr = ctx->pipe->screen;
303 unsigned sh;
304 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
305 int maxsam = scr->get_shader_param(scr, sh,
306 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
307 int maxview = scr->get_shader_param(scr, sh,
308 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
309 assert(maxsam <= PIPE_MAX_SAMPLERS);
310 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
311 if (maxsam > 0) {
312 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
313 }
314 if (maxview > 0) {
315 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
316 }
317 }
318 }
319
320 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
321 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
322 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
323 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
324
325 if (ctx->has_streamout)
326 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
327 }
328
329 /* free sampler views for each shader stage */
330 for (shader = 0; shader < Elements(ctx->samplers); shader++) {
331 struct sampler_info *info = &ctx->samplers[shader];
332 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
333 pipe_sampler_view_reference(&info->views[i], NULL);
334 pipe_sampler_view_reference(&info->views_saved[i], NULL);
335 }
336 }
337
338 util_unreference_framebuffer_state(&ctx->fb);
339 util_unreference_framebuffer_state(&ctx->fb_saved);
340
341 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
342 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
343
344 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
345 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
346 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
347 }
348
349 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
350 pipe_so_target_reference(&ctx->so_targets[i], NULL);
351 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
352 }
353
354 if (ctx->cache) {
355 cso_cache_delete( ctx->cache );
356 ctx->cache = NULL;
357 }
358
359 if (ctx->vbuf)
360 u_vbuf_destroy(ctx->vbuf);
361 FREE( ctx );
362 }
363
364
365 /* These functions will either find the state matching the given template
366 * in the cache, or create a new state from the template, insert it
367 * into the cache and return it.
368 */
369
370 /*
371 * If the driver returns 0 from the create method then the data member
372 * of the cso will be the template itself.
373 */
374
375 enum pipe_error cso_set_blend(struct cso_context *ctx,
376 const struct pipe_blend_state *templ)
377 {
378 unsigned key_size, hash_key;
379 struct cso_hash_iter iter;
380 void *handle;
381
382 key_size = templ->independent_blend_enable ?
383 sizeof(struct pipe_blend_state) :
384 (char *)&(templ->rt[1]) - (char *)templ;
385 hash_key = cso_construct_key((void*)templ, key_size);
386 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
387 (void*)templ, key_size);
388
389 if (cso_hash_iter_is_null(iter)) {
390 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
391 if (!cso)
392 return PIPE_ERROR_OUT_OF_MEMORY;
393
394 memset(&cso->state, 0, sizeof cso->state);
395 memcpy(&cso->state, templ, key_size);
396 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
397 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
398 cso->context = ctx->pipe;
399
400 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
401 if (cso_hash_iter_is_null(iter)) {
402 FREE(cso);
403 return PIPE_ERROR_OUT_OF_MEMORY;
404 }
405
406 handle = cso->data;
407 }
408 else {
409 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
410 }
411
412 if (ctx->blend != handle) {
413 ctx->blend = handle;
414 ctx->pipe->bind_blend_state(ctx->pipe, handle);
415 }
416 return PIPE_OK;
417 }
418
419 void cso_save_blend(struct cso_context *ctx)
420 {
421 assert(!ctx->blend_saved);
422 ctx->blend_saved = ctx->blend;
423 }
424
425 void cso_restore_blend(struct cso_context *ctx)
426 {
427 if (ctx->blend != ctx->blend_saved) {
428 ctx->blend = ctx->blend_saved;
429 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
430 }
431 ctx->blend_saved = NULL;
432 }
433
434
435
436 enum pipe_error
437 cso_set_depth_stencil_alpha(struct cso_context *ctx,
438 const struct pipe_depth_stencil_alpha_state *templ)
439 {
440 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
441 unsigned hash_key = cso_construct_key((void*)templ, key_size);
442 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
443 hash_key,
444 CSO_DEPTH_STENCIL_ALPHA,
445 (void*)templ, key_size);
446 void *handle;
447
448 if (cso_hash_iter_is_null(iter)) {
449 struct cso_depth_stencil_alpha *cso =
450 MALLOC(sizeof(struct cso_depth_stencil_alpha));
451 if (!cso)
452 return PIPE_ERROR_OUT_OF_MEMORY;
453
454 memcpy(&cso->state, templ, sizeof(*templ));
455 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
456 &cso->state);
457 cso->delete_state =
458 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
459 cso->context = ctx->pipe;
460
461 iter = cso_insert_state(ctx->cache, hash_key,
462 CSO_DEPTH_STENCIL_ALPHA, cso);
463 if (cso_hash_iter_is_null(iter)) {
464 FREE(cso);
465 return PIPE_ERROR_OUT_OF_MEMORY;
466 }
467
468 handle = cso->data;
469 }
470 else {
471 handle = ((struct cso_depth_stencil_alpha *)
472 cso_hash_iter_data(iter))->data;
473 }
474
475 if (ctx->depth_stencil != handle) {
476 ctx->depth_stencil = handle;
477 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
478 }
479 return PIPE_OK;
480 }
481
482 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
483 {
484 assert(!ctx->depth_stencil_saved);
485 ctx->depth_stencil_saved = ctx->depth_stencil;
486 }
487
488 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
489 {
490 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
491 ctx->depth_stencil = ctx->depth_stencil_saved;
492 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
493 ctx->depth_stencil_saved);
494 }
495 ctx->depth_stencil_saved = NULL;
496 }
497
498
499
500 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
501 const struct pipe_rasterizer_state *templ)
502 {
503 unsigned key_size = sizeof(struct pipe_rasterizer_state);
504 unsigned hash_key = cso_construct_key((void*)templ, key_size);
505 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
506 hash_key,
507 CSO_RASTERIZER,
508 (void*)templ, key_size);
509 void *handle = NULL;
510
511 if (cso_hash_iter_is_null(iter)) {
512 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
513 if (!cso)
514 return PIPE_ERROR_OUT_OF_MEMORY;
515
516 memcpy(&cso->state, templ, sizeof(*templ));
517 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
518 cso->delete_state =
519 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
520 cso->context = ctx->pipe;
521
522 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
523 if (cso_hash_iter_is_null(iter)) {
524 FREE(cso);
525 return PIPE_ERROR_OUT_OF_MEMORY;
526 }
527
528 handle = cso->data;
529 }
530 else {
531 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
532 }
533
534 if (ctx->rasterizer != handle) {
535 ctx->rasterizer = handle;
536 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
537 }
538 return PIPE_OK;
539 }
540
541 void cso_save_rasterizer(struct cso_context *ctx)
542 {
543 assert(!ctx->rasterizer_saved);
544 ctx->rasterizer_saved = ctx->rasterizer;
545 }
546
547 void cso_restore_rasterizer(struct cso_context *ctx)
548 {
549 if (ctx->rasterizer != ctx->rasterizer_saved) {
550 ctx->rasterizer = ctx->rasterizer_saved;
551 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
552 }
553 ctx->rasterizer_saved = NULL;
554 }
555
556
557 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
558 {
559 if (ctx->fragment_shader != handle) {
560 ctx->fragment_shader = handle;
561 ctx->pipe->bind_fs_state(ctx->pipe, handle);
562 }
563 }
564
565 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
566 {
567 if (handle == ctx->fragment_shader) {
568 /* unbind before deleting */
569 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
570 ctx->fragment_shader = NULL;
571 }
572 ctx->pipe->delete_fs_state(ctx->pipe, handle);
573 }
574
575 void cso_save_fragment_shader(struct cso_context *ctx)
576 {
577 assert(!ctx->fragment_shader_saved);
578 ctx->fragment_shader_saved = ctx->fragment_shader;
579 }
580
581 void cso_restore_fragment_shader(struct cso_context *ctx)
582 {
583 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
584 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
585 ctx->fragment_shader = ctx->fragment_shader_saved;
586 }
587 ctx->fragment_shader_saved = NULL;
588 }
589
590
591 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
592 {
593 if (ctx->vertex_shader != handle) {
594 ctx->vertex_shader = handle;
595 ctx->pipe->bind_vs_state(ctx->pipe, handle);
596 }
597 }
598
599 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
600 {
601 if (handle == ctx->vertex_shader) {
602 /* unbind before deleting */
603 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
604 ctx->vertex_shader = NULL;
605 }
606 ctx->pipe->delete_vs_state(ctx->pipe, handle);
607 }
608
609 void cso_save_vertex_shader(struct cso_context *ctx)
610 {
611 assert(!ctx->vertex_shader_saved);
612 ctx->vertex_shader_saved = ctx->vertex_shader;
613 }
614
615 void cso_restore_vertex_shader(struct cso_context *ctx)
616 {
617 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
618 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
619 ctx->vertex_shader = ctx->vertex_shader_saved;
620 }
621 ctx->vertex_shader_saved = NULL;
622 }
623
624
625 void cso_set_framebuffer(struct cso_context *ctx,
626 const struct pipe_framebuffer_state *fb)
627 {
628 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
629 util_copy_framebuffer_state(&ctx->fb, fb);
630 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
631 }
632 }
633
634 void cso_save_framebuffer(struct cso_context *ctx)
635 {
636 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
637 }
638
639 void cso_restore_framebuffer(struct cso_context *ctx)
640 {
641 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
642 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
643 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
644 util_unreference_framebuffer_state(&ctx->fb_saved);
645 }
646 }
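
/*
 * Sketch of how a caller might retarget rendering temporarily (assuming a
 * hypothetical struct pipe_surface *my_surf of size my_w x my_h already
 * exists, and cso is the struct cso_context * in use):
 *
 *    struct pipe_framebuffer_state fb;
 *    memset(&fb, 0, sizeof(fb));
 *    fb.width = my_w;
 *    fb.height = my_h;
 *    fb.nr_cbufs = 1;
 *    fb.cbufs[0] = my_surf;
 *
 *    cso_save_framebuffer(cso);
 *    cso_set_framebuffer(cso, &fb);
 *    ...draw...
 *    cso_restore_framebuffer(cso);
 *
 * cso_set_framebuffer() only reaches the driver when the new state differs
 * from what is currently bound.
 */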
647
648
649 void cso_set_viewport(struct cso_context *ctx,
650 const struct pipe_viewport_state *vp)
651 {
652 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
653 ctx->vp = *vp;
654 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
655 }
656 }
657
658 void cso_save_viewport(struct cso_context *ctx)
659 {
660 ctx->vp_saved = ctx->vp;
661 }
662
663
664 void cso_restore_viewport(struct cso_context *ctx)
665 {
666 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
667 ctx->vp = ctx->vp_saved;
668 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
669 }
670 }
671
672
673 void cso_set_blend_color(struct cso_context *ctx,
674 const struct pipe_blend_color *bc)
675 {
676 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
677 ctx->blend_color = *bc;
678 ctx->pipe->set_blend_color(ctx->pipe, bc);
679 }
680 }
681
682 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
683 {
684 if (ctx->sample_mask != sample_mask) {
685 ctx->sample_mask = sample_mask;
686 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
687 }
688 }
689
690 void cso_save_sample_mask(struct cso_context *ctx)
691 {
692 ctx->sample_mask_saved = ctx->sample_mask;
693 }
694
695 void cso_restore_sample_mask(struct cso_context *ctx)
696 {
697 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
698 }
699
700 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
701 {
702 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
703 ctx->min_samples = min_samples;
704 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
705 }
706 }
707
708 void cso_save_min_samples(struct cso_context *ctx)
709 {
710 ctx->min_samples_saved = ctx->min_samples;
711 }
712
713 void cso_restore_min_samples(struct cso_context *ctx)
714 {
715 cso_set_min_samples(ctx, ctx->min_samples_saved);
716 }
717
718 void cso_set_stencil_ref(struct cso_context *ctx,
719 const struct pipe_stencil_ref *sr)
720 {
721 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
722 ctx->stencil_ref = *sr;
723 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
724 }
725 }
726
727 void cso_save_stencil_ref(struct cso_context *ctx)
728 {
729 ctx->stencil_ref_saved = ctx->stencil_ref;
730 }
731
732
733 void cso_restore_stencil_ref(struct cso_context *ctx)
734 {
735 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
736 sizeof(ctx->stencil_ref))) {
737 ctx->stencil_ref = ctx->stencil_ref_saved;
738 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
739 }
740 }
741
742 void cso_set_render_condition(struct cso_context *ctx,
743 struct pipe_query *query,
744 boolean condition, uint mode)
745 {
746 struct pipe_context *pipe = ctx->pipe;
747
748 if (ctx->render_condition != query ||
749 ctx->render_condition_mode != mode ||
750 ctx->render_condition_cond != condition) {
751 pipe->render_condition(pipe, query, condition, mode);
752 ctx->render_condition = query;
753 ctx->render_condition_cond = condition;
754 ctx->render_condition_mode = mode;
755 }
756 }
757
758 void cso_save_render_condition(struct cso_context *ctx)
759 {
760 ctx->render_condition_saved = ctx->render_condition;
761 ctx->render_condition_cond_saved = ctx->render_condition_cond;
762 ctx->render_condition_mode_saved = ctx->render_condition_mode;
763 }
764
765 void cso_restore_render_condition(struct cso_context *ctx)
766 {
767 cso_set_render_condition(ctx, ctx->render_condition_saved,
768 ctx->render_condition_cond_saved,
769 ctx->render_condition_mode_saved);
770 }
771
772 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
773 {
774 assert(ctx->has_geometry_shader || !handle);
775
776 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
777 ctx->geometry_shader = handle;
778 ctx->pipe->bind_gs_state(ctx->pipe, handle);
779 }
780 }
781
782 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
783 {
784 if (handle == ctx->geometry_shader) {
785 /* unbind before deleting */
786 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
787 ctx->geometry_shader = NULL;
788 }
789 ctx->pipe->delete_gs_state(ctx->pipe, handle);
790 }
791
792 void cso_save_geometry_shader(struct cso_context *ctx)
793 {
794 if (!ctx->has_geometry_shader) {
795 return;
796 }
797
798 assert(!ctx->geometry_shader_saved);
799 ctx->geometry_shader_saved = ctx->geometry_shader;
800 }
801
802 void cso_restore_geometry_shader(struct cso_context *ctx)
803 {
804 if (!ctx->has_geometry_shader) {
805 return;
806 }
807
808 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
809 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
810 ctx->geometry_shader = ctx->geometry_shader_saved;
811 }
812 ctx->geometry_shader_saved = NULL;
813 }
814
815 /* clip state */
816
817 static INLINE void
818 clip_state_cpy(struct pipe_clip_state *dst,
819 const struct pipe_clip_state *src)
820 {
821 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
822 }
823
824 static INLINE int
825 clip_state_cmp(const struct pipe_clip_state *a,
826 const struct pipe_clip_state *b)
827 {
828 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
829 }
830
831 void
832 cso_set_clip(struct cso_context *ctx,
833 const struct pipe_clip_state *clip)
834 {
835 if (clip_state_cmp(&ctx->clip, clip)) {
836 clip_state_cpy(&ctx->clip, clip);
837 ctx->pipe->set_clip_state(ctx->pipe, clip);
838 }
839 }
840
841 void
842 cso_save_clip(struct cso_context *ctx)
843 {
844 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
845 }
846
847 void
848 cso_restore_clip(struct cso_context *ctx)
849 {
850 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
851 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
852 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
853 }
854 }
855
856 enum pipe_error
857 cso_set_vertex_elements(struct cso_context *ctx,
858 unsigned count,
859 const struct pipe_vertex_element *states)
860 {
861 struct u_vbuf *vbuf = ctx->vbuf;
862 unsigned key_size, hash_key;
863 struct cso_hash_iter iter;
864 void *handle;
865 struct cso_velems_state velems_state;
866
867 if (vbuf) {
868 u_vbuf_set_vertex_elements(vbuf, count, states);
869 return PIPE_OK;
870 }
871
872 /* Need to include the count in the stored state data too.
873 * Otherwise the first `count` pipe_vertex_elements could be identical
874 * even if the counts differ, and there's no guarantee the hash would
875 * be different in that case either.
876 */
877 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
878 velems_state.count = count;
879 memcpy(velems_state.velems, states,
880 sizeof(struct pipe_vertex_element) * count);
881 hash_key = cso_construct_key((void*)&velems_state, key_size);
882 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
883 (void*)&velems_state, key_size);
884
885 if (cso_hash_iter_is_null(iter)) {
886 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
887 if (!cso)
888 return PIPE_ERROR_OUT_OF_MEMORY;
889
890 memcpy(&cso->state, &velems_state, key_size);
891 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
892 &cso->state.velems[0]);
893 cso->delete_state =
894 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
895 cso->context = ctx->pipe;
896
897 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
898 if (cso_hash_iter_is_null(iter)) {
899 FREE(cso);
900 return PIPE_ERROR_OUT_OF_MEMORY;
901 }
902
903 handle = cso->data;
904 }
905 else {
906 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
907 }
908
909 if (ctx->velements != handle) {
910 ctx->velements = handle;
911 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
912 }
913 return PIPE_OK;
914 }
915
916 void cso_save_vertex_elements(struct cso_context *ctx)
917 {
918 struct u_vbuf *vbuf = ctx->vbuf;
919
920 if (vbuf) {
921 u_vbuf_save_vertex_elements(vbuf);
922 return;
923 }
924
925 assert(!ctx->velements_saved);
926 ctx->velements_saved = ctx->velements;
927 }
928
929 void cso_restore_vertex_elements(struct cso_context *ctx)
930 {
931 struct u_vbuf *vbuf = ctx->vbuf;
932
933 if (vbuf) {
934 u_vbuf_restore_vertex_elements(vbuf);
935 return;
936 }
937
938 if (ctx->velements != ctx->velements_saved) {
939 ctx->velements = ctx->velements_saved;
940 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
941 }
942 ctx->velements_saved = NULL;
943 }
944
945 /* vertex buffers */
946
947 void cso_set_vertex_buffers(struct cso_context *ctx,
948 unsigned start_slot, unsigned count,
949 const struct pipe_vertex_buffer *buffers)
950 {
951 struct u_vbuf *vbuf = ctx->vbuf;
952
953 if (vbuf) {
954 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
955 return;
956 }
957
958 /* Save what's in the auxiliary slot, so that we can save and restore it
959 * for meta ops. */
960 if (start_slot <= ctx->aux_vertex_buffer_index &&
961 start_slot+count > ctx->aux_vertex_buffer_index) {
962 if (buffers) {
963 const struct pipe_vertex_buffer *vb =
964 buffers + (ctx->aux_vertex_buffer_index - start_slot);
965
966 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
967 vb->buffer);
968 memcpy(&ctx->aux_vertex_buffer_current, vb,
969 sizeof(struct pipe_vertex_buffer));
970 }
971 else {
972 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
973 NULL);
974 ctx->aux_vertex_buffer_current.user_buffer = NULL;
975 }
976 }
977
978 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
979 }
980
981 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
982 {
983 struct u_vbuf *vbuf = ctx->vbuf;
984
985 if (vbuf) {
986 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
987 return;
988 }
989
990 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
991 ctx->aux_vertex_buffer_current.buffer);
992 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
993 sizeof(struct pipe_vertex_buffer));
994 }
995
996 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
997 {
998 struct u_vbuf *vbuf = ctx->vbuf;
999
1000 if (vbuf) {
1001 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1002 return;
1003 }
1004
1005 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1006 &ctx->aux_vertex_buffer_saved);
1007 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1008 }
1009
1010 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1011 {
1012 return ctx->aux_vertex_buffer_index;
1013 }
1014
1015
1016 /**************** fragment/vertex sampler view state *************************/
1017
1018 static enum pipe_error
1019 single_sampler(struct cso_context *ctx,
1020 struct sampler_info *info,
1021 unsigned idx,
1022 const struct pipe_sampler_state *templ)
1023 {
1024 void *handle = NULL;
1025
1026 if (templ != NULL) {
1027 unsigned key_size = sizeof(struct pipe_sampler_state);
1028 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1029 struct cso_hash_iter iter =
1030 cso_find_state_template(ctx->cache,
1031 hash_key, CSO_SAMPLER,
1032 (void *) templ, key_size);
1033
1034 if (cso_hash_iter_is_null(iter)) {
1035 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1036 if (!cso)
1037 return PIPE_ERROR_OUT_OF_MEMORY;
1038
1039 memcpy(&cso->state, templ, sizeof(*templ));
1040 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1041 cso->delete_state =
1042 (cso_state_callback) ctx->pipe->delete_sampler_state;
1043 cso->context = ctx->pipe;
1044
1045 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1046 if (cso_hash_iter_is_null(iter)) {
1047 FREE(cso);
1048 return PIPE_ERROR_OUT_OF_MEMORY;
1049 }
1050
1051 handle = cso->data;
1052 }
1053 else {
1054 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1055 }
1056 }
1057
1058 info->samplers[idx] = handle;
1059
1060 return PIPE_OK;
1061 }
1062
1063 enum pipe_error
1064 cso_single_sampler(struct cso_context *ctx,
1065 unsigned shader_stage,
1066 unsigned idx,
1067 const struct pipe_sampler_state *templ)
1068 {
1069 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1070 }
1071
1072
1073
1074 static void
1075 single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1076 {
1077 struct sampler_info *info = &ctx->samplers[shader_stage];
1078 unsigned i;
1079
1080 /* find highest non-null sampler */
1081 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1082 if (info->samplers[i - 1] != NULL)
1083 break;
1084 }
1085
1086 info->nr_samplers = i;
1087
1088 if (info->hw.nr_samplers != info->nr_samplers ||
1089 memcmp(info->hw.samplers,
1090 info->samplers,
1091 info->nr_samplers * sizeof(void *)) != 0)
1092 {
1093 memcpy(info->hw.samplers,
1094 info->samplers,
1095 info->nr_samplers * sizeof(void *));
1096
1097 /* set remaining slots/pointers to null */
1098 for (i = info->nr_samplers; i < info->hw.nr_samplers; i++)
1099 info->samplers[i] = NULL;
1100
1101 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1102 MAX2(info->nr_samplers,
1103 info->hw.nr_samplers),
1104 info->samplers);
1105
1106 info->hw.nr_samplers = info->nr_samplers;
1107 }
1108 }
1109
1110 void
1111 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1112 {
1113 single_sampler_done(ctx, shader_stage);
1114 }
1115
1116
1117 /*
1118 * If the function encounters any errors it will return the
1119 * last one. This is done so we always try to set as many samplers
1120 * as possible.
1121 */
1122 enum pipe_error
1123 cso_set_samplers(struct cso_context *ctx,
1124 unsigned shader_stage,
1125 unsigned nr,
1126 const struct pipe_sampler_state **templates)
1127 {
1128 struct sampler_info *info = &ctx->samplers[shader_stage];
1129 unsigned i;
1130 enum pipe_error temp, error = PIPE_OK;
1131
1132 /* TODO: fastpath
1133 */
1134
1135 for (i = 0; i < nr; i++) {
1136 temp = single_sampler(ctx, info, i, templates[i]);
1137 if (temp != PIPE_OK)
1138 error = temp;
1139 }
1140
1141 for ( ; i < info->nr_samplers; i++) {
1142 temp = single_sampler(ctx, info, i, NULL);
1143 if (temp != PIPE_OK)
1144 error = temp;
1145 }
1146
1147 single_sampler_done(ctx, shader_stage);
1148
1149 return error;
1150 }
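
/*
 * Sketch of typical per-stage sampler setup (the sampler templates and the
 * my_views array here are hypothetical, created by the caller beforehand):
 *
 *    const struct pipe_sampler_state *templs[2] = { &linear, &nearest };
 *
 *    cso_set_samplers(cso, PIPE_SHADER_FRAGMENT, 2, templs);
 *    cso_set_sampler_views(cso, PIPE_SHADER_FRAGMENT, 2, my_views);
 *
 * or, incrementally:
 *
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &linear);
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 1, &nearest);
 *    cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
 */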
1151
1152 void
1153 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1154 {
1155 struct sampler_info *info = &ctx->samplers[shader_stage];
1156 info->nr_samplers_saved = info->nr_samplers;
1157 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1158 }
1159
1160
1161 void
1162 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1163 {
1164 struct sampler_info *info = &ctx->samplers[shader_stage];
1165 info->nr_samplers = info->nr_samplers_saved;
1166 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1167 single_sampler_done(ctx, shader_stage);
1168 }
1169
1170
1171 void
1172 cso_set_sampler_views(struct cso_context *ctx,
1173 unsigned shader_stage,
1174 unsigned count,
1175 struct pipe_sampler_view **views)
1176 {
1177 struct sampler_info *info = &ctx->samplers[shader_stage];
1178 unsigned i;
1179 boolean any_change = FALSE;
1180
1181 /* reference new views */
1182 for (i = 0; i < count; i++) {
1183 any_change |= info->views[i] != views[i];
1184 pipe_sampler_view_reference(&info->views[i], views[i]);
1185 }
1186 /* unref extra old views, if any */
1187 for (; i < info->nr_views; i++) {
1188 any_change |= info->views[i] != NULL;
1189 pipe_sampler_view_reference(&info->views[i], NULL);
1190 }
1191
1192 /* bind the new sampler views */
1193 if (any_change) {
1194 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1195 MAX2(info->nr_views, count),
1196 info->views);
1197 }
1198
1199 info->nr_views = count;
1200 }
1201
1202
1203 void
1204 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1205 {
1206 struct sampler_info *info = &ctx->samplers[shader_stage];
1207 unsigned i;
1208
1209 info->nr_views_saved = info->nr_views;
1210
1211 for (i = 0; i < info->nr_views; i++) {
1212 assert(!info->views_saved[i]);
1213 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1214 }
1215 }
1216
1217
1218 void
1219 cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1220 {
1221 struct sampler_info *info = &ctx->samplers[shader_stage];
1222 unsigned i, nr_saved = info->nr_views_saved;
1223 unsigned num;
1224
1225 for (i = 0; i < nr_saved; i++) {
1226 pipe_sampler_view_reference(&info->views[i], NULL);
1227 /* move the reference from one pointer to another */
1228 info->views[i] = info->views_saved[i];
1229 info->views_saved[i] = NULL;
1230 }
1231 for (; i < info->nr_views; i++) {
1232 pipe_sampler_view_reference(&info->views[i], NULL);
1233 }
1234
1235 num = MAX2(info->nr_views, nr_saved);
1236
1237 /* bind the old/saved sampler views */
1238 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, num, info->views);
1239
1240 info->nr_views = nr_saved;
1241 info->nr_views_saved = 0;
1242 }
1243
1244
1245 void
1246 cso_set_stream_outputs(struct cso_context *ctx,
1247 unsigned num_targets,
1248 struct pipe_stream_output_target **targets,
1249 const unsigned *offsets)
1250 {
1251 struct pipe_context *pipe = ctx->pipe;
1252 uint i;
1253
1254 if (!ctx->has_streamout) {
1255 assert(num_targets == 0);
1256 return;
1257 }
1258
1259 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1260 /* Nothing to do. */
1261 return;
1262 }
1263
1264 /* reference new targets */
1265 for (i = 0; i < num_targets; i++) {
1266 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1267 }
1268 /* unref extra old targets, if any */
1269 for (; i < ctx->nr_so_targets; i++) {
1270 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1271 }
1272
1273 pipe->set_stream_output_targets(pipe, num_targets, targets,
1274 offsets);
1275 ctx->nr_so_targets = num_targets;
1276 }
1277
1278 void
1279 cso_save_stream_outputs(struct cso_context *ctx)
1280 {
1281 uint i;
1282
1283 if (!ctx->has_streamout) {
1284 return;
1285 }
1286
1287 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1288
1289 for (i = 0; i < ctx->nr_so_targets; i++) {
1290 assert(!ctx->so_targets_saved[i]);
1291 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1292 }
1293 }
1294
1295 void
1296 cso_restore_stream_outputs(struct cso_context *ctx)
1297 {
1298 struct pipe_context *pipe = ctx->pipe;
1299 uint i;
1300 unsigned offset[PIPE_MAX_SO_BUFFERS];
1301
1302 if (!ctx->has_streamout) {
1303 return;
1304 }
1305
1306 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1307 /* Nothing to do. */
1308 return;
1309 }
1310
1311 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1312 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1313 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1314 /* move the reference from one pointer to another */
1315 ctx->so_targets[i] = ctx->so_targets_saved[i];
1316 ctx->so_targets_saved[i] = NULL;
1317 /* -1 means append */
1318 offset[i] = (unsigned)-1;
1319 }
1320 for (; i < ctx->nr_so_targets; i++) {
1321 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1322 }
1323
1324 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1325 ctx->so_targets, offset);
1326
1327 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1328 ctx->nr_so_targets_saved = 0;
1329 }
1330
1331 /* constant buffers */
1332
1333 void
1334 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1335 unsigned index, struct pipe_constant_buffer *cb)
1336 {
1337 struct pipe_context *pipe = cso->pipe;
1338
1339 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1340
1341 if (index == 0) {
1342 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1343 }
1344 }
1345
1346 void
1347 cso_set_constant_buffer_resource(struct cso_context *cso,
1348 unsigned shader_stage,
1349 unsigned index,
1350 struct pipe_resource *buffer)
1351 {
1352 if (buffer) {
1353 struct pipe_constant_buffer cb;
1354 cb.buffer = buffer;
1355 cb.buffer_offset = 0;
1356 cb.buffer_size = buffer->width0;
1357 cb.user_buffer = NULL;
1358 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1359 } else {
1360 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1361 }
1362 }
1363
1364 void
1365 cso_save_constant_buffer_slot0(struct cso_context *cso,
1366 unsigned shader_stage)
1367 {
1368 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1369 &cso->aux_constbuf_current[shader_stage]);
1370 }
1371
1372 void
1373 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1374 unsigned shader_stage)
1375 {
1376 cso_set_constant_buffer(cso, shader_stage, 0,
1377 &cso->aux_constbuf_saved[shader_stage]);
1378 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1379 NULL);
1380 }
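
/*
 * Sketch of the intended slot-0 usage around a meta op (my_const_buf is a
 * hypothetical pipe_resource holding the shader constants):
 *
 *    cso_save_constant_buffer_slot0(cso, PIPE_SHADER_VERTEX);
 *    cso_set_constant_buffer_resource(cso, PIPE_SHADER_VERTEX, 0,
 *                                     my_const_buf);
 *    ...draw...
 *    cso_restore_constant_buffer_slot0(cso, PIPE_SHADER_VERTEX);
 *
 * Only buffer index 0 is tracked for save/restore here; other indices are
 * passed straight through to the driver.
 */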
1381
1382 /* drawing */
1383
1384 void
1385 cso_set_index_buffer(struct cso_context *cso,
1386 const struct pipe_index_buffer *ib)
1387 {
1388 struct u_vbuf *vbuf = cso->vbuf;
1389
1390 if (vbuf) {
1391 u_vbuf_set_index_buffer(vbuf, ib);
1392 } else {
1393 struct pipe_context *pipe = cso->pipe;
1394 pipe->set_index_buffer(pipe, ib);
1395 }
1396 }
1397
1398 void
1399 cso_draw_vbo(struct cso_context *cso,
1400 const struct pipe_draw_info *info)
1401 {
1402 struct u_vbuf *vbuf = cso->vbuf;
1403
1404 if (vbuf) {
1405 u_vbuf_draw_vbo(vbuf, info);
1406 } else {
1407 struct pipe_context *pipe = cso->pipe;
1408 pipe->draw_vbo(pipe, info);
1409 }
1410 }
1411
1412 void
1413 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1414 {
1415 struct pipe_draw_info info;
1416
1417 util_draw_init_info(&info);
1418
1419 info.mode = mode;
1420 info.start = start;
1421 info.count = count;
1422 info.min_index = start;
1423 info.max_index = start + count - 1;
1424
1425 cso_draw_vbo(cso, &info);
1426 }
1427
1428 void
1429 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1430 uint start, uint count,
1431 uint start_instance, uint instance_count)
1432 {
1433 struct pipe_draw_info info;
1434
1435 util_draw_init_info(&info);
1436
1437 info.mode = mode;
1438 info.start = start;
1439 info.count = count;
1440 info.min_index = start;
1441 info.max_index = start + count - 1;
1442 info.start_instance = start_instance;
1443 info.instance_count = instance_count;
1444
1445 cso_draw_vbo(cso, &info);
1446 }
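
/*
 * End-to-end sketch of a minimal draw through this module (the resources,
 * shader handles and sizes below are hypothetical and assumed to be set up
 * by the caller):
 *
 *    struct pipe_vertex_element ve;
 *    struct pipe_vertex_buffer vb;
 *
 *    memset(&ve, 0, sizeof(ve));
 *    ve.src_format = PIPE_FORMAT_R32G32B32_FLOAT;
 *    ve.vertex_buffer_index = 0;
 *
 *    memset(&vb, 0, sizeof(vb));
 *    vb.buffer = my_vertex_data;     // pipe_resource with the positions
 *    vb.stride = 3 * sizeof(float);
 *
 *    cso_set_vertex_elements(cso, 1, &ve);
 *    cso_set_vertex_buffers(cso, 0, 1, &vb);
 *    cso_set_vertex_shader_handle(cso, my_vs);
 *    cso_set_fragment_shader_handle(cso, my_fs);
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *
 * When u_vbuf is installed (see cso_init_vbuf), the vertex element, vertex
 * buffer and draw calls are routed through it transparently.
 */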