mesa.git: src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
 54  * Info related to samplers and sampler views.
 55  * We have one of these per shader stage.
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info samplers[PIPE_SHADER_TYPES];
88
89 struct pipe_vertex_buffer aux_vertex_buffer_current;
90 struct pipe_vertex_buffer aux_vertex_buffer_saved;
91 unsigned aux_vertex_buffer_index;
92
93 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
94 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved;
109 void *vertex_shader, *vertex_shader_saved;
110 void *geometry_shader, *geometry_shader_saved;
111 void *velements, *velements_saved;
112 struct pipe_query *render_condition, *render_condition_saved;
113 uint render_condition_mode, render_condition_mode_saved;
114 boolean render_condition_cond, render_condition_cond_saved;
115
116 struct pipe_clip_state clip;
117 struct pipe_clip_state clip_saved;
118
119 struct pipe_framebuffer_state fb, fb_saved;
120 struct pipe_viewport_state vp, vp_saved;
121 struct pipe_blend_color blend_color;
122 unsigned sample_mask, sample_mask_saved;
123 unsigned min_samples, min_samples_saved;
124 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
125 };
126
127
128 static boolean delete_blend_state(struct cso_context *ctx, void *state)
129 {
130 struct cso_blend *cso = (struct cso_blend *)state;
131
132 if (ctx->blend == cso->data)
133 return FALSE;
134
135 if (cso->delete_state)
136 cso->delete_state(cso->context, cso->data);
137 FREE(state);
138 return TRUE;
139 }
140
141 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
142 {
143 struct cso_depth_stencil_alpha *cso =
144 (struct cso_depth_stencil_alpha *)state;
145
146 if (ctx->depth_stencil == cso->data)
147 return FALSE;
148
149 if (cso->delete_state)
150 cso->delete_state(cso->context, cso->data);
151 FREE(state);
152
153 return TRUE;
154 }
155
156 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
157 {
158 struct cso_sampler *cso = (struct cso_sampler *)state;
159 if (cso->delete_state)
160 cso->delete_state(cso->context, cso->data);
161 FREE(state);
162 return TRUE;
163 }
164
165 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
166 {
167 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
168
169 if (ctx->rasterizer == cso->data)
170 return FALSE;
171 if (cso->delete_state)
172 cso->delete_state(cso->context, cso->data);
173 FREE(state);
174 return TRUE;
175 }
176
177 static boolean delete_vertex_elements(struct cso_context *ctx,
178 void *state)
179 {
180 struct cso_velements *cso = (struct cso_velements *)state;
181
182 if (ctx->velements == cso->data)
183 return FALSE;
184
185 if (cso->delete_state)
186 cso->delete_state(cso->context, cso->data);
187 FREE(state);
188 return TRUE;
189 }
190
191
192 static INLINE boolean delete_cso(struct cso_context *ctx,
193 void *state, enum cso_cache_type type)
194 {
195 switch (type) {
196 case CSO_BLEND:
197 return delete_blend_state(ctx, state);
198 case CSO_SAMPLER:
199 return delete_sampler_state(ctx, state);
200 case CSO_DEPTH_STENCIL_ALPHA:
201 return delete_depth_stencil_state(ctx, state);
202 case CSO_RASTERIZER:
203 return delete_rasterizer_state(ctx, state);
204 case CSO_VELEMENTS:
205 return delete_vertex_elements(ctx, state);
206 default:
207 assert(0);
208 FREE(state);
209 }
210 return FALSE;
211 }
212
213 static INLINE void
214 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
215 int max_size, void *user_data)
216 {
217 struct cso_context *ctx = (struct cso_context *)user_data;
 218    /* If we've exceeded the maximum size, remove a fourth of the entries;
 219     * otherwise every subsequent call would trigger this same cleanup. */
220 int hash_size = cso_hash_size(hash);
221 int max_entries = (max_size > hash_size) ? max_size : hash_size;
222 int to_remove = (max_size < max_entries) * max_entries/4;
223 struct cso_hash_iter iter = cso_hash_first_node(hash);
224 if (hash_size > max_size)
225 to_remove += hash_size - max_size;
226 while (to_remove) {
 227       /* remove elements until we're good */
 228       /* FIXME: currently we pick the nodes to remove at random */
229 void *cso = cso_hash_iter_data(iter);
230 if (delete_cso(ctx, cso, type)) {
231 iter = cso_hash_erase(hash, iter);
232 --to_remove;
233 } else
234 iter = cso_hash_iter_next(iter);
235 }
236 }
237
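/*
 * Worked example of the eviction math above (illustrative numbers only):
 * with max_size = 100 and a hash that has grown to 130 entries,
 * max_entries is 130, so to_remove = 130/4 + (130 - 100) = 32 + 30 = 62.
 * That leaves about 68 entries, comfortably below the limit, so the very
 * next insertion does not immediately trigger another sweep.
 */
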
238 static void cso_init_vbuf(struct cso_context *cso)
239 {
240 struct u_vbuf_caps caps;
241
242 /* Install u_vbuf if there is anything unsupported. */
243 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
244 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
245 cso->aux_vertex_buffer_index);
246 }
247 }
248
249 struct cso_context *cso_create_context( struct pipe_context *pipe )
250 {
251 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
252 if (ctx == NULL)
253 goto out;
254
255 ctx->cache = cso_cache_create();
256 if (ctx->cache == NULL)
257 goto out;
258 cso_cache_set_sanitize_callback(ctx->cache,
259 sanitize_hash,
260 ctx);
261
262 ctx->pipe = pipe;
263 ctx->sample_mask = ~0;
264
265 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
266
267 cso_init_vbuf(ctx);
268
269 /* Enable for testing: */
270 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
271
272 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
273 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
274 ctx->has_geometry_shader = TRUE;
275 }
276 if (pipe->screen->get_param(pipe->screen,
277 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
278 ctx->has_streamout = TRUE;
279 }
280
281 return ctx;
282
283 out:
284 cso_destroy_context( ctx );
285 return NULL;
286 }
287
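/*
 * Typical lifetime as driven by a state tracker (illustrative sketch, not
 * part of this file):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *    ...
 *    cso_release_all(cso);      - unbind and unreference everything
 *    cso_destroy_context(cso);  - then free the wrapper itself
 */
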
288 /**
289 * Prior to context destruction, this function unbinds all state objects.
290 */
291 void cso_release_all( struct cso_context *ctx )
292 {
293 unsigned i, shader;
294
295 if (ctx->pipe) {
296 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
297 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
298
299 {
300 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
301 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
302 struct pipe_screen *scr = ctx->pipe->screen;
303 unsigned sh;
304 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
305 int maxsam = scr->get_shader_param(scr, sh,
306 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
307 int maxview = scr->get_shader_param(scr, sh,
308 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
309 assert(maxsam <= PIPE_MAX_SAMPLERS);
310 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
311 if (maxsam > 0) {
312 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
313 }
314 if (maxview > 0) {
315 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
316 }
317 }
318 }
319
320 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
321 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
322 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
323 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
324
325 if (ctx->has_streamout)
326 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
327 }
328
329 /* free fragment sampler views */
330 for (shader = 0; shader < Elements(ctx->samplers); shader++) {
331 struct sampler_info *info = &ctx->samplers[shader];
332 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
333 pipe_sampler_view_reference(&info->views[i], NULL);
334 pipe_sampler_view_reference(&info->views_saved[i], NULL);
335 }
336 }
337
338 util_unreference_framebuffer_state(&ctx->fb);
339 util_unreference_framebuffer_state(&ctx->fb_saved);
340
341 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
342 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
343
344 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
345 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
346 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
347 }
348
349 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
350 pipe_so_target_reference(&ctx->so_targets[i], NULL);
351 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
352 }
353
354 if (ctx->cache) {
355 cso_cache_delete( ctx->cache );
356 ctx->cache = NULL;
357 }
358 }
359
360
361 /**
362 * Free the CSO context. NOTE: the state tracker should have previously called
363 * cso_release_all().
364 */
365 void cso_destroy_context( struct cso_context *ctx )
366 {
367 if (ctx) {
368 if (ctx->vbuf)
369 u_vbuf_destroy(ctx->vbuf);
370 FREE( ctx );
371 }
372 }
373
374
 375 /* These functions will either find the state of the given template
 376  * in the cache, or they will create a new state from the template,
 377  * insert it in the cache and return it.
 378  */
379
 380 /*
 381  * If the driver returns 0 from the create method, the data member of
 382  * the cso will be the template itself.
 383  */
384
385 enum pipe_error cso_set_blend(struct cso_context *ctx,
386 const struct pipe_blend_state *templ)
387 {
388 unsigned key_size, hash_key;
389 struct cso_hash_iter iter;
390 void *handle;
391
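   /* Note: when independent_blend_enable is not set, only rt[0] of the
    * trailing per-render-target array is honored, so the hash key below
    * covers the struct only up to &rt[1]; templates that differ solely in
    * the unused rt[1..] entries then map to the same cached CSO.
    */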
392 key_size = templ->independent_blend_enable ?
393 sizeof(struct pipe_blend_state) :
394 (char *)&(templ->rt[1]) - (char *)templ;
395 hash_key = cso_construct_key((void*)templ, key_size);
396 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
397 (void*)templ, key_size);
398
399 if (cso_hash_iter_is_null(iter)) {
400 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
401 if (!cso)
402 return PIPE_ERROR_OUT_OF_MEMORY;
403
404 memset(&cso->state, 0, sizeof cso->state);
405 memcpy(&cso->state, templ, key_size);
406 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
407 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
408 cso->context = ctx->pipe;
409
410 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
411 if (cso_hash_iter_is_null(iter)) {
412 FREE(cso);
413 return PIPE_ERROR_OUT_OF_MEMORY;
414 }
415
416 handle = cso->data;
417 }
418 else {
419 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
420 }
421
422 if (ctx->blend != handle) {
423 ctx->blend = handle;
424 ctx->pipe->bind_blend_state(ctx->pipe, handle);
425 }
426 return PIPE_OK;
427 }
428
429 void cso_save_blend(struct cso_context *ctx)
430 {
431 assert(!ctx->blend_saved);
432 ctx->blend_saved = ctx->blend;
433 }
434
435 void cso_restore_blend(struct cso_context *ctx)
436 {
437 if (ctx->blend != ctx->blend_saved) {
438 ctx->blend = ctx->blend_saved;
439 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
440 }
441 ctx->blend_saved = NULL;
442 }
443
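/*
 * The save/restore pairs implement the one-deep state stack used by meta
 * operations (blits, clears, mipmap generation) to temporarily override
 * state.  Illustrative sketch only; 'blit_blend' is a hypothetical
 * caller-owned pipe_blend_state template:
 *
 *    cso_save_blend(cso);
 *    cso_set_blend(cso, &blit_blend);
 *    ... emit the meta-op draw ...
 *    cso_restore_blend(cso);
 */
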
444
445
446 enum pipe_error
447 cso_set_depth_stencil_alpha(struct cso_context *ctx,
448 const struct pipe_depth_stencil_alpha_state *templ)
449 {
450 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
451 unsigned hash_key = cso_construct_key((void*)templ, key_size);
452 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
453 hash_key,
454 CSO_DEPTH_STENCIL_ALPHA,
455 (void*)templ, key_size);
456 void *handle;
457
458 if (cso_hash_iter_is_null(iter)) {
459 struct cso_depth_stencil_alpha *cso =
460 MALLOC(sizeof(struct cso_depth_stencil_alpha));
461 if (!cso)
462 return PIPE_ERROR_OUT_OF_MEMORY;
463
464 memcpy(&cso->state, templ, sizeof(*templ));
465 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
466 &cso->state);
467 cso->delete_state =
468 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
469 cso->context = ctx->pipe;
470
471 iter = cso_insert_state(ctx->cache, hash_key,
472 CSO_DEPTH_STENCIL_ALPHA, cso);
473 if (cso_hash_iter_is_null(iter)) {
474 FREE(cso);
475 return PIPE_ERROR_OUT_OF_MEMORY;
476 }
477
478 handle = cso->data;
479 }
480 else {
481 handle = ((struct cso_depth_stencil_alpha *)
482 cso_hash_iter_data(iter))->data;
483 }
484
485 if (ctx->depth_stencil != handle) {
486 ctx->depth_stencil = handle;
487 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
488 }
489 return PIPE_OK;
490 }
491
492 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
493 {
494 assert(!ctx->depth_stencil_saved);
495 ctx->depth_stencil_saved = ctx->depth_stencil;
496 }
497
498 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
499 {
500 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
501 ctx->depth_stencil = ctx->depth_stencil_saved;
502 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
503 ctx->depth_stencil_saved);
504 }
505 ctx->depth_stencil_saved = NULL;
506 }
507
508
509
510 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
511 const struct pipe_rasterizer_state *templ)
512 {
513 unsigned key_size = sizeof(struct pipe_rasterizer_state);
514 unsigned hash_key = cso_construct_key((void*)templ, key_size);
515 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
516 hash_key,
517 CSO_RASTERIZER,
518 (void*)templ, key_size);
519 void *handle = NULL;
520
521 if (cso_hash_iter_is_null(iter)) {
522 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
523 if (!cso)
524 return PIPE_ERROR_OUT_OF_MEMORY;
525
526 memcpy(&cso->state, templ, sizeof(*templ));
527 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
528 cso->delete_state =
529 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
530 cso->context = ctx->pipe;
531
532 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
533 if (cso_hash_iter_is_null(iter)) {
534 FREE(cso);
535 return PIPE_ERROR_OUT_OF_MEMORY;
536 }
537
538 handle = cso->data;
539 }
540 else {
541 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
542 }
543
544 if (ctx->rasterizer != handle) {
545 ctx->rasterizer = handle;
546 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
547 }
548 return PIPE_OK;
549 }
550
551 void cso_save_rasterizer(struct cso_context *ctx)
552 {
553 assert(!ctx->rasterizer_saved);
554 ctx->rasterizer_saved = ctx->rasterizer;
555 }
556
557 void cso_restore_rasterizer(struct cso_context *ctx)
558 {
559 if (ctx->rasterizer != ctx->rasterizer_saved) {
560 ctx->rasterizer = ctx->rasterizer_saved;
561 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
562 }
563 ctx->rasterizer_saved = NULL;
564 }
565
566
567 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
568 {
569 if (ctx->fragment_shader != handle) {
570 ctx->fragment_shader = handle;
571 ctx->pipe->bind_fs_state(ctx->pipe, handle);
572 }
573 }
574
575 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
576 {
577 if (handle == ctx->fragment_shader) {
578 /* unbind before deleting */
579 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
580 ctx->fragment_shader = NULL;
581 }
582 ctx->pipe->delete_fs_state(ctx->pipe, handle);
583 }
584
585 void cso_save_fragment_shader(struct cso_context *ctx)
586 {
587 assert(!ctx->fragment_shader_saved);
588 ctx->fragment_shader_saved = ctx->fragment_shader;
589 }
590
591 void cso_restore_fragment_shader(struct cso_context *ctx)
592 {
593 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
594 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
595 ctx->fragment_shader = ctx->fragment_shader_saved;
596 }
597 ctx->fragment_shader_saved = NULL;
598 }
599
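/*
 * Unlike blend/DSA/rasterizer/sampler state, shaders are not cached or
 * deduplicated here; the caller creates the driver handle itself and only
 * hands it to the cso context for binding.  Illustrative sketch, assuming
 * the usual gallium create_fs_state() entry point and a caller-built
 * pipe_shader_state 'templ':
 *
 *    void *fs = pipe->create_fs_state(pipe, &templ);
 *    cso_set_fragment_shader_handle(cso, fs);
 *    ... draw ...
 *    cso_delete_fragment_shader(cso, fs);
 *
 * The delete helper unbinds the handle first if it is still bound.
 */
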
600
601 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
602 {
603 if (ctx->vertex_shader != handle) {
604 ctx->vertex_shader = handle;
605 ctx->pipe->bind_vs_state(ctx->pipe, handle);
606 }
607 }
608
609 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
610 {
611 if (handle == ctx->vertex_shader) {
612 /* unbind before deleting */
613 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
614 ctx->vertex_shader = NULL;
615 }
616 ctx->pipe->delete_vs_state(ctx->pipe, handle);
617 }
618
619 void cso_save_vertex_shader(struct cso_context *ctx)
620 {
621 assert(!ctx->vertex_shader_saved);
622 ctx->vertex_shader_saved = ctx->vertex_shader;
623 }
624
625 void cso_restore_vertex_shader(struct cso_context *ctx)
626 {
627 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
628 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
629 ctx->vertex_shader = ctx->vertex_shader_saved;
630 }
631 ctx->vertex_shader_saved = NULL;
632 }
633
634
635 void cso_set_framebuffer(struct cso_context *ctx,
636 const struct pipe_framebuffer_state *fb)
637 {
638 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
639 util_copy_framebuffer_state(&ctx->fb, fb);
640 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
641 }
642 }
643
644 void cso_save_framebuffer(struct cso_context *ctx)
645 {
646 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
647 }
648
649 void cso_restore_framebuffer(struct cso_context *ctx)
650 {
651 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
652 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
653 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
654 util_unreference_framebuffer_state(&ctx->fb_saved);
655 }
656 }
657
658
659 void cso_set_viewport(struct cso_context *ctx,
660 const struct pipe_viewport_state *vp)
661 {
662 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
663 ctx->vp = *vp;
664 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
665 }
666 }
667
668 void cso_save_viewport(struct cso_context *ctx)
669 {
670 ctx->vp_saved = ctx->vp;
671 }
672
673
674 void cso_restore_viewport(struct cso_context *ctx)
675 {
676 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
677 ctx->vp = ctx->vp_saved;
678 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
679 }
680 }
681
682
683 void cso_set_blend_color(struct cso_context *ctx,
684 const struct pipe_blend_color *bc)
685 {
686 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
687 ctx->blend_color = *bc;
688 ctx->pipe->set_blend_color(ctx->pipe, bc);
689 }
690 }
691
692 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
693 {
694 if (ctx->sample_mask != sample_mask) {
695 ctx->sample_mask = sample_mask;
696 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
697 }
698 }
699
700 void cso_save_sample_mask(struct cso_context *ctx)
701 {
702 ctx->sample_mask_saved = ctx->sample_mask;
703 }
704
705 void cso_restore_sample_mask(struct cso_context *ctx)
706 {
707 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
708 }
709
710 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
711 {
712 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
713 ctx->min_samples = min_samples;
714 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
715 }
716 }
717
718 void cso_save_min_samples(struct cso_context *ctx)
719 {
720 ctx->min_samples_saved = ctx->min_samples;
721 }
722
723 void cso_restore_min_samples(struct cso_context *ctx)
724 {
725 cso_set_min_samples(ctx, ctx->min_samples_saved);
726 }
727
728 void cso_set_stencil_ref(struct cso_context *ctx,
729 const struct pipe_stencil_ref *sr)
730 {
731 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
732 ctx->stencil_ref = *sr;
733 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
734 }
735 }
736
737 void cso_save_stencil_ref(struct cso_context *ctx)
738 {
739 ctx->stencil_ref_saved = ctx->stencil_ref;
740 }
741
742
743 void cso_restore_stencil_ref(struct cso_context *ctx)
744 {
745 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
746 sizeof(ctx->stencil_ref))) {
747 ctx->stencil_ref = ctx->stencil_ref_saved;
748 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
749 }
750 }
751
752 void cso_set_render_condition(struct cso_context *ctx,
753 struct pipe_query *query,
754 boolean condition, uint mode)
755 {
756 struct pipe_context *pipe = ctx->pipe;
757
758 if (ctx->render_condition != query ||
759 ctx->render_condition_mode != mode ||
760 ctx->render_condition_cond != condition) {
761 pipe->render_condition(pipe, query, condition, mode);
762 ctx->render_condition = query;
763 ctx->render_condition_cond = condition;
764 ctx->render_condition_mode = mode;
765 }
766 }
767
768 void cso_save_render_condition(struct cso_context *ctx)
769 {
770 ctx->render_condition_saved = ctx->render_condition;
771 ctx->render_condition_cond_saved = ctx->render_condition_cond;
772 ctx->render_condition_mode_saved = ctx->render_condition_mode;
773 }
774
775 void cso_restore_render_condition(struct cso_context *ctx)
776 {
777 cso_set_render_condition(ctx, ctx->render_condition_saved,
778 ctx->render_condition_cond_saved,
779 ctx->render_condition_mode_saved);
780 }
781
782 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
783 {
784 assert(ctx->has_geometry_shader || !handle);
785
786 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
787 ctx->geometry_shader = handle;
788 ctx->pipe->bind_gs_state(ctx->pipe, handle);
789 }
790 }
791
792 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
793 {
794 if (handle == ctx->geometry_shader) {
795 /* unbind before deleting */
796 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
797 ctx->geometry_shader = NULL;
798 }
799 ctx->pipe->delete_gs_state(ctx->pipe, handle);
800 }
801
802 void cso_save_geometry_shader(struct cso_context *ctx)
803 {
804 if (!ctx->has_geometry_shader) {
805 return;
806 }
807
808 assert(!ctx->geometry_shader_saved);
809 ctx->geometry_shader_saved = ctx->geometry_shader;
810 }
811
812 void cso_restore_geometry_shader(struct cso_context *ctx)
813 {
814 if (!ctx->has_geometry_shader) {
815 return;
816 }
817
818 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
819 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
820 ctx->geometry_shader = ctx->geometry_shader_saved;
821 }
822 ctx->geometry_shader_saved = NULL;
823 }
824
825 /* clip state */
826
827 static INLINE void
828 clip_state_cpy(struct pipe_clip_state *dst,
829 const struct pipe_clip_state *src)
830 {
831 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
832 }
833
834 static INLINE int
835 clip_state_cmp(const struct pipe_clip_state *a,
836 const struct pipe_clip_state *b)
837 {
838 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
839 }
840
841 void
842 cso_set_clip(struct cso_context *ctx,
843 const struct pipe_clip_state *clip)
844 {
845 if (clip_state_cmp(&ctx->clip, clip)) {
846 clip_state_cpy(&ctx->clip, clip);
847 ctx->pipe->set_clip_state(ctx->pipe, clip);
848 }
849 }
850
851 void
852 cso_save_clip(struct cso_context *ctx)
853 {
854 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
855 }
856
857 void
858 cso_restore_clip(struct cso_context *ctx)
859 {
860 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
861 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
862 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
863 }
864 }
865
866 enum pipe_error
867 cso_set_vertex_elements(struct cso_context *ctx,
868 unsigned count,
869 const struct pipe_vertex_element *states)
870 {
871 struct u_vbuf *vbuf = ctx->vbuf;
872 unsigned key_size, hash_key;
873 struct cso_hash_iter iter;
874 void *handle;
875 struct cso_velems_state velems_state;
876
877 if (vbuf) {
878 u_vbuf_set_vertex_elements(vbuf, count, states);
879 return PIPE_OK;
880 }
881
 882    /* Need to include the count in the stored state data too.
 883     * Otherwise the first 'count' pipe_vertex_elements could be identical
 884     * even if the counts differ, and there's no guarantee the hash would
 885     * be different in that case either.
 886     */
887 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
888 velems_state.count = count;
889 memcpy(velems_state.velems, states,
890 sizeof(struct pipe_vertex_element) * count);
891 hash_key = cso_construct_key((void*)&velems_state, key_size);
892 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
893 (void*)&velems_state, key_size);
894
895 if (cso_hash_iter_is_null(iter)) {
896 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
897 if (!cso)
898 return PIPE_ERROR_OUT_OF_MEMORY;
899
900 memcpy(&cso->state, &velems_state, key_size);
901 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
902 &cso->state.velems[0]);
903 cso->delete_state =
904 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
905 cso->context = ctx->pipe;
906
907 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
908 if (cso_hash_iter_is_null(iter)) {
909 FREE(cso);
910 return PIPE_ERROR_OUT_OF_MEMORY;
911 }
912
913 handle = cso->data;
914 }
915 else {
916 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
917 }
918
919 if (ctx->velements != handle) {
920 ctx->velements = handle;
921 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
922 }
923 return PIPE_OK;
924 }
925
926 void cso_save_vertex_elements(struct cso_context *ctx)
927 {
928 struct u_vbuf *vbuf = ctx->vbuf;
929
930 if (vbuf) {
931 u_vbuf_save_vertex_elements(vbuf);
932 return;
933 }
934
935 assert(!ctx->velements_saved);
936 ctx->velements_saved = ctx->velements;
937 }
938
939 void cso_restore_vertex_elements(struct cso_context *ctx)
940 {
941 struct u_vbuf *vbuf = ctx->vbuf;
942
943 if (vbuf) {
944 u_vbuf_restore_vertex_elements(vbuf);
945 return;
946 }
947
948 if (ctx->velements != ctx->velements_saved) {
949 ctx->velements = ctx->velements_saved;
950 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
951 }
952 ctx->velements_saved = NULL;
953 }
954
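/*
 * Illustrative sketch of building a vertex-elements CSO for two interleaved
 * attributes (position + color); the PIPE_FORMAT names are the standard
 * gallium enums and the layout is hypothetical:
 *
 *    struct pipe_vertex_element ve[2];
 *    memset(ve, 0, sizeof(ve));
 *    ve[0].src_format = PIPE_FORMAT_R32G32B32_FLOAT;
 *    ve[0].src_offset = 0;
 *    ve[1].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
 *    ve[1].src_offset = 3 * sizeof(float);
 *    cso_set_vertex_elements(cso, 2, ve);
 */
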
955 /* vertex buffers */
956
957 void cso_set_vertex_buffers(struct cso_context *ctx,
958 unsigned start_slot, unsigned count,
959 const struct pipe_vertex_buffer *buffers)
960 {
961 struct u_vbuf *vbuf = ctx->vbuf;
962
963 if (vbuf) {
964 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
965 return;
966 }
967
 968    /* Keep track of what's bound in the auxiliary slot, so that we can
 969     * save and restore it for meta ops. */
970 if (start_slot <= ctx->aux_vertex_buffer_index &&
971 start_slot+count > ctx->aux_vertex_buffer_index) {
972 if (buffers) {
973 const struct pipe_vertex_buffer *vb =
974 buffers + (ctx->aux_vertex_buffer_index - start_slot);
975
976 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
977 vb->buffer);
978 memcpy(&ctx->aux_vertex_buffer_current, vb,
979 sizeof(struct pipe_vertex_buffer));
980 }
981 else {
982 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
983 NULL);
984 ctx->aux_vertex_buffer_current.user_buffer = NULL;
985 }
986 }
987
988 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
989 }
990
991 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
992 {
993 struct u_vbuf *vbuf = ctx->vbuf;
994
995 if (vbuf) {
996 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
997 return;
998 }
999
1000 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1001 ctx->aux_vertex_buffer_current.buffer);
1002 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1003 sizeof(struct pipe_vertex_buffer));
1004 }
1005
1006 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1007 {
1008 struct u_vbuf *vbuf = ctx->vbuf;
1009
1010 if (vbuf) {
1011 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1012 return;
1013 }
1014
1015 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1016 &ctx->aux_vertex_buffer_saved);
1017 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1018 }
1019
1020 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1021 {
1022 return ctx->aux_vertex_buffer_index;
1023 }
1024
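/*
 * The auxiliary vertex buffer slot lets meta ops bind their own vertex data
 * without clobbering the application's bindings.  Illustrative sketch;
 * 'meta_vb' is a hypothetical pipe_vertex_buffer filled in by the caller:
 *
 *    unsigned slot = cso_get_aux_vertex_buffer_slot(cso);
 *    cso_save_aux_vertex_buffer_slot(cso);
 *    cso_set_vertex_buffers(cso, slot, 1, &meta_vb);
 *    ... draw ...
 *    cso_restore_aux_vertex_buffer_slot(cso);
 */
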
1025
1026 /**************** fragment/vertex sampler view state *************************/
1027
1028 static enum pipe_error
1029 single_sampler(struct cso_context *ctx,
1030 struct sampler_info *info,
1031 unsigned idx,
1032 const struct pipe_sampler_state *templ)
1033 {
1034 void *handle = NULL;
1035
1036 if (templ != NULL) {
1037 unsigned key_size = sizeof(struct pipe_sampler_state);
1038 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1039 struct cso_hash_iter iter =
1040 cso_find_state_template(ctx->cache,
1041 hash_key, CSO_SAMPLER,
1042 (void *) templ, key_size);
1043
1044 if (cso_hash_iter_is_null(iter)) {
1045 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1046 if (!cso)
1047 return PIPE_ERROR_OUT_OF_MEMORY;
1048
1049 memcpy(&cso->state, templ, sizeof(*templ));
1050 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1051 cso->delete_state =
1052 (cso_state_callback) ctx->pipe->delete_sampler_state;
1053 cso->context = ctx->pipe;
1054
1055 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1056 if (cso_hash_iter_is_null(iter)) {
1057 FREE(cso);
1058 return PIPE_ERROR_OUT_OF_MEMORY;
1059 }
1060
1061 handle = cso->data;
1062 }
1063 else {
1064 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1065 }
1066 }
1067
1068 info->samplers[idx] = handle;
1069
1070 return PIPE_OK;
1071 }
1072
1073 enum pipe_error
1074 cso_single_sampler(struct cso_context *ctx,
1075 unsigned shader_stage,
1076 unsigned idx,
1077 const struct pipe_sampler_state *templ)
1078 {
1079 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1080 }
1081
1082
1083
1084 static void
1085 single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1086 {
1087 struct sampler_info *info = &ctx->samplers[shader_stage];
1088 unsigned i;
1089
1090 /* find highest non-null sampler */
1091 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1092 if (info->samplers[i - 1] != NULL)
1093 break;
1094 }
1095
1096 info->nr_samplers = i;
1097
1098 if (info->hw.nr_samplers != info->nr_samplers ||
1099 memcmp(info->hw.samplers,
1100 info->samplers,
1101 info->nr_samplers * sizeof(void *)) != 0)
1102 {
1103 memcpy(info->hw.samplers,
1104 info->samplers,
1105 info->nr_samplers * sizeof(void *));
1106
1107 /* set remaining slots/pointers to null */
1108 for (i = info->nr_samplers; i < info->hw.nr_samplers; i++)
1109 info->samplers[i] = NULL;
1110
1111 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1112 MAX2(info->nr_samplers,
1113 info->hw.nr_samplers),
1114 info->samplers);
1115
1116 info->hw.nr_samplers = info->nr_samplers;
1117 }
1118 }
1119
1120 void
1121 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1122 {
1123 single_sampler_done(ctx, shader_stage);
1124 }
1125
1126
 1127 /*
 1128  * If the function encounters any errors, it will return the
 1129  * last one. This is done so that we always try to set as many
 1130  * samplers as possible.
 1131  */
1132 enum pipe_error
1133 cso_set_samplers(struct cso_context *ctx,
1134 unsigned shader_stage,
1135 unsigned nr,
1136 const struct pipe_sampler_state **templates)
1137 {
1138 struct sampler_info *info = &ctx->samplers[shader_stage];
1139 unsigned i;
1140 enum pipe_error temp, error = PIPE_OK;
1141
1142 /* TODO: fastpath
1143 */
1144
1145 for (i = 0; i < nr; i++) {
1146 temp = single_sampler(ctx, info, i, templates[i]);
1147 if (temp != PIPE_OK)
1148 error = temp;
1149 }
1150
1151 for ( ; i < info->nr_samplers; i++) {
1152 temp = single_sampler(ctx, info, i, NULL);
1153 if (temp != PIPE_OK)
1154 error = temp;
1155 }
1156
1157 single_sampler_done(ctx, shader_stage);
1158
1159 return error;
1160 }
1161
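/*
 * Illustrative sketch of binding a single fragment sampler through the
 * cache; 'templ' is a caller-owned template and the PIPE_TEX_* values are
 * the standard gallium enums:
 *
 *    struct pipe_sampler_state templ;
 *    memset(&templ, 0, sizeof(templ));
 *    templ.wrap_s = templ.wrap_t = templ.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
 *    templ.min_img_filter = PIPE_TEX_FILTER_LINEAR;
 *    templ.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &templ);
 *    cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
 */
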
1162 void
1163 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1164 {
1165 struct sampler_info *info = &ctx->samplers[shader_stage];
1166 info->nr_samplers_saved = info->nr_samplers;
1167 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1168 }
1169
1170
1171 void
1172 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1173 {
1174 struct sampler_info *info = &ctx->samplers[shader_stage];
1175 info->nr_samplers = info->nr_samplers_saved;
1176 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1177 single_sampler_done(ctx, shader_stage);
1178 }
1179
1180
1181 void
1182 cso_set_sampler_views(struct cso_context *ctx,
1183 unsigned shader_stage,
1184 unsigned count,
1185 struct pipe_sampler_view **views)
1186 {
1187 struct sampler_info *info = &ctx->samplers[shader_stage];
1188 unsigned i;
1189 boolean any_change = FALSE;
1190
1191 /* reference new views */
1192 for (i = 0; i < count; i++) {
1193 any_change |= info->views[i] != views[i];
1194 pipe_sampler_view_reference(&info->views[i], views[i]);
1195 }
1196 /* unref extra old views, if any */
1197 for (; i < info->nr_views; i++) {
1198 any_change |= info->views[i] != NULL;
1199 pipe_sampler_view_reference(&info->views[i], NULL);
1200 }
1201
1202 /* bind the new sampler views */
1203 if (any_change) {
1204 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1205 MAX2(info->nr_views, count),
1206 info->views);
1207 }
1208
1209 info->nr_views = count;
1210 }
1211
1212
1213 void
1214 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1215 {
1216 struct sampler_info *info = &ctx->samplers[shader_stage];
1217 unsigned i;
1218
1219 info->nr_views_saved = info->nr_views;
1220
1221 for (i = 0; i < info->nr_views; i++) {
1222 assert(!info->views_saved[i]);
1223 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1224 }
1225 }
1226
1227
1228 void
1229 cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1230 {
1231 struct sampler_info *info = &ctx->samplers[shader_stage];
1232 unsigned i, nr_saved = info->nr_views_saved;
1233 unsigned num;
1234
1235 for (i = 0; i < nr_saved; i++) {
1236 pipe_sampler_view_reference(&info->views[i], NULL);
1237 /* move the reference from one pointer to another */
1238 info->views[i] = info->views_saved[i];
1239 info->views_saved[i] = NULL;
1240 }
1241 for (; i < info->nr_views; i++) {
1242 pipe_sampler_view_reference(&info->views[i], NULL);
1243 }
1244
1245 num = MAX2(info->nr_views, nr_saved);
1246
1247 /* bind the old/saved sampler views */
1248 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, num, info->views);
1249
1250 info->nr_views = nr_saved;
1251 info->nr_views_saved = 0;
1252 }
1253
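/*
 * Sampler views are created by the caller with the driver and are only
 * reference-counted and bound here.  Illustrative sketch, assuming the
 * standard create_sampler_view() entry point and the u_sampler helper;
 * 'tex' is a hypothetical pipe_resource:
 *
 *    struct pipe_sampler_view templ, *view;
 *    u_sampler_view_default_template(&templ, tex, tex->format);
 *    view = pipe->create_sampler_view(pipe, tex, &templ);
 *    cso_set_sampler_views(cso, PIPE_SHADER_FRAGMENT, 1, &view);
 */
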
1254
1255 void
1256 cso_set_stream_outputs(struct cso_context *ctx,
1257 unsigned num_targets,
1258 struct pipe_stream_output_target **targets,
1259 const unsigned *offsets)
1260 {
1261 struct pipe_context *pipe = ctx->pipe;
1262 uint i;
1263
1264 if (!ctx->has_streamout) {
1265 assert(num_targets == 0);
1266 return;
1267 }
1268
1269 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1270 /* Nothing to do. */
1271 return;
1272 }
1273
1274 /* reference new targets */
1275 for (i = 0; i < num_targets; i++) {
1276 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1277 }
1278 /* unref extra old targets, if any */
1279 for (; i < ctx->nr_so_targets; i++) {
1280 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1281 }
1282
1283 pipe->set_stream_output_targets(pipe, num_targets, targets,
1284 offsets);
1285 ctx->nr_so_targets = num_targets;
1286 }
1287
1288 void
1289 cso_save_stream_outputs(struct cso_context *ctx)
1290 {
1291 uint i;
1292
1293 if (!ctx->has_streamout) {
1294 return;
1295 }
1296
1297 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1298
1299 for (i = 0; i < ctx->nr_so_targets; i++) {
1300 assert(!ctx->so_targets_saved[i]);
1301 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1302 }
1303 }
1304
1305 void
1306 cso_restore_stream_outputs(struct cso_context *ctx)
1307 {
1308 struct pipe_context *pipe = ctx->pipe;
1309 uint i;
1310 unsigned offset[PIPE_MAX_SO_BUFFERS];
1311
1312 if (!ctx->has_streamout) {
1313 return;
1314 }
1315
1316 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1317 /* Nothing to do. */
1318 return;
1319 }
1320
1321 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1322 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1323 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1324 /* move the reference from one pointer to another */
1325 ctx->so_targets[i] = ctx->so_targets_saved[i];
1326 ctx->so_targets_saved[i] = NULL;
1327 /* -1 means append */
1328 offset[i] = (unsigned)-1;
1329 }
1330 for (; i < ctx->nr_so_targets; i++) {
1331 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1332 }
1333
1334 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1335 ctx->so_targets, offset);
1336
1337 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1338 ctx->nr_so_targets_saved = 0;
1339 }
1340
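/*
 * Illustrative sketch of binding a single stream-output target, assuming
 * the standard create_stream_output_target() entry point; 'buf' is a
 * hypothetical buffer resource:
 *
 *    struct pipe_stream_output_target *tgt =
 *       pipe->create_stream_output_target(pipe, buf, 0, buf->width0);
 *    unsigned offset = 0;
 *    cso_set_stream_outputs(cso, 1, &tgt, &offset);
 *    ... draw ...
 *    cso_set_stream_outputs(cso, 0, NULL, NULL);
 */
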
1341 /* constant buffers */
1342
1343 void
1344 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1345 unsigned index, struct pipe_constant_buffer *cb)
1346 {
1347 struct pipe_context *pipe = cso->pipe;
1348
1349 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1350
1351 if (index == 0) {
1352 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1353 }
1354 }
1355
1356 void
1357 cso_set_constant_buffer_resource(struct cso_context *cso,
1358 unsigned shader_stage,
1359 unsigned index,
1360 struct pipe_resource *buffer)
1361 {
1362 if (buffer) {
1363 struct pipe_constant_buffer cb;
1364 cb.buffer = buffer;
1365 cb.buffer_offset = 0;
1366 cb.buffer_size = buffer->width0;
1367 cb.user_buffer = NULL;
1368 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1369 } else {
1370 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1371 }
1372 }
1373
1374 void
1375 cso_save_constant_buffer_slot0(struct cso_context *cso,
1376 unsigned shader_stage)
1377 {
1378 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1379 &cso->aux_constbuf_current[shader_stage]);
1380 }
1381
1382 void
1383 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1384 unsigned shader_stage)
1385 {
1386 cso_set_constant_buffer(cso, shader_stage, 0,
1387 &cso->aux_constbuf_saved[shader_stage]);
1388 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1389 NULL);
1390 }
1391
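/*
 * Only constant buffer slot 0 is tracked for save/restore.  Illustrative
 * sketch of wrapping a meta op; 'const_buf' is a hypothetical pipe_resource
 * holding the meta op's constants:
 *
 *    cso_save_constant_buffer_slot0(cso, PIPE_SHADER_FRAGMENT);
 *    cso_set_constant_buffer_resource(cso, PIPE_SHADER_FRAGMENT, 0, const_buf);
 *    ... draw ...
 *    cso_restore_constant_buffer_slot0(cso, PIPE_SHADER_FRAGMENT);
 */
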
1392 /* drawing */
1393
1394 void
1395 cso_set_index_buffer(struct cso_context *cso,
1396 const struct pipe_index_buffer *ib)
1397 {
1398 struct u_vbuf *vbuf = cso->vbuf;
1399
1400 if (vbuf) {
1401 u_vbuf_set_index_buffer(vbuf, ib);
1402 } else {
1403 struct pipe_context *pipe = cso->pipe;
1404 pipe->set_index_buffer(pipe, ib);
1405 }
1406 }
1407
1408 void
1409 cso_draw_vbo(struct cso_context *cso,
1410 const struct pipe_draw_info *info)
1411 {
1412 struct u_vbuf *vbuf = cso->vbuf;
1413
1414 if (vbuf) {
1415 u_vbuf_draw_vbo(vbuf, info);
1416 } else {
1417 struct pipe_context *pipe = cso->pipe;
1418 pipe->draw_vbo(pipe, info);
1419 }
1420 }
1421
1422 void
1423 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1424 {
1425 struct pipe_draw_info info;
1426
1427 util_draw_init_info(&info);
1428
1429 info.mode = mode;
1430 info.start = start;
1431 info.count = count;
1432 info.min_index = start;
1433 info.max_index = start + count - 1;
1434
1435 cso_draw_vbo(cso, &info);
1436 }
1437
1438 void
1439 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1440 uint start, uint count,
1441 uint start_instance, uint instance_count)
1442 {
1443 struct pipe_draw_info info;
1444
1445 util_draw_init_info(&info);
1446
1447 info.mode = mode;
1448 info.start = start;
1449 info.count = count;
1450 info.min_index = start;
1451 info.max_index = start + count - 1;
1452 info.start_instance = start_instance;
1453 info.instance_count = instance_count;
1454
1455 cso_draw_vbo(cso, &info);
1456 }
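
/*
 * Illustrative end-to-end draw through the wrapper:
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *
 * cso_draw_arrays() fills in a pipe_draw_info and goes through
 * cso_draw_vbo(), so the draw is routed via u_vbuf for vertex format
 * translation whenever u_vbuf was installed at context creation.
 */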