gallium: add condition parameter to render_condition
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these per shader stage (fragment, vertex, geometry).
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info samplers[PIPE_SHADER_TYPES];
88
89 struct pipe_vertex_buffer aux_vertex_buffer_current;
90 struct pipe_vertex_buffer aux_vertex_buffer_saved;
91 unsigned aux_vertex_buffer_index;
92
93 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
94 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved;
109 void *vertex_shader, *vertex_shader_saved;
110 void *geometry_shader, *geometry_shader_saved;
111 void *velements, *velements_saved;
112 struct pipe_query *render_condition, *render_condition_saved;
113 uint render_condition_mode, render_condition_mode_saved;
114 boolean render_condition_cond, render_condition_cond_saved;
115
116 struct pipe_clip_state clip;
117 struct pipe_clip_state clip_saved;
118
119 struct pipe_framebuffer_state fb, fb_saved;
120 struct pipe_viewport_state vp, vp_saved;
121 struct pipe_blend_color blend_color;
122 unsigned sample_mask, sample_mask_saved;
123 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
124 };
125
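/* Illustrative sketch (not part of the original file): the *_saved members
 * above form a 1-deep stack, so a typical meta operation brackets its state
 * changes with matching save/restore calls.  The function below is
 * hypothetical caller code; 'blit_fs' and 'blit_blend' are placeholders
 * supplied by the caller.
 */
#if 0
static void
example_meta_op(struct cso_context *cso, void *blit_fs,
                const struct pipe_blend_state *blit_blend)
{
   cso_save_blend(cso);
   cso_save_fragment_shader(cso);

   cso_set_blend(cso, blit_blend);
   cso_set_fragment_shader_handle(cso, blit_fs);

   /* ... emit the meta-op draws here ... */

   cso_restore_fragment_shader(cso);
   cso_restore_blend(cso);
}
#endif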
126
127 static boolean delete_blend_state(struct cso_context *ctx, void *state)
128 {
129 struct cso_blend *cso = (struct cso_blend *)state;
130
131 if (ctx->blend == cso->data)
132 return FALSE;
133
134 if (cso->delete_state)
135 cso->delete_state(cso->context, cso->data);
136 FREE(state);
137 return TRUE;
138 }
139
140 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
141 {
142 struct cso_depth_stencil_alpha *cso =
143 (struct cso_depth_stencil_alpha *)state;
144
145 if (ctx->depth_stencil == cso->data)
146 return FALSE;
147
148 if (cso->delete_state)
149 cso->delete_state(cso->context, cso->data);
150 FREE(state);
151
152 return TRUE;
153 }
154
155 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
156 {
157 struct cso_sampler *cso = (struct cso_sampler *)state;
158 if (cso->delete_state)
159 cso->delete_state(cso->context, cso->data);
160 FREE(state);
161 return TRUE;
162 }
163
164 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
165 {
166 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
167
168 if (ctx->rasterizer == cso->data)
169 return FALSE;
170 if (cso->delete_state)
171 cso->delete_state(cso->context, cso->data);
172 FREE(state);
173 return TRUE;
174 }
175
176 static boolean delete_vertex_elements(struct cso_context *ctx,
177 void *state)
178 {
179 struct cso_velements *cso = (struct cso_velements *)state;
180
181 if (ctx->velements == cso->data)
182 return FALSE;
183
184 if (cso->delete_state)
185 cso->delete_state(cso->context, cso->data);
186 FREE(state);
187 return TRUE;
188 }
189
190
191 static INLINE boolean delete_cso(struct cso_context *ctx,
192 void *state, enum cso_cache_type type)
193 {
194 switch (type) {
195 case CSO_BLEND:
196 return delete_blend_state(ctx, state);
197 case CSO_SAMPLER:
198 return delete_sampler_state(ctx, state);
199 case CSO_DEPTH_STENCIL_ALPHA:
200 return delete_depth_stencil_state(ctx, state);
201 case CSO_RASTERIZER:
202 return delete_rasterizer_state(ctx, state);
203 case CSO_VELEMENTS:
204 return delete_vertex_elements(ctx, state);
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
212 static INLINE void
213 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
214 int max_size, void *user_data)
215 {
216 struct cso_context *ctx = (struct cso_context *)user_data;
217 /* If we're approaching the maximum size, remove a fourth of the entries;
218 * otherwise every subsequent call would go through this same process. */
219 int hash_size = cso_hash_size(hash);
220 int max_entries = (max_size > hash_size) ? max_size : hash_size;
221 int to_remove = (max_size < max_entries) * max_entries/4;
222 struct cso_hash_iter iter = cso_hash_first_node(hash);
223 if (hash_size > max_size)
224 to_remove += hash_size - max_size;
225 while (to_remove) {
226 /* remove elements until we're below the limit */
227 /* FIXME: currently we pick the nodes to remove essentially at random */
228 void *cso = cso_hash_iter_data(iter);
229 if (delete_cso(ctx, cso, type)) {
230 iter = cso_hash_erase(hash, iter);
231 --to_remove;
232 } else
233 iter = cso_hash_iter_next(iter);
234 }
235 }
236
237 static void cso_init_vbuf(struct cso_context *cso)
238 {
239 struct u_vbuf_caps caps;
240
241 u_vbuf_get_caps(cso->pipe->screen, &caps);
242
243 /* Install u_vbuf if there is anything unsupported. */
244 if (!caps.buffer_offset_unaligned ||
245 !caps.buffer_stride_unaligned ||
246 !caps.velem_src_offset_unaligned ||
247 !caps.format_fixed32 ||
248 !caps.format_float16 ||
249 !caps.format_float64 ||
250 !caps.format_norm32 ||
251 !caps.format_scaled32 ||
252 !caps.user_vertex_buffers) {
253 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
254 cso->aux_vertex_buffer_index);
255 }
256 }
257
258 struct cso_context *cso_create_context( struct pipe_context *pipe )
259 {
260 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
261 if (ctx == NULL)
262 goto out;
263
264 ctx->cache = cso_cache_create();
265 if (ctx->cache == NULL)
266 goto out;
267 cso_cache_set_sanitize_callback(ctx->cache,
268 sanitize_hash,
269 ctx);
270
271 ctx->pipe = pipe;
272 ctx->sample_mask_saved = ~0;
273
274 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
275
276 cso_init_vbuf(ctx);
277
278 /* Enable for testing: */
279 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
280
281 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
282 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
283 ctx->has_geometry_shader = TRUE;
284 }
285 if (pipe->screen->get_param(pipe->screen,
286 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
287 ctx->has_streamout = TRUE;
288 }
289
290 return ctx;
291
292 out:
293 cso_destroy_context( ctx );
294 return NULL;
295 }
296
297 /**
298 * Prior to context destruction, this function unbinds all state objects.
299 */
300 void cso_release_all( struct cso_context *ctx )
301 {
302 unsigned i, shader;
303
304 if (ctx->pipe) {
305 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
306 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
307 ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
308 if (ctx->pipe->bind_vertex_sampler_states)
309 ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
310 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
311 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
312 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
313 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
314 ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
315 if (ctx->pipe->set_vertex_sampler_views)
316 ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
317 if (ctx->pipe->set_stream_output_targets)
318 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
319 }
320
321 /* free sampler views for each shader stage */
322 for (shader = 0; shader < Elements(ctx->samplers); shader++) {
323 struct sampler_info *info = &ctx->samplers[shader];
324 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
325 pipe_sampler_view_reference(&info->views[i], NULL);
326 pipe_sampler_view_reference(&info->views_saved[i], NULL);
327 }
328 }
329
330 util_unreference_framebuffer_state(&ctx->fb);
331 util_unreference_framebuffer_state(&ctx->fb_saved);
332
333 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
334 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
335
336 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
337 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
338 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
339 }
340
341 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
342 pipe_so_target_reference(&ctx->so_targets[i], NULL);
343 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
344 }
345
346 if (ctx->cache) {
347 cso_cache_delete( ctx->cache );
348 ctx->cache = NULL;
349 }
350 }
351
352
353 /**
354 * Free the CSO context. NOTE: the state tracker should have previously called
355 * cso_release_all().
356 */
357 void cso_destroy_context( struct cso_context *ctx )
358 {
359 if (ctx) {
360 if (ctx->vbuf)
361 u_vbuf_destroy(ctx->vbuf);
362 FREE( ctx );
363 }
364 }
365
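/* Illustrative sketch (not part of the original file): the expected lifecycle
 * is create -> use -> release all bound state -> destroy.  'pipe' is assumed
 * to be a valid pipe_context owned by the caller.
 */
#if 0
static void
example_lifecycle(struct pipe_context *pipe)
{
   struct cso_context *cso = cso_create_context(pipe);
   if (!cso)
      return;

   /* ... bind state and draw through the cso_* wrappers ... */

   cso_release_all(cso);      /* unbind and unreference everything */
   cso_destroy_context(cso);  /* then free the wrapper itself */
}
#endif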
366
367 /* These functions will either find the state matching the given template
368 * in the cache, or they will create a new state from the template,
369 * insert it into the cache and return it.
370 */
371
372 /*
373 * If the driver returns 0 (NULL) from the create method, the data member
374 * of the cso is set to point at the template itself.
375 */
376
377 enum pipe_error cso_set_blend(struct cso_context *ctx,
378 const struct pipe_blend_state *templ)
379 {
380 unsigned key_size, hash_key;
381 struct cso_hash_iter iter;
382 void *handle;
383
384 key_size = templ->independent_blend_enable ?
385 sizeof(struct pipe_blend_state) :
386 (char *)&(templ->rt[1]) - (char *)templ;
387 hash_key = cso_construct_key((void*)templ, key_size);
388 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
389 (void*)templ, key_size);
390
391 if (cso_hash_iter_is_null(iter)) {
392 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
393 if (!cso)
394 return PIPE_ERROR_OUT_OF_MEMORY;
395
396 memset(&cso->state, 0, sizeof cso->state);
397 memcpy(&cso->state, templ, key_size);
398 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
399 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
400 cso->context = ctx->pipe;
401
402 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
403 if (cso_hash_iter_is_null(iter)) {
404 FREE(cso);
405 return PIPE_ERROR_OUT_OF_MEMORY;
406 }
407
408 handle = cso->data;
409 }
410 else {
411 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
412 }
413
414 if (ctx->blend != handle) {
415 ctx->blend = handle;
416 ctx->pipe->bind_blend_state(ctx->pipe, handle);
417 }
418 return PIPE_OK;
419 }
420
421 void cso_save_blend(struct cso_context *ctx)
422 {
423 assert(!ctx->blend_saved);
424 ctx->blend_saved = ctx->blend;
425 }
426
427 void cso_restore_blend(struct cso_context *ctx)
428 {
429 if (ctx->blend != ctx->blend_saved) {
430 ctx->blend = ctx->blend_saved;
431 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
432 }
433 ctx->blend_saved = NULL;
434 }
435
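/* Illustrative sketch (not part of the original file): because cso_set_blend()
 * hashes the template, repeated calls with identical templates bind the same
 * driver object and only the first call pays the create cost.  The values
 * below are just example settings for additive blending.
 */
#if 0
static enum pipe_error
example_additive_blend(struct cso_context *cso)
{
   struct pipe_blend_state blend;

   memset(&blend, 0, sizeof(blend));
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].colormask = PIPE_MASK_RGBA;

   return cso_set_blend(cso, &blend);
}
#endif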
436
437
438 enum pipe_error
439 cso_set_depth_stencil_alpha(struct cso_context *ctx,
440 const struct pipe_depth_stencil_alpha_state *templ)
441 {
442 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
443 unsigned hash_key = cso_construct_key((void*)templ, key_size);
444 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
445 hash_key,
446 CSO_DEPTH_STENCIL_ALPHA,
447 (void*)templ, key_size);
448 void *handle;
449
450 if (cso_hash_iter_is_null(iter)) {
451 struct cso_depth_stencil_alpha *cso =
452 MALLOC(sizeof(struct cso_depth_stencil_alpha));
453 if (!cso)
454 return PIPE_ERROR_OUT_OF_MEMORY;
455
456 memcpy(&cso->state, templ, sizeof(*templ));
457 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
458 &cso->state);
459 cso->delete_state =
460 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
461 cso->context = ctx->pipe;
462
463 iter = cso_insert_state(ctx->cache, hash_key,
464 CSO_DEPTH_STENCIL_ALPHA, cso);
465 if (cso_hash_iter_is_null(iter)) {
466 FREE(cso);
467 return PIPE_ERROR_OUT_OF_MEMORY;
468 }
469
470 handle = cso->data;
471 }
472 else {
473 handle = ((struct cso_depth_stencil_alpha *)
474 cso_hash_iter_data(iter))->data;
475 }
476
477 if (ctx->depth_stencil != handle) {
478 ctx->depth_stencil = handle;
479 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
480 }
481 return PIPE_OK;
482 }
483
484 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
485 {
486 assert(!ctx->depth_stencil_saved);
487 ctx->depth_stencil_saved = ctx->depth_stencil;
488 }
489
490 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
491 {
492 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
493 ctx->depth_stencil = ctx->depth_stencil_saved;
494 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
495 ctx->depth_stencil_saved);
496 }
497 ctx->depth_stencil_saved = NULL;
498 }
499
500
501
502 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
503 const struct pipe_rasterizer_state *templ)
504 {
505 unsigned key_size = sizeof(struct pipe_rasterizer_state);
506 unsigned hash_key = cso_construct_key((void*)templ, key_size);
507 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
508 hash_key,
509 CSO_RASTERIZER,
510 (void*)templ, key_size);
511 void *handle = NULL;
512
513 if (cso_hash_iter_is_null(iter)) {
514 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
515 if (!cso)
516 return PIPE_ERROR_OUT_OF_MEMORY;
517
518 memcpy(&cso->state, templ, sizeof(*templ));
519 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
520 cso->delete_state =
521 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
522 cso->context = ctx->pipe;
523
524 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
525 if (cso_hash_iter_is_null(iter)) {
526 FREE(cso);
527 return PIPE_ERROR_OUT_OF_MEMORY;
528 }
529
530 handle = cso->data;
531 }
532 else {
533 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
534 }
535
536 if (ctx->rasterizer != handle) {
537 ctx->rasterizer = handle;
538 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
539 }
540 return PIPE_OK;
541 }
542
543 void cso_save_rasterizer(struct cso_context *ctx)
544 {
545 assert(!ctx->rasterizer_saved);
546 ctx->rasterizer_saved = ctx->rasterizer;
547 }
548
549 void cso_restore_rasterizer(struct cso_context *ctx)
550 {
551 if (ctx->rasterizer != ctx->rasterizer_saved) {
552 ctx->rasterizer = ctx->rasterizer_saved;
553 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
554 }
555 ctx->rasterizer_saved = NULL;
556 }
557
558
559 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
560 {
561 if (ctx->fragment_shader != handle) {
562 ctx->fragment_shader = handle;
563 ctx->pipe->bind_fs_state(ctx->pipe, handle);
564 }
565 }
566
567 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
568 {
569 if (handle == ctx->fragment_shader) {
570 /* unbind before deleting */
571 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
572 ctx->fragment_shader = NULL;
573 }
574 ctx->pipe->delete_fs_state(ctx->pipe, handle);
575 }
576
577 void cso_save_fragment_shader(struct cso_context *ctx)
578 {
579 assert(!ctx->fragment_shader_saved);
580 ctx->fragment_shader_saved = ctx->fragment_shader;
581 }
582
583 void cso_restore_fragment_shader(struct cso_context *ctx)
584 {
585 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
586 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
587 ctx->fragment_shader = ctx->fragment_shader_saved;
588 }
589 ctx->fragment_shader_saved = NULL;
590 }
591
592
593 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
594 {
595 if (ctx->vertex_shader != handle) {
596 ctx->vertex_shader = handle;
597 ctx->pipe->bind_vs_state(ctx->pipe, handle);
598 }
599 }
600
601 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
602 {
603 if (handle == ctx->vertex_shader) {
604 /* unbind before deleting */
605 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
606 ctx->vertex_shader = NULL;
607 }
608 ctx->pipe->delete_vs_state(ctx->pipe, handle);
609 }
610
611 void cso_save_vertex_shader(struct cso_context *ctx)
612 {
613 assert(!ctx->vertex_shader_saved);
614 ctx->vertex_shader_saved = ctx->vertex_shader;
615 }
616
617 void cso_restore_vertex_shader(struct cso_context *ctx)
618 {
619 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
620 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
621 ctx->vertex_shader = ctx->vertex_shader_saved;
622 }
623 ctx->vertex_shader_saved = NULL;
624 }
625
626
627 void cso_set_framebuffer(struct cso_context *ctx,
628 const struct pipe_framebuffer_state *fb)
629 {
630 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
631 util_copy_framebuffer_state(&ctx->fb, fb);
632 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
633 }
634 }
635
636 void cso_save_framebuffer(struct cso_context *ctx)
637 {
638 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
639 }
640
641 void cso_restore_framebuffer(struct cso_context *ctx)
642 {
643 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
644 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
645 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
646 util_unreference_framebuffer_state(&ctx->fb_saved);
647 }
648 }
649
650
651 void cso_set_viewport(struct cso_context *ctx,
652 const struct pipe_viewport_state *vp)
653 {
654 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
655 ctx->vp = *vp;
656 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
657 }
658 }
659
660 void cso_save_viewport(struct cso_context *ctx)
661 {
662 ctx->vp_saved = ctx->vp;
663 }
664
665
666 void cso_restore_viewport(struct cso_context *ctx)
667 {
668 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
669 ctx->vp = ctx->vp_saved;
670 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
671 }
672 }
673
674
675 void cso_set_blend_color(struct cso_context *ctx,
676 const struct pipe_blend_color *bc)
677 {
678 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
679 ctx->blend_color = *bc;
680 ctx->pipe->set_blend_color(ctx->pipe, bc);
681 }
682 }
683
684 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
685 {
686 if (ctx->sample_mask != sample_mask) {
687 ctx->sample_mask = sample_mask;
688 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
689 }
690 }
691
692 void cso_save_sample_mask(struct cso_context *ctx)
693 {
694 ctx->sample_mask_saved = ctx->sample_mask;
695 }
696
697 void cso_restore_sample_mask(struct cso_context *ctx)
698 {
699 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
700 }
701
702 void cso_set_stencil_ref(struct cso_context *ctx,
703 const struct pipe_stencil_ref *sr)
704 {
705 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
706 ctx->stencil_ref = *sr;
707 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
708 }
709 }
710
711 void cso_save_stencil_ref(struct cso_context *ctx)
712 {
713 ctx->stencil_ref_saved = ctx->stencil_ref;
714 }
715
716
717 void cso_restore_stencil_ref(struct cso_context *ctx)
718 {
719 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
720 sizeof(ctx->stencil_ref))) {
721 ctx->stencil_ref = ctx->stencil_ref_saved;
722 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
723 }
724 }
725
726 void cso_set_render_condition(struct cso_context *ctx,
727 struct pipe_query *query,
728 boolean condition, uint mode)
729 {
730 struct pipe_context *pipe = ctx->pipe;
731
732 if (ctx->render_condition != query ||
733 ctx->render_condition_mode != mode ||
734 ctx->render_condition_cond != condition) {
735 pipe->render_condition(pipe, query, condition, mode);
736 ctx->render_condition = query;
737 ctx->render_condition_cond = condition;
738 ctx->render_condition_mode = mode;
739 }
740 }
741
742 void cso_save_render_condition(struct cso_context *ctx)
743 {
744 ctx->render_condition_saved = ctx->render_condition;
745 ctx->render_condition_cond_saved = ctx->render_condition_cond;
746 ctx->render_condition_mode_saved = ctx->render_condition_mode;
747 }
748
749 void cso_restore_render_condition(struct cso_context *ctx)
750 {
751 cso_set_render_condition(ctx, ctx->render_condition_saved,
752 ctx->render_condition_cond_saved,
753 ctx->render_condition_mode_saved);
754 }
755
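/* Illustrative sketch (not part of the original file): a state tracker can
 * temporarily disable conditional rendering around an operation that must
 * always execute (e.g. an internal blit or clear), then restore the user's
 * query, condition and mode afterwards.
 */
#if 0
static void
example_unconditional_op(struct cso_context *cso)
{
   cso_save_render_condition(cso);
   cso_set_render_condition(cso, NULL, FALSE, 0);

   /* ... perform the operation that must not be predicated ... */

   cso_restore_render_condition(cso);
}
#endif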
756 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
757 {
758 assert(ctx->has_geometry_shader || !handle);
759
760 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
761 ctx->geometry_shader = handle;
762 ctx->pipe->bind_gs_state(ctx->pipe, handle);
763 }
764 }
765
766 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
767 {
768 if (handle == ctx->geometry_shader) {
769 /* unbind before deleting */
770 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
771 ctx->geometry_shader = NULL;
772 }
773 ctx->pipe->delete_gs_state(ctx->pipe, handle);
774 }
775
776 void cso_save_geometry_shader(struct cso_context *ctx)
777 {
778 if (!ctx->has_geometry_shader) {
779 return;
780 }
781
782 assert(!ctx->geometry_shader_saved);
783 ctx->geometry_shader_saved = ctx->geometry_shader;
784 }
785
786 void cso_restore_geometry_shader(struct cso_context *ctx)
787 {
788 if (!ctx->has_geometry_shader) {
789 return;
790 }
791
792 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
793 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
794 ctx->geometry_shader = ctx->geometry_shader_saved;
795 }
796 ctx->geometry_shader_saved = NULL;
797 }
798
799 /* clip state */
800
801 static INLINE void
802 clip_state_cpy(struct pipe_clip_state *dst,
803 const struct pipe_clip_state *src)
804 {
805 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
806 }
807
808 static INLINE int
809 clip_state_cmp(const struct pipe_clip_state *a,
810 const struct pipe_clip_state *b)
811 {
812 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
813 }
814
815 void
816 cso_set_clip(struct cso_context *ctx,
817 const struct pipe_clip_state *clip)
818 {
819 if (clip_state_cmp(&ctx->clip, clip)) {
820 clip_state_cpy(&ctx->clip, clip);
821 ctx->pipe->set_clip_state(ctx->pipe, clip);
822 }
823 }
824
825 void
826 cso_save_clip(struct cso_context *ctx)
827 {
828 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
829 }
830
831 void
832 cso_restore_clip(struct cso_context *ctx)
833 {
834 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
835 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
836 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
837 }
838 }
839
840 enum pipe_error
841 cso_set_vertex_elements(struct cso_context *ctx,
842 unsigned count,
843 const struct pipe_vertex_element *states)
844 {
845 struct u_vbuf *vbuf = ctx->vbuf;
846 unsigned key_size, hash_key;
847 struct cso_hash_iter iter;
848 void *handle;
849 struct cso_velems_state velems_state;
850
851 if (vbuf) {
852 u_vbuf_set_vertex_elements(vbuf, count, states);
853 return PIPE_OK;
854 }
855
856 /* Need to include the count in the stored state data too.
857 * Otherwise the first 'count' pipe_vertex_elements could be identical
858 * even if the counts differ, and there's no guarantee the hash would
859 * be different in that case either.
860 */
861 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
862 velems_state.count = count;
863 memcpy(velems_state.velems, states,
864 sizeof(struct pipe_vertex_element) * count);
865 hash_key = cso_construct_key((void*)&velems_state, key_size);
866 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
867 (void*)&velems_state, key_size);
868
869 if (cso_hash_iter_is_null(iter)) {
870 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
871 if (!cso)
872 return PIPE_ERROR_OUT_OF_MEMORY;
873
874 memcpy(&cso->state, &velems_state, key_size);
875 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
876 &cso->state.velems[0]);
877 cso->delete_state =
878 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
879 cso->context = ctx->pipe;
880
881 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
882 if (cso_hash_iter_is_null(iter)) {
883 FREE(cso);
884 return PIPE_ERROR_OUT_OF_MEMORY;
885 }
886
887 handle = cso->data;
888 }
889 else {
890 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
891 }
892
893 if (ctx->velements != handle) {
894 ctx->velements = handle;
895 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
896 }
897 return PIPE_OK;
898 }
899
900 void cso_save_vertex_elements(struct cso_context *ctx)
901 {
902 struct u_vbuf *vbuf = ctx->vbuf;
903
904 if (vbuf) {
905 u_vbuf_save_vertex_elements(vbuf);
906 return;
907 }
908
909 assert(!ctx->velements_saved);
910 ctx->velements_saved = ctx->velements;
911 }
912
913 void cso_restore_vertex_elements(struct cso_context *ctx)
914 {
915 struct u_vbuf *vbuf = ctx->vbuf;
916
917 if (vbuf) {
918 u_vbuf_restore_vertex_elements(vbuf);
919 return;
920 }
921
922 if (ctx->velements != ctx->velements_saved) {
923 ctx->velements = ctx->velements_saved;
924 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
925 }
926 ctx->velements_saved = NULL;
927 }
928
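/* Illustrative sketch (not part of the original file): describing a simple
 * interleaved position + texcoord vertex layout.  The formats and the 0/12
 * byte offsets are assumptions about the caller's vertex data, not anything
 * mandated by this file.
 */
#if 0
static enum pipe_error
example_pos_tex_elements(struct cso_context *cso)
{
   struct pipe_vertex_element ve[2];

   memset(ve, 0, sizeof(ve));
   ve[0].src_offset = 0;                           /* position: 3 floats */
   ve[0].src_format = PIPE_FORMAT_R32G32B32_FLOAT;
   ve[1].src_offset = 12;                          /* texcoord: 2 floats */
   ve[1].src_format = PIPE_FORMAT_R32G32_FLOAT;

   return cso_set_vertex_elements(cso, 2, ve);
}
#endif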
929 /* vertex buffers */
930
931 void cso_set_vertex_buffers(struct cso_context *ctx,
932 unsigned start_slot, unsigned count,
933 const struct pipe_vertex_buffer *buffers)
934 {
935 struct u_vbuf *vbuf = ctx->vbuf;
936
937 if (vbuf) {
938 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
939 return;
940 }
941
942 /* Keep track of what's bound in the auxiliary slot, so that it can be
943 * saved and restored around meta ops. */
944 if (start_slot <= ctx->aux_vertex_buffer_index &&
945 start_slot+count > ctx->aux_vertex_buffer_index) {
946 if (buffers) {
947 const struct pipe_vertex_buffer *vb =
948 buffers + (ctx->aux_vertex_buffer_index - start_slot);
949
950 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
951 vb->buffer);
952 memcpy(&ctx->aux_vertex_buffer_current, vb,
953 sizeof(struct pipe_vertex_buffer));
954 }
955 else {
956 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
957 NULL);
958 ctx->aux_vertex_buffer_current.user_buffer = NULL;
959 }
960 }
961
962 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
963 }
964
965 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
966 {
967 struct u_vbuf *vbuf = ctx->vbuf;
968
969 if (vbuf) {
970 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
971 return;
972 }
973
974 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
975 ctx->aux_vertex_buffer_current.buffer);
976 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
977 sizeof(struct pipe_vertex_buffer));
978 }
979
980 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
981 {
982 struct u_vbuf *vbuf = ctx->vbuf;
983
984 if (vbuf) {
985 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
986 return;
987 }
988
989 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
990 &ctx->aux_vertex_buffer_saved);
991 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
992 }
993
994 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
995 {
996 return ctx->aux_vertex_buffer_index;
997 }
998
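/* Illustrative sketch (not part of the original file): a meta op can borrow
 * the auxiliary vertex buffer slot, bind its own vertex data there, and then
 * restore whatever the application had bound.  'upload_buf' and the stride
 * are hypothetical values supplied by the caller.
 */
#if 0
static void
example_use_aux_slot(struct cso_context *cso, struct pipe_resource *upload_buf)
{
   struct pipe_vertex_buffer vb;

   cso_save_aux_vertex_buffer_slot(cso);

   memset(&vb, 0, sizeof(vb));
   vb.buffer = upload_buf;
   vb.buffer_offset = 0;
   vb.stride = 4 * sizeof(float);
   cso_set_vertex_buffers(cso, cso_get_aux_vertex_buffer_slot(cso), 1, &vb);

   /* ... draw the meta-op geometry ... */

   cso_restore_aux_vertex_buffer_slot(cso);
}
#endif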
999
1000 /************ fragment/vertex/geometry sampler and sampler view state *******/
1001
1002 static enum pipe_error
1003 single_sampler(struct cso_context *ctx,
1004 struct sampler_info *info,
1005 unsigned idx,
1006 const struct pipe_sampler_state *templ)
1007 {
1008 void *handle = NULL;
1009
1010 if (templ != NULL) {
1011 unsigned key_size = sizeof(struct pipe_sampler_state);
1012 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1013 struct cso_hash_iter iter =
1014 cso_find_state_template(ctx->cache,
1015 hash_key, CSO_SAMPLER,
1016 (void *) templ, key_size);
1017
1018 if (cso_hash_iter_is_null(iter)) {
1019 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1020 if (!cso)
1021 return PIPE_ERROR_OUT_OF_MEMORY;
1022
1023 memcpy(&cso->state, templ, sizeof(*templ));
1024 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1025 cso->delete_state =
1026 (cso_state_callback) ctx->pipe->delete_sampler_state;
1027 cso->context = ctx->pipe;
1028
1029 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1030 if (cso_hash_iter_is_null(iter)) {
1031 FREE(cso);
1032 return PIPE_ERROR_OUT_OF_MEMORY;
1033 }
1034
1035 handle = cso->data;
1036 }
1037 else {
1038 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1039 }
1040 }
1041
1042 info->samplers[idx] = handle;
1043
1044 return PIPE_OK;
1045 }
1046
1047 enum pipe_error
1048 cso_single_sampler(struct cso_context *ctx,
1049 unsigned shader_stage,
1050 unsigned idx,
1051 const struct pipe_sampler_state *templ)
1052 {
1053 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1054 }
1055
1056
1057
1058 static void
1059 single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1060 {
1061 struct sampler_info *info = &ctx->samplers[shader_stage];
1062 unsigned i;
1063
1064 /* find highest non-null sampler */
1065 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1066 if (info->samplers[i - 1] != NULL)
1067 break;
1068 }
1069
1070 info->nr_samplers = i;
1071
1072 if (info->hw.nr_samplers != info->nr_samplers ||
1073 memcmp(info->hw.samplers,
1074 info->samplers,
1075 info->nr_samplers * sizeof(void *)) != 0)
1076 {
1077 memcpy(info->hw.samplers,
1078 info->samplers,
1079 info->nr_samplers * sizeof(void *));
1080 info->hw.nr_samplers = info->nr_samplers;
1081
1082 switch (shader_stage) {
1083 case PIPE_SHADER_FRAGMENT:
1084 ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
1085 info->nr_samplers,
1086 info->samplers);
1087 break;
1088 case PIPE_SHADER_VERTEX:
1089 ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
1090 info->nr_samplers,
1091 info->samplers);
1092 break;
1093 case PIPE_SHADER_GEOMETRY:
1094 ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
1095 info->nr_samplers,
1096 info->samplers);
1097 break;
1098 default:
1099 assert(!"bad shader type in single_sampler_done()");
1100 }
1101 }
1102 }
1103
1104 void
1105 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1106 {
1107 single_sampler_done(ctx, shader_stage);
1108 }
1109
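/* Illustrative sketch (not part of the original file): binding one
 * nearest-filtering fragment sampler through the single-sampler interface.
 * The wrap/filter settings are just example values.
 */
#if 0
static enum pipe_error
example_bind_nearest_sampler(struct cso_context *cso)
{
   struct pipe_sampler_state samp;
   enum pipe_error ret;

   memset(&samp, 0, sizeof(samp));
   samp.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   samp.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   samp.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   samp.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   samp.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   samp.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;

   ret = cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &samp);
   cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
   return ret;
}
#endif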
1110
1111 /*
1112 * If the function encounters any errors it will return the last one.
1113 * This is done so that we always try to set as many samplers as
1114 * possible.
1115 */
1116 enum pipe_error
1117 cso_set_samplers(struct cso_context *ctx,
1118 unsigned shader_stage,
1119 unsigned nr,
1120 const struct pipe_sampler_state **templates)
1121 {
1122 struct sampler_info *info = &ctx->samplers[shader_stage];
1123 unsigned i;
1124 enum pipe_error temp, error = PIPE_OK;
1125
1126 /* TODO: fastpath
1127 */
1128
1129 for (i = 0; i < nr; i++) {
1130 temp = single_sampler(ctx, info, i, templates[i]);
1131 if (temp != PIPE_OK)
1132 error = temp;
1133 }
1134
1135 for ( ; i < info->nr_samplers; i++) {
1136 temp = single_sampler(ctx, info, i, NULL);
1137 if (temp != PIPE_OK)
1138 error = temp;
1139 }
1140
1141 single_sampler_done(ctx, shader_stage);
1142
1143 return error;
1144 }
1145
1146 void
1147 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1148 {
1149 struct sampler_info *info = &ctx->samplers[shader_stage];
1150 info->nr_samplers_saved = info->nr_samplers;
1151 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1152 }
1153
1154
1155 void
1156 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1157 {
1158 struct sampler_info *info = &ctx->samplers[shader_stage];
1159 info->nr_samplers = info->nr_samplers_saved;
1160 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1161 single_sampler_done(ctx, shader_stage);
1162 }
1163
1164
1165 void
1166 cso_set_sampler_views(struct cso_context *ctx,
1167 unsigned shader_stage,
1168 unsigned count,
1169 struct pipe_sampler_view **views)
1170 {
1171 struct sampler_info *info = &ctx->samplers[shader_stage];
1172 unsigned i;
1173
1174 /* reference new views */
1175 for (i = 0; i < count; i++) {
1176 pipe_sampler_view_reference(&info->views[i], views[i]);
1177 }
1178 /* unref extra old views, if any */
1179 for (; i < info->nr_views; i++) {
1180 pipe_sampler_view_reference(&info->views[i], NULL);
1181 }
1182
1183 info->nr_views = count;
1184
1185 /* bind the new sampler views */
1186 switch (shader_stage) {
1187 case PIPE_SHADER_FRAGMENT:
1188 ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
1189 break;
1190 case PIPE_SHADER_VERTEX:
1191 ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
1192 break;
1193 case PIPE_SHADER_GEOMETRY:
1194 ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
1195 break;
1196 default:
1197 assert(!"bad shader type in cso_set_sampler_views()");
1198 }
1199 }
1200
1201
1202 void
1203 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1204 {
1205 struct sampler_info *info = &ctx->samplers[shader_stage];
1206 unsigned i;
1207
1208 info->nr_views_saved = info->nr_views;
1209
1210 for (i = 0; i < info->nr_views; i++) {
1211 assert(!info->views_saved[i]);
1212 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1213 }
1214 }
1215
1216
1217 void
1218 cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1219 {
1220 struct sampler_info *info = &ctx->samplers[shader_stage];
1221 unsigned i, nr_saved = info->nr_views_saved;
1222
1223 for (i = 0; i < nr_saved; i++) {
1224 pipe_sampler_view_reference(&info->views[i], NULL);
1225 /* move the reference from one pointer to another */
1226 info->views[i] = info->views_saved[i];
1227 info->views_saved[i] = NULL;
1228 }
1229 for (; i < info->nr_views; i++) {
1230 pipe_sampler_view_reference(&info->views[i], NULL);
1231 }
1232
1233 /* bind the old/saved sampler views */
1234 switch (shader_stage) {
1235 case PIPE_SHADER_FRAGMENT:
1236 ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
1237 break;
1238 case PIPE_SHADER_VERTEX:
1239 ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
1240 break;
1241 case PIPE_SHADER_GEOMETRY:
1242 ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
1243 break;
1244 default:
1245 assert(!"bad shader type in cso_restore_sampler_views()");
1246 }
1247
1248 info->nr_views = nr_saved;
1249 info->nr_views_saved = 0;
1250 }
1251
1252
1253 void
1254 cso_set_stream_outputs(struct cso_context *ctx,
1255 unsigned num_targets,
1256 struct pipe_stream_output_target **targets,
1257 unsigned append_bitmask)
1258 {
1259 struct pipe_context *pipe = ctx->pipe;
1260 uint i;
1261
1262 if (!ctx->has_streamout) {
1263 assert(num_targets == 0);
1264 return;
1265 }
1266
1267 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1268 /* Nothing to do. */
1269 return;
1270 }
1271
1272 /* reference new targets */
1273 for (i = 0; i < num_targets; i++) {
1274 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1275 }
1276 /* unref extra old targets, if any */
1277 for (; i < ctx->nr_so_targets; i++) {
1278 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1279 }
1280
1281 pipe->set_stream_output_targets(pipe, num_targets, targets,
1282 append_bitmask);
1283 ctx->nr_so_targets = num_targets;
1284 }
1285
1286 void
1287 cso_save_stream_outputs(struct cso_context *ctx)
1288 {
1289 uint i;
1290
1291 if (!ctx->has_streamout) {
1292 return;
1293 }
1294
1295 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1296
1297 for (i = 0; i < ctx->nr_so_targets; i++) {
1298 assert(!ctx->so_targets_saved[i]);
1299 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1300 }
1301 }
1302
1303 void
1304 cso_restore_stream_outputs(struct cso_context *ctx)
1305 {
1306 struct pipe_context *pipe = ctx->pipe;
1307 uint i;
1308
1309 if (!ctx->has_streamout) {
1310 return;
1311 }
1312
1313 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1314 /* Nothing to do. */
1315 return;
1316 }
1317
1318 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1319 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1320 /* move the reference from one pointer to another */
1321 ctx->so_targets[i] = ctx->so_targets_saved[i];
1322 ctx->so_targets_saved[i] = NULL;
1323 }
1324 for (; i < ctx->nr_so_targets; i++) {
1325 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1326 }
1327
1328 /* ~0 means append */
1329 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1330 ctx->so_targets, ~0);
1331
1332 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1333 ctx->nr_so_targets_saved = 0;
1334 }
1335
1336 /* constant buffers */
1337
1338 void
1339 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1340 unsigned index, struct pipe_constant_buffer *cb)
1341 {
1342 struct pipe_context *pipe = cso->pipe;
1343
1344 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1345
1346 if (index == 0) {
1347 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1348 }
1349 }
1350
1351 void
1352 cso_set_constant_buffer_resource(struct cso_context *cso,
1353 unsigned shader_stage,
1354 unsigned index,
1355 struct pipe_resource *buffer)
1356 {
1357 if (buffer) {
1358 struct pipe_constant_buffer cb;
1359 cb.buffer = buffer;
1360 cb.buffer_offset = 0;
1361 cb.buffer_size = buffer->width0;
1362 cb.user_buffer = NULL;
1363 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1364 } else {
1365 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1366 }
1367 }
1368
1369 void
1370 cso_save_constant_buffer_slot0(struct cso_context *cso,
1371 unsigned shader_stage)
1372 {
1373 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1374 &cso->aux_constbuf_current[shader_stage]);
1375 }
1376
1377 void
1378 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1379 unsigned shader_stage)
1380 {
1381 cso_set_constant_buffer(cso, shader_stage, 0,
1382 &cso->aux_constbuf_saved[shader_stage]);
1383 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1384 NULL);
1385 }
1386
1387 /* drawing */
1388
1389 void
1390 cso_set_index_buffer(struct cso_context *cso,
1391 const struct pipe_index_buffer *ib)
1392 {
1393 struct u_vbuf *vbuf = cso->vbuf;
1394
1395 if (vbuf) {
1396 u_vbuf_set_index_buffer(vbuf, ib);
1397 } else {
1398 struct pipe_context *pipe = cso->pipe;
1399 pipe->set_index_buffer(pipe, ib);
1400 }
1401 }
1402
1403 void
1404 cso_draw_vbo(struct cso_context *cso,
1405 const struct pipe_draw_info *info)
1406 {
1407 struct u_vbuf *vbuf = cso->vbuf;
1408
1409 if (vbuf) {
1410 u_vbuf_draw_vbo(vbuf, info);
1411 } else {
1412 struct pipe_context *pipe = cso->pipe;
1413 pipe->draw_vbo(pipe, info);
1414 }
1415 }
1416
1417 void
1418 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1419 {
1420 struct pipe_draw_info info;
1421
1422 util_draw_init_info(&info);
1423
1424 info.mode = mode;
1425 info.start = start;
1426 info.count = count;
1427 info.min_index = start;
1428 info.max_index = start + count - 1;
1429
1430 cso_draw_vbo(cso, &info);
1431 }