gallium/cso: add support for compute shaders
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Info related to samplers and sampler views.
 * We have one of these for fragment samplers and another for vertex samplers.
 */
struct sampler_info
{
   void *samplers[PIPE_MAX_SAMPLERS];  /* bound sampler-state CSO handles */
   unsigned nr_samplers;               /* number of valid entries in samplers[] */
};
62
63
64
/**
 * The CSO context: wraps a pipe_context and caches/deduplicates constant
 * state objects, tracks currently-bound state to skip redundant binds,
 * and keeps a 1-deep "saved" copy of most state for meta operations.
 */
struct cso_context {
   struct pipe_context *pipe;   /* the wrapped driver context */
   struct cso_cache *cache;     /* hash cache of created CSOs */
   struct u_vbuf *vbuf;         /* optional vertex-buffer fallback module */

   /* Optional-stage/feature support, probed once at creation time. */
   boolean has_geometry_shader;
   boolean has_tessellation;
   boolean has_compute_shader;
   boolean has_streamout;

   struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views;

   struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views_saved;

   void *fragment_samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_fragment_samplers_saved;

   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* Shadow copy of the auxiliary vertex buffer slot, so meta ops can
    * save/restore it.
    */
   struct pipe_vertex_buffer aux_vertex_buffer_current;
   struct pipe_vertex_buffer aux_vertex_buffer_saved;
   unsigned aux_vertex_buffer_index;

   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *tessctrl_shader, *tessctrl_shader_saved;
   void *tesseval_shader, *tesseval_shader_saved;
   void *compute_shader;   /* no saved slot: compute is not used by meta ops */
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   unsigned min_samples, min_samples_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
123
124
/* Each delete_*_state() helper frees one cached CSO of its kind.
 * A helper returns FALSE (and frees nothing) when the CSO is currently
 * bound, so sanitize_hash() can skip in-use entries and pick another.
 */
static boolean delete_blend_state(struct cso_context *ctx, void *state)
{
   struct cso_blend *cso = (struct cso_blend *)state;

   /* Never free the currently-bound blend state. */
   if (ctx->blend == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
{
   struct cso_depth_stencil_alpha *cso =
      (struct cso_depth_stencil_alpha *)state;

   /* Never free the currently-bound DSA state. */
   if (ctx->depth_stencil == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);

   return TRUE;
}

static boolean delete_sampler_state(struct cso_context *ctx, void *state)
{
   struct cso_sampler *cso = (struct cso_sampler *)state;
   /* No bound-state check here; 'ctx' is unused but kept so all delete
    * helpers share the same signature.
    */
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
{
   struct cso_rasterizer *cso = (struct cso_rasterizer *)state;

   /* Never free the currently-bound rasterizer state. */
   if (ctx->rasterizer == cso->data)
      return FALSE;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_vertex_elements(struct cso_context *ctx,
                                      void *state)
{
   struct cso_velements *cso = (struct cso_velements *)state;

   /* Never free the currently-bound vertex-elements state. */
   if (ctx->velements == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}
187
188
/**
 * Dispatch to the type-specific delete helper.
 * Returns TRUE if the CSO was freed, FALSE if it is in use and was kept.
 */
static inline boolean delete_cso(struct cso_context *ctx,
                                 void *state, enum cso_cache_type type)
{
   switch (type) {
   case CSO_BLEND:
      return delete_blend_state(ctx, state);
   case CSO_SAMPLER:
      return delete_sampler_state(ctx, state);
   case CSO_DEPTH_STENCIL_ALPHA:
      return delete_depth_stencil_state(ctx, state);
   case CSO_RASTERIZER:
      return delete_rasterizer_state(ctx, state);
   case CSO_VELEMENTS:
      return delete_vertex_elements(ctx, state);
   default:
      /* Unknown type: should never happen; free the raw allocation. */
      assert(0);
      FREE(state);
   }
   return FALSE;
}
209
/**
 * Cache-eviction callback: trim a hash table that is at/near its limit.
 * Entries that are currently bound are skipped (delete_cso() returns
 * FALSE for them) and another victim is tried.
 */
static inline void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* if we're approaching the maximum size, remove a fourth of the entries,
    * otherwise every subsequent call will go through the same */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove =  (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* fixme: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}
234
/**
 * Create the u_vbuf vertex-buffer fallback module if the driver's vertex
 * fetch capabilities require software translation.
 */
static void cso_init_vbuf(struct cso_context *cso)
{
   struct u_vbuf_caps caps;

   /* Install u_vbuf if there is anything unsupported. */
   if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
      cso->vbuf = u_vbuf_create(cso->pipe, &caps,
                                cso->aux_vertex_buffer_index);
   }
}
245
246 struct cso_context *cso_create_context( struct pipe_context *pipe )
247 {
248 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
249 if (!ctx)
250 goto out;
251
252 ctx->cache = cso_cache_create();
253 if (ctx->cache == NULL)
254 goto out;
255 cso_cache_set_sanitize_callback(ctx->cache,
256 sanitize_hash,
257 ctx);
258
259 ctx->pipe = pipe;
260 ctx->sample_mask = ~0;
261
262 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
263
264 cso_init_vbuf(ctx);
265
266 /* Enable for testing: */
267 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
268
269 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
270 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
271 ctx->has_geometry_shader = TRUE;
272 }
273 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
274 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
275 ctx->has_tessellation = TRUE;
276 }
277 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
278 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
279 ctx->has_compute_shader = TRUE;
280 }
281 if (pipe->screen->get_param(pipe->screen,
282 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
283 ctx->has_streamout = TRUE;
284 }
285
286 return ctx;
287
288 out:
289 cso_destroy_context( ctx );
290 return NULL;
291 }
292
293 /**
294 * Free the CSO context.
295 */
296 void cso_destroy_context( struct cso_context *ctx )
297 {
298 unsigned i;
299
300 if (ctx->pipe) {
301 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
302
303 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
304 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
305
306 {
307 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
308 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
309 struct pipe_screen *scr = ctx->pipe->screen;
310 unsigned sh;
311 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
312 int maxsam = scr->get_shader_param(scr, sh,
313 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
314 int maxview = scr->get_shader_param(scr, sh,
315 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
316 assert(maxsam <= PIPE_MAX_SAMPLERS);
317 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
318 if (maxsam > 0) {
319 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
320 }
321 if (maxview > 0) {
322 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
323 }
324 }
325 }
326
327 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
328 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
329 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
330 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
331 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
332 if (ctx->has_geometry_shader) {
333 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
334 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
335 }
336 if (ctx->has_tessellation) {
337 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
338 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
339 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
340 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
341 }
342 if (ctx->has_compute_shader) {
343 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
344 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
345 }
346 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
347
348 if (ctx->has_streamout)
349 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
350 }
351
352 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
353 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
354 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
355 }
356
357 util_unreference_framebuffer_state(&ctx->fb);
358 util_unreference_framebuffer_state(&ctx->fb_saved);
359
360 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
361 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
362
363 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
364 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
365 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
366 }
367
368 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
369 pipe_so_target_reference(&ctx->so_targets[i], NULL);
370 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
371 }
372
373 if (ctx->cache) {
374 cso_cache_delete( ctx->cache );
375 ctx->cache = NULL;
376 }
377
378 if (ctx->vbuf)
379 u_vbuf_destroy(ctx->vbuf);
380 FREE( ctx );
381 }
382
383
/* These functions will either find the state matching the given template
 * in the cache, or create a new state from the template, insert it in
 * the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of the
 * cso will be assigned to be the template itself.
 */
393
/**
 * Bind a blend state matching \p templ, creating and caching the driver
 * CSO on first use.  Redundant binds are skipped.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* With independent blend disabled only rt[0] is meaningful, so hash
    * a shorter key for more cache hits.
    */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Zero first: only key_size bytes are copied from the template. */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

/* Push the current blend state onto the 1-deep save stack. */
void cso_save_blend(struct cso_context *ctx)
{
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}

/* Re-bind the saved blend state and clear the save slot. */
void cso_restore_blend(struct cso_context *ctx)
{
   if (ctx->blend != ctx->blend_saved) {
      ctx->blend = ctx->blend_saved;
      ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
   }
   ctx->blend_saved = NULL;
}
452
453
454
/**
 * Bind a depth/stencil/alpha state matching \p templ, creating and
 * caching the driver CSO on first use.  Redundant binds are skipped.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

/* Push the current DSA state onto the 1-deep save stack. */
void cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}

/* Re-bind the saved DSA state and clear the save slot. */
void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
{
   if (ctx->depth_stencil != ctx->depth_stencil_saved) {
      ctx->depth_stencil = ctx->depth_stencil_saved;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
                                                ctx->depth_stencil_saved);
   }
   ctx->depth_stencil_saved = NULL;
}
516
517
518
/**
 * Bind a rasterizer state matching \p templ, creating and caching the
 * driver CSO on first use.  Redundant binds are skipped.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

/* Push the current rasterizer state onto the 1-deep save stack. */
void cso_save_rasterizer(struct cso_context *ctx)
{
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}

/* Re-bind the saved rasterizer state and clear the save slot. */
void cso_restore_rasterizer(struct cso_context *ctx)
{
   if (ctx->rasterizer != ctx->rasterizer_saved) {
      ctx->rasterizer = ctx->rasterizer_saved;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
   }
   ctx->rasterizer_saved = NULL;
}
574
575
/* Bind a fragment shader by driver handle; no-op if already bound. */
void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
{
   if (ctx->fragment_shader != handle) {
      ctx->fragment_shader = handle;
      ctx->pipe->bind_fs_state(ctx->pipe, handle);
   }
}

/* Delete a fragment shader, unbinding it first if currently bound. */
void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->fragment_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_fs_state(ctx->pipe, NULL);
      ctx->fragment_shader = NULL;
   }
   ctx->pipe->delete_fs_state(ctx->pipe, handle);
}

/* Push the current fragment shader onto the 1-deep save stack. */
void cso_save_fragment_shader(struct cso_context *ctx)
{
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}

/* Re-bind the saved fragment shader and clear the save slot. */
void cso_restore_fragment_shader(struct cso_context *ctx)
{
   if (ctx->fragment_shader_saved != ctx->fragment_shader) {
      ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
      ctx->fragment_shader = ctx->fragment_shader_saved;
   }
   ctx->fragment_shader_saved = NULL;
}


/* Bind a vertex shader by driver handle; no-op if already bound. */
void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
{
   if (ctx->vertex_shader != handle) {
      ctx->vertex_shader = handle;
      ctx->pipe->bind_vs_state(ctx->pipe, handle);
   }
}

/* Delete a vertex shader, unbinding it first if currently bound. */
void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->vertex_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_vs_state(ctx->pipe, NULL);
      ctx->vertex_shader = NULL;
   }
   ctx->pipe->delete_vs_state(ctx->pipe, handle);
}

/* Push the current vertex shader onto the 1-deep save stack. */
void cso_save_vertex_shader(struct cso_context *ctx)
{
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}

/* Re-bind the saved vertex shader and clear the save slot. */
void cso_restore_vertex_shader(struct cso_context *ctx)
{
   if (ctx->vertex_shader_saved != ctx->vertex_shader) {
      ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
      ctx->vertex_shader = ctx->vertex_shader_saved;
   }
   ctx->vertex_shader_saved = NULL;
}
642
643
/* Set the framebuffer state; no-op if identical to the current state. */
void cso_set_framebuffer(struct cso_context *ctx,
                         const struct pipe_framebuffer_state *fb)
{
   if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
      /* util_copy_framebuffer_state() manages the surface references. */
      util_copy_framebuffer_state(&ctx->fb, fb);
      ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
   }
}

/* Save the current framebuffer state (takes surface references). */
void cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}

/* Restore the saved framebuffer state and drop the saved references. */
void cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
}
666
667
/* Set viewport 0; no-op if identical to the current viewport. */
void cso_set_viewport(struct cso_context *ctx,
                      const struct pipe_viewport_state *vp)
{
   if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
      ctx->vp = *vp;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
   }
}

/* Save the current viewport (1-deep stack). */
void cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}


/* Restore the saved viewport if it differs from the current one. */
void cso_restore_viewport(struct cso_context *ctx)
{
   if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
      ctx->vp = ctx->vp_saved;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
   }
}
690
691
692 void cso_set_blend_color(struct cso_context *ctx,
693 const struct pipe_blend_color *bc)
694 {
695 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
696 ctx->blend_color = *bc;
697 ctx->pipe->set_blend_color(ctx->pipe, bc);
698 }
699 }
700
701 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
702 {
703 if (ctx->sample_mask != sample_mask) {
704 ctx->sample_mask = sample_mask;
705 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
706 }
707 }
708
709 void cso_save_sample_mask(struct cso_context *ctx)
710 {
711 ctx->sample_mask_saved = ctx->sample_mask;
712 }
713
714 void cso_restore_sample_mask(struct cso_context *ctx)
715 {
716 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
717 }
718
/* Set the minimum sample-shading count; set_min_samples() is an optional
 * driver hook, so it is only called when present.
 */
void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
{
   if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
      ctx->min_samples = min_samples;
      ctx->pipe->set_min_samples(ctx->pipe, min_samples);
   }
}

/* Save the current min-samples value (1-deep stack). */
void cso_save_min_samples(struct cso_context *ctx)
{
   ctx->min_samples_saved = ctx->min_samples;
}

/* Re-apply the saved min-samples value. */
void cso_restore_min_samples(struct cso_context *ctx)
{
   cso_set_min_samples(ctx, ctx->min_samples_saved);
}

/* Set the stencil reference values; no-op if unchanged. */
void cso_set_stencil_ref(struct cso_context *ctx,
                         const struct pipe_stencil_ref *sr)
{
   if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = *sr;
      ctx->pipe->set_stencil_ref(ctx->pipe, sr);
   }
}

/* Save the current stencil reference (1-deep stack). */
void cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}


/* Restore the saved stencil reference if it differs. */
void cso_restore_stencil_ref(struct cso_context *ctx)
{
   if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
              sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = ctx->stencil_ref_saved;
      ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
   }
}
760
/* Set the conditional-rendering query/mode; no-op if unchanged. */
void cso_set_render_condition(struct cso_context *ctx,
                              struct pipe_query *query,
                              boolean condition, uint mode)
{
   struct pipe_context *pipe = ctx->pipe;

   if (ctx->render_condition != query ||
       ctx->render_condition_mode != mode ||
       ctx->render_condition_cond != condition) {
      pipe->render_condition(pipe, query, condition, mode);
      ctx->render_condition = query;
      ctx->render_condition_cond = condition;
      ctx->render_condition_mode = mode;
   }
}

/* Save the current render condition (1-deep stack). */
void cso_save_render_condition(struct cso_context *ctx)
{
   ctx->render_condition_saved = ctx->render_condition;
   ctx->render_condition_cond_saved = ctx->render_condition_cond;
   ctx->render_condition_mode_saved = ctx->render_condition_mode;
}

/* Re-apply the saved render condition. */
void cso_restore_render_condition(struct cso_context *ctx)
{
   cso_set_render_condition(ctx, ctx->render_condition_saved,
                            ctx->render_condition_cond_saved,
                            ctx->render_condition_mode_saved);
}
790
/* Bind a geometry shader by driver handle.  A non-NULL handle is only
 * legal when the driver supports geometry shaders.
 */
void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_geometry_shader || !handle);

   if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
      ctx->geometry_shader = handle;
      ctx->pipe->bind_gs_state(ctx->pipe, handle);
   }
}

/* Delete a geometry shader, unbinding it first if currently bound. */
void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->geometry_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_gs_state(ctx->pipe, NULL);
      ctx->geometry_shader = NULL;
   }
   ctx->pipe->delete_gs_state(ctx->pipe, handle);
}

/* Push the current geometry shader onto the 1-deep save stack
 * (no-op without geometry-shader support).
 */
void cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}

/* Re-bind the saved geometry shader and clear the save slot. */
void cso_restore_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   if (ctx->geometry_shader_saved != ctx->geometry_shader) {
      ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
      ctx->geometry_shader = ctx->geometry_shader_saved;
   }
   ctx->geometry_shader_saved = NULL;
}
833
/* Bind a tessellation control shader by driver handle.  A non-NULL
 * handle is only legal when the driver supports tessellation.
 */
void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_tessellation || !handle);

   if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
      ctx->tessctrl_shader = handle;
      ctx->pipe->bind_tcs_state(ctx->pipe, handle);
   }
}

/* Delete a tess-control shader, unbinding it first if currently bound. */
void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->tessctrl_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
      ctx->tessctrl_shader = NULL;
   }
   ctx->pipe->delete_tcs_state(ctx->pipe, handle);
}

/* Push the current tess-control shader onto the 1-deep save stack
 * (no-op without tessellation support).
 */
void cso_save_tessctrl_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   assert(!ctx->tessctrl_shader_saved);
   ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
}

/* Re-bind the saved tess-control shader and clear the save slot. */
void cso_restore_tessctrl_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
      ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
      ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
   }
   ctx->tessctrl_shader_saved = NULL;
}
876
/* Bind a tessellation evaluation shader by driver handle.  A non-NULL
 * handle is only legal when the driver supports tessellation.
 */
void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_tessellation || !handle);

   if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
      ctx->tesseval_shader = handle;
      ctx->pipe->bind_tes_state(ctx->pipe, handle);
   }
}

/* Delete a tess-eval shader, unbinding it first if currently bound. */
void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->tesseval_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_tes_state(ctx->pipe, NULL);
      ctx->tesseval_shader = NULL;
   }
   ctx->pipe->delete_tes_state(ctx->pipe, handle);
}

/* Push the current tess-eval shader onto the 1-deep save stack
 * (no-op without tessellation support).
 */
void cso_save_tesseval_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   assert(!ctx->tesseval_shader_saved);
   ctx->tesseval_shader_saved = ctx->tesseval_shader;
}

/* Re-bind the saved tess-eval shader and clear the save slot. */
void cso_restore_tesseval_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
      ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
      ctx->tesseval_shader = ctx->tesseval_shader_saved;
   }
   ctx->tesseval_shader_saved = NULL;
}
919
/* Bind a compute shader by driver handle.  A non-NULL handle is only
 * legal when the driver supports compute.  Note there is no save/restore
 * pair for compute state.
 */
void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_compute_shader || !handle);

   if (ctx->has_compute_shader && ctx->compute_shader != handle) {
      ctx->compute_shader = handle;
      ctx->pipe->bind_compute_state(ctx->pipe, handle);
   }
}

/* Delete a compute shader, unbinding it first if currently bound. */
void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->compute_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_compute_state(ctx->pipe, NULL);
      ctx->compute_shader = NULL;
   }
   ctx->pipe->delete_compute_state(ctx->pipe, handle);
}
939
/**
 * Bind a vertex-elements state matching the given array, creating and
 * caching the driver CSO on first use.  When the u_vbuf fallback module
 * is active, the call is forwarded to it instead.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

/* Save the current vertex-elements state (forwarded to u_vbuf if active). */
void cso_save_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_elements(vbuf);
      return;
   }

   assert(!ctx->velements_saved);
   ctx->velements_saved = ctx->velements;
}

/* Restore the saved vertex-elements state (forwarded to u_vbuf if active). */
void cso_restore_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_elements(vbuf);
      return;
   }

   if (ctx->velements != ctx->velements_saved) {
      ctx->velements = ctx->velements_saved;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
   }
   ctx->velements_saved = NULL;
}
1028
1029 /* vertex buffers */
1030
/**
 * Set vertex buffers, mirroring pipe_context::set_vertex_buffers().
 *
 * When u_vbuf is active it takes over all vertex-buffer state.
 * Otherwise, if the updated slot range covers the auxiliary vertex
 * buffer slot, a referenced copy of that buffer is kept in
 * ctx->aux_vertex_buffer_current so meta ops can save/restore it.
 */
void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned start_slot, unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
      return;
   }

   /* Save what's in the auxiliary slot, so that we can save and restore it
    * for meta ops. */
   if (start_slot <= ctx->aux_vertex_buffer_index &&
       start_slot+count > ctx->aux_vertex_buffer_index) {
      if (buffers) {
         const struct pipe_vertex_buffer *vb =
               buffers + (ctx->aux_vertex_buffer_index - start_slot);

         /* Take the reference first; the memcpy below then copies the
          * same buffer pointer plus the remaining descriptor fields.
          * Reversing the order would leak the old buffer's reference. */
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 vb->buffer);
         memcpy(&ctx->aux_vertex_buffer_current, vb,
                sizeof(struct pipe_vertex_buffer));
      }
      else {
         /* NULL 'buffers' unbinds: drop the stashed reference and clear
          * the user-buffer pointer. */
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 NULL);
         ctx->aux_vertex_buffer_current.user_buffer = NULL;
      }
   }

   ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
}
1064
/**
 * Save the vertex buffer occupying the auxiliary slot so a meta op can
 * overwrite it; pair with cso_restore_aux_vertex_buffer_slot().
 */
void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      /* u_vbuf owns vertex-buffer state when active. */
      u_vbuf_save_aux_vertex_buffer_slot(vbuf);
      return;
   }

   /* Take the reference first, then copy the full descriptor (the memcpy
    * writes the same buffer pointer again).  Reversing the order would
    * leak the previously-saved buffer's reference. */
   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
                           ctx->aux_vertex_buffer_current.buffer);
   memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
          sizeof(struct pipe_vertex_buffer));
}
1079
1080 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1081 {
1082 struct u_vbuf *vbuf = ctx->vbuf;
1083
1084 if (vbuf) {
1085 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1086 return;
1087 }
1088
1089 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1090 &ctx->aux_vertex_buffer_saved);
1091 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1092 }
1093
1094 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1095 {
1096 return ctx->aux_vertex_buffer_index;
1097 }
1098
1099
1100 /**************** fragment/vertex sampler view state *************************/
1101
/**
 * Stage one sampler state at index 'idx' for the given shader stage,
 * going through the CSO cache so that an identical template reuses the
 * previously-created driver sampler object.
 *
 * The handle is only written into ctx->samplers[]; the caller must
 * invoke cso_single_sampler_done() to bind the array on the driver.
 * A NULL 'templ' stages a NULL handle (unbinds the slot).
 *
 * \return PIPE_ERROR_OUT_OF_MEMORY on allocation failure, else PIPE_OK.
 */
enum pipe_error
cso_single_sampler(struct cso_context *ctx, unsigned shader_stage,
                   unsigned idx, const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* Cache miss: create a new sampler CSO and insert it. */
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         /* Cache hit: reuse the existing driver object. */
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   ctx->samplers[shader_stage].samplers[idx] = handle;
   return PIPE_OK;
}
1143
1144
1145 void
1146 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1147 {
1148 struct sampler_info *info = &ctx->samplers[shader_stage];
1149 unsigned i;
1150
1151 /* find highest non-null sampler */
1152 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1153 if (info->samplers[i - 1] != NULL)
1154 break;
1155 }
1156
1157 info->nr_samplers = i;
1158 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0, i,
1159 info->samplers);
1160 }
1161
1162
/*
 * If the function encounters any errors, it will return the
 * last one.  This is done so that we always try to set as many
 * samplers as possible.
 */
1168 enum pipe_error
1169 cso_set_samplers(struct cso_context *ctx,
1170 unsigned shader_stage,
1171 unsigned nr,
1172 const struct pipe_sampler_state **templates)
1173 {
1174 struct sampler_info *info = &ctx->samplers[shader_stage];
1175 unsigned i;
1176 enum pipe_error temp, error = PIPE_OK;
1177
1178 /* TODO: fastpath
1179 */
1180
1181 for (i = 0; i < nr; i++) {
1182 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1183 if (temp != PIPE_OK)
1184 error = temp;
1185 }
1186
1187 for ( ; i < info->nr_samplers; i++) {
1188 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1189 if (temp != PIPE_OK)
1190 error = temp;
1191 }
1192
1193 cso_single_sampler_done(ctx, shader_stage);
1194
1195 return error;
1196 }
1197
1198 void
1199 cso_save_fragment_samplers(struct cso_context *ctx)
1200 {
1201 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1202
1203 ctx->nr_fragment_samplers_saved = info->nr_samplers;
1204 memcpy(ctx->fragment_samplers_saved, info->samplers,
1205 sizeof(info->samplers));
1206 }
1207
1208
1209 void
1210 cso_restore_fragment_samplers(struct cso_context *ctx)
1211 {
1212 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1213
1214 info->nr_samplers = ctx->nr_fragment_samplers_saved;
1215 memcpy(info->samplers, ctx->fragment_samplers_saved,
1216 sizeof(info->samplers));
1217 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1218 }
1219
1220
1221 void
1222 cso_set_sampler_views(struct cso_context *ctx,
1223 unsigned shader_stage,
1224 unsigned count,
1225 struct pipe_sampler_view **views)
1226 {
1227 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1228 unsigned i;
1229 boolean any_change = FALSE;
1230
1231 /* reference new views */
1232 for (i = 0; i < count; i++) {
1233 any_change |= ctx->fragment_views[i] != views[i];
1234 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1235 }
1236 /* unref extra old views, if any */
1237 for (; i < ctx->nr_fragment_views; i++) {
1238 any_change |= ctx->fragment_views[i] != NULL;
1239 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1240 }
1241
1242 /* bind the new sampler views */
1243 if (any_change) {
1244 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1245 MAX2(ctx->nr_fragment_views, count),
1246 ctx->fragment_views);
1247 }
1248
1249 ctx->nr_fragment_views = count;
1250 }
1251 else
1252 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1253 }
1254
1255
1256 void
1257 cso_save_fragment_sampler_views(struct cso_context *ctx)
1258 {
1259 unsigned i;
1260
1261 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1262
1263 for (i = 0; i < ctx->nr_fragment_views; i++) {
1264 assert(!ctx->fragment_views_saved[i]);
1265 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1266 ctx->fragment_views[i]);
1267 }
1268 }
1269
1270
/**
 * Restore the fragment sampler views saved by
 * cso_save_fragment_sampler_views() and rebind them on the driver.
 */
void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i, nr_saved = ctx->nr_fragment_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      /* move the reference from one pointer to another */
      ctx->fragment_views[i] = ctx->fragment_views_saved[i];
      ctx->fragment_views_saved[i] = NULL;
   }
   /* Drop references to current views above the saved count. */
   for (; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }

   /* Bind enough slots to cover both counts so stale slots become NULL. */
   num = MAX2(ctx->nr_fragment_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
                                ctx->fragment_views);

   ctx->nr_fragment_views = nr_saved;
   ctx->nr_fragment_views_saved = 0;
}
1296
1297
/**
 * Bind stream-output targets, keeping referenced copies in
 * ctx->so_targets so they can be saved/restored around meta ops.
 *
 * \param offsets  per-target write offsets, passed through to the driver.
 */
void
cso_set_stream_outputs(struct cso_context *ctx,
                       unsigned num_targets,
                       struct pipe_stream_output_target **targets,
                       const unsigned *offsets)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      /* No streamout support: only an unbind request is legal here. */
      assert(num_targets == 0);
      return;
   }

   if (ctx->nr_so_targets == 0 && num_targets == 0) {
      /* Nothing to do. */
      return;
   }

   /* reference new targets */
   for (i = 0; i < num_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
   }
   /* unref extra old targets, if any */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, num_targets, targets,
                                   offsets);
   ctx->nr_so_targets = num_targets;
}
1330
1331 void
1332 cso_save_stream_outputs(struct cso_context *ctx)
1333 {
1334 uint i;
1335
1336 if (!ctx->has_streamout) {
1337 return;
1338 }
1339
1340 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1341
1342 for (i = 0; i < ctx->nr_so_targets; i++) {
1343 assert(!ctx->so_targets_saved[i]);
1344 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1345 }
1346 }
1347
/**
 * Restore the stream-output targets saved by cso_save_stream_outputs(),
 * rebinding them in append mode, and release the saved references.
 */
void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;
   unsigned offset[PIPE_MAX_SO_BUFFERS];

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      /* Drop the current target's reference... */
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
      /* -1 means append */
      offset[i] = (unsigned)-1;
   }
   /* Drop references to current targets beyond the saved count. */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, offset);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1383
1384 /* constant buffers */
1385
1386 void
1387 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1388 unsigned index, struct pipe_constant_buffer *cb)
1389 {
1390 struct pipe_context *pipe = cso->pipe;
1391
1392 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1393
1394 if (index == 0) {
1395 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1396 }
1397 }
1398
1399 void
1400 cso_set_constant_buffer_resource(struct cso_context *cso,
1401 unsigned shader_stage,
1402 unsigned index,
1403 struct pipe_resource *buffer)
1404 {
1405 if (buffer) {
1406 struct pipe_constant_buffer cb;
1407 cb.buffer = buffer;
1408 cb.buffer_offset = 0;
1409 cb.buffer_size = buffer->width0;
1410 cb.user_buffer = NULL;
1411 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1412 } else {
1413 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1414 }
1415 }
1416
1417 void
1418 cso_save_constant_buffer_slot0(struct cso_context *cso,
1419 unsigned shader_stage)
1420 {
1421 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1422 &cso->aux_constbuf_current[shader_stage]);
1423 }
1424
1425 void
1426 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1427 unsigned shader_stage)
1428 {
1429 cso_set_constant_buffer(cso, shader_stage, 0,
1430 &cso->aux_constbuf_saved[shader_stage]);
1431 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1432 NULL);
1433 }
1434
1435 /* drawing */
1436
1437 void
1438 cso_set_index_buffer(struct cso_context *cso,
1439 const struct pipe_index_buffer *ib)
1440 {
1441 struct u_vbuf *vbuf = cso->vbuf;
1442
1443 if (vbuf) {
1444 u_vbuf_set_index_buffer(vbuf, ib);
1445 } else {
1446 struct pipe_context *pipe = cso->pipe;
1447 pipe->set_index_buffer(pipe, ib);
1448 }
1449 }
1450
1451 void
1452 cso_draw_vbo(struct cso_context *cso,
1453 const struct pipe_draw_info *info)
1454 {
1455 struct u_vbuf *vbuf = cso->vbuf;
1456
1457 if (vbuf) {
1458 u_vbuf_draw_vbo(vbuf, info);
1459 } else {
1460 struct pipe_context *pipe = cso->pipe;
1461 pipe->draw_vbo(pipe, info);
1462 }
1463 }
1464
1465 void
1466 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1467 {
1468 struct pipe_draw_info info;
1469
1470 util_draw_init_info(&info);
1471
1472 info.mode = mode;
1473 info.start = start;
1474 info.count = count;
1475 info.min_index = start;
1476 info.max_index = start + count - 1;
1477
1478 cso_draw_vbo(cso, &info);
1479 }
1480
1481 void
1482 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1483 uint start, uint count,
1484 uint start_instance, uint instance_count)
1485 {
1486 struct pipe_draw_info info;
1487
1488 util_draw_init_info(&info);
1489
1490 info.mode = mode;
1491 info.start = start;
1492 info.count = count;
1493 info.min_index = start;
1494 info.max_index = start + count - 1;
1495 info.start_instance = start_instance;
1496 info.instance_count = instance_count;
1497
1498 cso_draw_vbo(cso, &info);
1499 }