Merge remote-tracking branch 'origin/master' into vulkan
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Info related to samplers and sampler views.
 * We have one of these for fragment samplers and another for vertex samplers.
 */
struct sampler_info
{
   void *samplers[PIPE_MAX_SAMPLERS];  /**< driver sampler-state CSO handles */
   unsigned nr_samplers;               /**< count of valid entries in samplers[] */
};
62
63
64
struct cso_context {
   struct pipe_context *pipe;   /**< the wrapped driver context */
   struct cso_cache *cache;     /**< hash cache of driver CSO objects */
   struct u_vbuf *vbuf;         /**< optional vertex-buffer translation layer */

   /* Capabilities queried from the screen at creation time. */
   boolean has_geometry_shader;
   boolean has_tessellation;
   boolean has_compute_shader;
   boolean has_streamout;

   unsigned saved_state;  /**< bitmask of CSO_BIT_x flags */

   /* Currently bound fragment sampler views. */
   struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views;

   /* 1-deep save slot for fragment sampler views. */
   struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views_saved;

   /* 1-deep save slot for fragment sampler states. */
   void *fragment_samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_fragment_samplers_saved;

   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* The "auxiliary" vertex buffer slot used by meta ops, plus its save slot. */
   struct pipe_vertex_buffer aux_vertex_buffer_current;
   struct pipe_vertex_buffer aux_vertex_buffer_saved;
   unsigned aux_vertex_buffer_index;

   /* Per-shader-stage auxiliary constant buffers and their save slots. */
   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   /* Currently bound stream-output targets. */
   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   /* 1-deep save slot for stream-output targets. */
   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *tessctrl_shader, *tessctrl_shader_saved;
   void *tesseval_shader, *tesseval_shader_saved;
   void *compute_shader;   /* no save slot: meta ops don't touch compute */
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   unsigned min_samples, min_samples_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
125
126
127 static boolean delete_blend_state(struct cso_context *ctx, void *state)
128 {
129 struct cso_blend *cso = (struct cso_blend *)state;
130
131 if (ctx->blend == cso->data)
132 return FALSE;
133
134 if (cso->delete_state)
135 cso->delete_state(cso->context, cso->data);
136 FREE(state);
137 return TRUE;
138 }
139
140 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
141 {
142 struct cso_depth_stencil_alpha *cso =
143 (struct cso_depth_stencil_alpha *)state;
144
145 if (ctx->depth_stencil == cso->data)
146 return FALSE;
147
148 if (cso->delete_state)
149 cso->delete_state(cso->context, cso->data);
150 FREE(state);
151
152 return TRUE;
153 }
154
155 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
156 {
157 struct cso_sampler *cso = (struct cso_sampler *)state;
158 if (cso->delete_state)
159 cso->delete_state(cso->context, cso->data);
160 FREE(state);
161 return TRUE;
162 }
163
164 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
165 {
166 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
167
168 if (ctx->rasterizer == cso->data)
169 return FALSE;
170 if (cso->delete_state)
171 cso->delete_state(cso->context, cso->data);
172 FREE(state);
173 return TRUE;
174 }
175
176 static boolean delete_vertex_elements(struct cso_context *ctx,
177 void *state)
178 {
179 struct cso_velements *cso = (struct cso_velements *)state;
180
181 if (ctx->velements == cso->data)
182 return FALSE;
183
184 if (cso->delete_state)
185 cso->delete_state(cso->context, cso->data);
186 FREE(state);
187 return TRUE;
188 }
189
190
191 static inline boolean delete_cso(struct cso_context *ctx,
192 void *state, enum cso_cache_type type)
193 {
194 switch (type) {
195 case CSO_BLEND:
196 return delete_blend_state(ctx, state);
197 case CSO_SAMPLER:
198 return delete_sampler_state(ctx, state);
199 case CSO_DEPTH_STENCIL_ALPHA:
200 return delete_depth_stencil_state(ctx, state);
201 case CSO_RASTERIZER:
202 return delete_rasterizer_state(ctx, state);
203 case CSO_VELEMENTS:
204 return delete_vertex_elements(ctx, state);
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
/**
 * Cache-eviction callback: trim a CSO hash table when it grows too large.
 *
 * \param hash       the per-type hash table to shrink
 * \param type       which kind of CSO the table holds
 * \param max_size   soft limit on the number of entries
 * \param user_data  the owning cso_context (passed through by the cache)
 */
static inline void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* if we're approach the maximum size, remove fourth of the entries
    * otherwise every subsequent call will go through the same */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   /* Evict a quarter of the entries only once the table is already over
    * the limit; otherwise to_remove starts at 0. */
   int to_remove =  (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   /* NOTE(review): if every remaining entry is currently bound (delete_cso
    * returns FALSE for all of them), this loop can only terminate by walking
    * the iterator past the end — presumably cso_hash_iter_next/erase handle
    * that; verify against cso_hash.c. */
   while (to_remove) {
      /*remove elements until we're good */
      /*fixme: currently we pick the nodes to remove at random*/
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}
236
237 static void cso_init_vbuf(struct cso_context *cso)
238 {
239 struct u_vbuf_caps caps;
240
241 /* Install u_vbuf if there is anything unsupported. */
242 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
243 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
244 cso->aux_vertex_buffer_index);
245 }
246 }
247
248 struct cso_context *cso_create_context( struct pipe_context *pipe )
249 {
250 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
251 if (!ctx)
252 goto out;
253
254 ctx->cache = cso_cache_create();
255 if (ctx->cache == NULL)
256 goto out;
257 cso_cache_set_sanitize_callback(ctx->cache,
258 sanitize_hash,
259 ctx);
260
261 ctx->pipe = pipe;
262 ctx->sample_mask = ~0;
263
264 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
265
266 cso_init_vbuf(ctx);
267
268 /* Enable for testing: */
269 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
270
271 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
272 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
273 ctx->has_geometry_shader = TRUE;
274 }
275 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
276 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
277 ctx->has_tessellation = TRUE;
278 }
279 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
280 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
281 int supported_irs =
282 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
283 PIPE_SHADER_CAP_SUPPORTED_IRS);
284 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
285 ctx->has_compute_shader = TRUE;
286 }
287 }
288 if (pipe->screen->get_param(pipe->screen,
289 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
290 ctx->has_streamout = TRUE;
291 }
292
293 return ctx;
294
295 out:
296 cso_destroy_context( ctx );
297 return NULL;
298 }
299
300 /**
301 * Free the CSO context.
302 */
303 void cso_destroy_context( struct cso_context *ctx )
304 {
305 unsigned i;
306
307 if (ctx->pipe) {
308 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
309
310 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
311 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
312
313 {
314 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
315 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
316 struct pipe_screen *scr = ctx->pipe->screen;
317 unsigned sh;
318 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
319 int maxsam = scr->get_shader_param(scr, sh,
320 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
321 int maxview = scr->get_shader_param(scr, sh,
322 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
323 assert(maxsam <= PIPE_MAX_SAMPLERS);
324 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
325 if (maxsam > 0) {
326 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
327 }
328 if (maxview > 0) {
329 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
330 }
331 }
332 }
333
334 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
335 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
336 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
337 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
338 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
339 if (ctx->has_geometry_shader) {
340 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
341 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
342 }
343 if (ctx->has_tessellation) {
344 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
345 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
346 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
347 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
348 }
349 if (ctx->has_compute_shader) {
350 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
351 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
352 }
353 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
354
355 if (ctx->has_streamout)
356 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
357 }
358
359 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
360 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
361 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
362 }
363
364 util_unreference_framebuffer_state(&ctx->fb);
365 util_unreference_framebuffer_state(&ctx->fb_saved);
366
367 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
368 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
369
370 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
371 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
372 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
373 }
374
375 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
376 pipe_so_target_reference(&ctx->so_targets[i], NULL);
377 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
378 }
379
380 if (ctx->cache) {
381 cso_cache_delete( ctx->cache );
382 ctx->cache = NULL;
383 }
384
385 if (ctx->vbuf)
386 u_vbuf_destroy(ctx->vbuf);
387 FREE( ctx );
388 }
389
390
391 /* Those function will either find the state of the given template
392 * in the cache or they will create a new state from the given
393 * template, insert it in the cache and return it.
394 */
395
396 /*
397 * If the driver returns 0 from the create method then they will assign
398 * the data member of the cso to be the template itself.
399 */
400
/**
 * Bind blend state, going through the CSO cache.
 *
 * Looks up (or creates and caches) the driver blend object matching
 * \p templ and binds it only if it differs from the current one.
 *
 * \return PIPE_OK, or PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* When independent blend is disabled, only rt[0] matters, so hash a
    * shorter key covering the state up to (but not including) rt[1]. */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create a new driver blend object and insert it. */
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Zero first so the tail beyond key_size is deterministic. */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   /* Only touch the driver if the bound object actually changes. */
   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
444
445 static void
446 cso_save_blend(struct cso_context *ctx)
447 {
448 assert(!ctx->blend_saved);
449 ctx->blend_saved = ctx->blend;
450 }
451
452 static void
453 cso_restore_blend(struct cso_context *ctx)
454 {
455 if (ctx->blend != ctx->blend_saved) {
456 ctx->blend = ctx->blend_saved;
457 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
458 }
459 ctx->blend_saved = NULL;
460 }
461
462
463
/**
 * Bind depth/stencil/alpha state, going through the CSO cache.
 *
 * \return PIPE_OK, or PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and cache a new driver DSA object. */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   /* Only touch the driver if the bound object actually changes. */
   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
509
510 static void
511 cso_save_depth_stencil_alpha(struct cso_context *ctx)
512 {
513 assert(!ctx->depth_stencil_saved);
514 ctx->depth_stencil_saved = ctx->depth_stencil;
515 }
516
517 static void
518 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
519 {
520 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
521 ctx->depth_stencil = ctx->depth_stencil_saved;
522 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
523 ctx->depth_stencil_saved);
524 }
525 ctx->depth_stencil_saved = NULL;
526 }
527
528
529
/**
 * Bind rasterizer state, going through the CSO cache.
 *
 * \return PIPE_OK, or PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and cache a new driver rasterizer object. */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   /* Only touch the driver if the bound object actually changes. */
   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
570
571 static void
572 cso_save_rasterizer(struct cso_context *ctx)
573 {
574 assert(!ctx->rasterizer_saved);
575 ctx->rasterizer_saved = ctx->rasterizer;
576 }
577
578 static void
579 cso_restore_rasterizer(struct cso_context *ctx)
580 {
581 if (ctx->rasterizer != ctx->rasterizer_saved) {
582 ctx->rasterizer = ctx->rasterizer_saved;
583 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
584 }
585 ctx->rasterizer_saved = NULL;
586 }
587
588
589 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
590 {
591 if (ctx->fragment_shader != handle) {
592 ctx->fragment_shader = handle;
593 ctx->pipe->bind_fs_state(ctx->pipe, handle);
594 }
595 }
596
597 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
598 {
599 if (handle == ctx->fragment_shader) {
600 /* unbind before deleting */
601 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
602 ctx->fragment_shader = NULL;
603 }
604 ctx->pipe->delete_fs_state(ctx->pipe, handle);
605 }
606
607 static void
608 cso_save_fragment_shader(struct cso_context *ctx)
609 {
610 assert(!ctx->fragment_shader_saved);
611 ctx->fragment_shader_saved = ctx->fragment_shader;
612 }
613
614 static void
615 cso_restore_fragment_shader(struct cso_context *ctx)
616 {
617 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
618 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
619 ctx->fragment_shader = ctx->fragment_shader_saved;
620 }
621 ctx->fragment_shader_saved = NULL;
622 }
623
624
625 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
626 {
627 if (ctx->vertex_shader != handle) {
628 ctx->vertex_shader = handle;
629 ctx->pipe->bind_vs_state(ctx->pipe, handle);
630 }
631 }
632
633 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
634 {
635 if (handle == ctx->vertex_shader) {
636 /* unbind before deleting */
637 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
638 ctx->vertex_shader = NULL;
639 }
640 ctx->pipe->delete_vs_state(ctx->pipe, handle);
641 }
642
643 static void
644 cso_save_vertex_shader(struct cso_context *ctx)
645 {
646 assert(!ctx->vertex_shader_saved);
647 ctx->vertex_shader_saved = ctx->vertex_shader;
648 }
649
650 static void
651 cso_restore_vertex_shader(struct cso_context *ctx)
652 {
653 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
654 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
655 ctx->vertex_shader = ctx->vertex_shader_saved;
656 }
657 ctx->vertex_shader_saved = NULL;
658 }
659
660
661 void cso_set_framebuffer(struct cso_context *ctx,
662 const struct pipe_framebuffer_state *fb)
663 {
664 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
665 util_copy_framebuffer_state(&ctx->fb, fb);
666 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
667 }
668 }
669
670 static void
671 cso_save_framebuffer(struct cso_context *ctx)
672 {
673 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
674 }
675
676 static void
677 cso_restore_framebuffer(struct cso_context *ctx)
678 {
679 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
680 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
681 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
682 util_unreference_framebuffer_state(&ctx->fb_saved);
683 }
684 }
685
686
687 void cso_set_viewport(struct cso_context *ctx,
688 const struct pipe_viewport_state *vp)
689 {
690 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
691 ctx->vp = *vp;
692 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
693 }
694 }
695
696 /**
697 * Setup viewport state for given width and height (position is always (0,0)).
698 * Invert the Y axis if 'invert' is true.
699 */
700 void
701 cso_set_viewport_dims(struct cso_context *ctx,
702 float width, float height, boolean invert)
703 {
704 struct pipe_viewport_state vp;
705 vp.scale[0] = width * 0.5f;
706 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
707 vp.scale[2] = 0.5f;
708 vp.translate[0] = 0.5f * width;
709 vp.translate[1] = 0.5f * height;
710 vp.translate[2] = 0.5f;
711 cso_set_viewport(ctx, &vp);
712 }
713
714 static void
715 cso_save_viewport(struct cso_context *ctx)
716 {
717 ctx->vp_saved = ctx->vp;
718 }
719
720
721 static void
722 cso_restore_viewport(struct cso_context *ctx)
723 {
724 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
725 ctx->vp = ctx->vp_saved;
726 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
727 }
728 }
729
730
731 void cso_set_blend_color(struct cso_context *ctx,
732 const struct pipe_blend_color *bc)
733 {
734 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
735 ctx->blend_color = *bc;
736 ctx->pipe->set_blend_color(ctx->pipe, bc);
737 }
738 }
739
740 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
741 {
742 if (ctx->sample_mask != sample_mask) {
743 ctx->sample_mask = sample_mask;
744 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
745 }
746 }
747
748 static void
749 cso_save_sample_mask(struct cso_context *ctx)
750 {
751 ctx->sample_mask_saved = ctx->sample_mask;
752 }
753
754 static void
755 cso_restore_sample_mask(struct cso_context *ctx)
756 {
757 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
758 }
759
760 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
761 {
762 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
763 ctx->min_samples = min_samples;
764 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
765 }
766 }
767
768 static void
769 cso_save_min_samples(struct cso_context *ctx)
770 {
771 ctx->min_samples_saved = ctx->min_samples;
772 }
773
774 static void
775 cso_restore_min_samples(struct cso_context *ctx)
776 {
777 cso_set_min_samples(ctx, ctx->min_samples_saved);
778 }
779
780 void cso_set_stencil_ref(struct cso_context *ctx,
781 const struct pipe_stencil_ref *sr)
782 {
783 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
784 ctx->stencil_ref = *sr;
785 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
786 }
787 }
788
789 static void
790 cso_save_stencil_ref(struct cso_context *ctx)
791 {
792 ctx->stencil_ref_saved = ctx->stencil_ref;
793 }
794
795
796 static void
797 cso_restore_stencil_ref(struct cso_context *ctx)
798 {
799 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
800 sizeof(ctx->stencil_ref))) {
801 ctx->stencil_ref = ctx->stencil_ref_saved;
802 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
803 }
804 }
805
806 void cso_set_render_condition(struct cso_context *ctx,
807 struct pipe_query *query,
808 boolean condition, uint mode)
809 {
810 struct pipe_context *pipe = ctx->pipe;
811
812 if (ctx->render_condition != query ||
813 ctx->render_condition_mode != mode ||
814 ctx->render_condition_cond != condition) {
815 pipe->render_condition(pipe, query, condition, mode);
816 ctx->render_condition = query;
817 ctx->render_condition_cond = condition;
818 ctx->render_condition_mode = mode;
819 }
820 }
821
822 static void
823 cso_save_render_condition(struct cso_context *ctx)
824 {
825 ctx->render_condition_saved = ctx->render_condition;
826 ctx->render_condition_cond_saved = ctx->render_condition_cond;
827 ctx->render_condition_mode_saved = ctx->render_condition_mode;
828 }
829
830 static void
831 cso_restore_render_condition(struct cso_context *ctx)
832 {
833 cso_set_render_condition(ctx, ctx->render_condition_saved,
834 ctx->render_condition_cond_saved,
835 ctx->render_condition_mode_saved);
836 }
837
838 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
839 {
840 assert(ctx->has_geometry_shader || !handle);
841
842 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
843 ctx->geometry_shader = handle;
844 ctx->pipe->bind_gs_state(ctx->pipe, handle);
845 }
846 }
847
848 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
849 {
850 if (handle == ctx->geometry_shader) {
851 /* unbind before deleting */
852 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
853 ctx->geometry_shader = NULL;
854 }
855 ctx->pipe->delete_gs_state(ctx->pipe, handle);
856 }
857
858 static void
859 cso_save_geometry_shader(struct cso_context *ctx)
860 {
861 if (!ctx->has_geometry_shader) {
862 return;
863 }
864
865 assert(!ctx->geometry_shader_saved);
866 ctx->geometry_shader_saved = ctx->geometry_shader;
867 }
868
869 static void
870 cso_restore_geometry_shader(struct cso_context *ctx)
871 {
872 if (!ctx->has_geometry_shader) {
873 return;
874 }
875
876 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
877 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
878 ctx->geometry_shader = ctx->geometry_shader_saved;
879 }
880 ctx->geometry_shader_saved = NULL;
881 }
882
883 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
884 {
885 assert(ctx->has_tessellation || !handle);
886
887 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
888 ctx->tessctrl_shader = handle;
889 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
890 }
891 }
892
893 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
894 {
895 if (handle == ctx->tessctrl_shader) {
896 /* unbind before deleting */
897 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
898 ctx->tessctrl_shader = NULL;
899 }
900 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
901 }
902
903 static void
904 cso_save_tessctrl_shader(struct cso_context *ctx)
905 {
906 if (!ctx->has_tessellation) {
907 return;
908 }
909
910 assert(!ctx->tessctrl_shader_saved);
911 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
912 }
913
914 static void
915 cso_restore_tessctrl_shader(struct cso_context *ctx)
916 {
917 if (!ctx->has_tessellation) {
918 return;
919 }
920
921 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
922 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
923 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
924 }
925 ctx->tessctrl_shader_saved = NULL;
926 }
927
928 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
929 {
930 assert(ctx->has_tessellation || !handle);
931
932 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
933 ctx->tesseval_shader = handle;
934 ctx->pipe->bind_tes_state(ctx->pipe, handle);
935 }
936 }
937
938 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
939 {
940 if (handle == ctx->tesseval_shader) {
941 /* unbind before deleting */
942 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
943 ctx->tesseval_shader = NULL;
944 }
945 ctx->pipe->delete_tes_state(ctx->pipe, handle);
946 }
947
948 static void
949 cso_save_tesseval_shader(struct cso_context *ctx)
950 {
951 if (!ctx->has_tessellation) {
952 return;
953 }
954
955 assert(!ctx->tesseval_shader_saved);
956 ctx->tesseval_shader_saved = ctx->tesseval_shader;
957 }
958
959 static void
960 cso_restore_tesseval_shader(struct cso_context *ctx)
961 {
962 if (!ctx->has_tessellation) {
963 return;
964 }
965
966 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
967 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
968 ctx->tesseval_shader = ctx->tesseval_shader_saved;
969 }
970 ctx->tesseval_shader_saved = NULL;
971 }
972
973 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
974 {
975 assert(ctx->has_compute_shader || !handle);
976
977 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
978 ctx->compute_shader = handle;
979 ctx->pipe->bind_compute_state(ctx->pipe, handle);
980 }
981 }
982
983 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
984 {
985 if (handle == ctx->compute_shader) {
986 /* unbind before deleting */
987 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
988 ctx->compute_shader = NULL;
989 }
990 ctx->pipe->delete_compute_state(ctx->pipe, handle);
991 }
992
/**
 * Bind vertex-element layout, going through u_vbuf if installed,
 * otherwise through the CSO cache.
 *
 * \param count   number of valid entries in \p states
 * \return PIPE_OK, or PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      /* u_vbuf manages vertex elements itself; bypass the cache. */
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and cache a new driver velems object.
       * NOTE: copying only key_size bytes relies on cso_velems_state
       * laying out `count` before `velems` — presumably guaranteed by
       * the struct definition in cso_cache.h. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                      &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* Only touch the driver if the bound object actually changes. */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
1052
1053 static void
1054 cso_save_vertex_elements(struct cso_context *ctx)
1055 {
1056 struct u_vbuf *vbuf = ctx->vbuf;
1057
1058 if (vbuf) {
1059 u_vbuf_save_vertex_elements(vbuf);
1060 return;
1061 }
1062
1063 assert(!ctx->velements_saved);
1064 ctx->velements_saved = ctx->velements;
1065 }
1066
1067 static void
1068 cso_restore_vertex_elements(struct cso_context *ctx)
1069 {
1070 struct u_vbuf *vbuf = ctx->vbuf;
1071
1072 if (vbuf) {
1073 u_vbuf_restore_vertex_elements(vbuf);
1074 return;
1075 }
1076
1077 if (ctx->velements != ctx->velements_saved) {
1078 ctx->velements = ctx->velements_saved;
1079 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1080 }
1081 ctx->velements_saved = NULL;
1082 }
1083
1084 /* vertex buffers */
1085
/**
 * Bind 'count' vertex buffers starting at 'start_slot'.
 *
 * When u_vbuf is active, the call is routed through it.  Otherwise, if the
 * range covers the auxiliary slot, a tracked copy of that slot is kept in
 * ctx->aux_vertex_buffer_current so cso_save/restore_aux_vertex_buffer_slot()
 * can work, and the buffers are passed straight to the driver.
 *
 * \param buffers  array of 'count' vertex buffers, or NULL to unbind
 */
void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned start_slot, unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
      return;
   }

   /* Save what's in the auxiliary slot, so that we can save and restore it
    * for meta ops. */
   if (start_slot <= ctx->aux_vertex_buffer_index &&
       start_slot+count > ctx->aux_vertex_buffer_index) {
      if (buffers) {
         const struct pipe_vertex_buffer *vb =
               buffers + (ctx->aux_vertex_buffer_index - start_slot);

         /* Take the reference first; the memcpy below then writes the same
          * buffer pointer value again, so the refcount stays consistent. */
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 vb->buffer);
         memcpy(&ctx->aux_vertex_buffer_current, vb,
                sizeof(struct pipe_vertex_buffer));
      }
      else {
         /* Unbinding: drop the tracked reference and clear the user ptr. */
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 NULL);
         ctx->aux_vertex_buffer_current.user_buffer = NULL;
      }
   }

   ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
}
1119
1120 static void
1121 cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1122 {
1123 struct u_vbuf *vbuf = ctx->vbuf;
1124
1125 if (vbuf) {
1126 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1127 return;
1128 }
1129
1130 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1131 ctx->aux_vertex_buffer_current.buffer);
1132 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1133 sizeof(struct pipe_vertex_buffer));
1134 }
1135
1136 static void
1137 cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1138 {
1139 struct u_vbuf *vbuf = ctx->vbuf;
1140
1141 if (vbuf) {
1142 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1143 return;
1144 }
1145
1146 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1147 &ctx->aux_vertex_buffer_saved);
1148 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1149 }
1150
/**
 * Return the index of the auxiliary vertex buffer slot, i.e. the slot that
 * cso_save/restore_aux_vertex_buffer_slot() operate on.
 */
unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   return ctx->aux_vertex_buffer_index;
}
1155
1156
1157 /**************** fragment/vertex sampler view state *************************/
1158
/**
 * Set one sampler state for the given shader stage and sampler index.
 *
 * The sampler CSO is looked up in (or created and inserted into) the cache,
 * keyed on the entire pipe_sampler_state template.  The resulting driver
 * handle is only recorded in ctx->samplers[]; nothing is bound to the
 * pipe context until cso_single_sampler_done() is called.
 *
 * \param templ  the sampler template, or NULL to clear the slot
 * \return PIPE_OK, or PIPE_ERROR_OUT_OF_MEMORY on allocation failure
 */
enum pipe_error
cso_single_sampler(struct cso_context *ctx, unsigned shader_stage,
                   unsigned idx, const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* Cache miss: create the driver sampler object and cache it. */
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         /* The cache calls delete_state when the entry is evicted. */
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         /* Cache hit: reuse the previously-created driver object. */
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   ctx->samplers[shader_stage].samplers[idx] = handle;
   return PIPE_OK;
}
1200
1201
1202 void
1203 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1204 {
1205 struct sampler_info *info = &ctx->samplers[shader_stage];
1206 unsigned i;
1207
1208 /* find highest non-null sampler */
1209 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1210 if (info->samplers[i - 1] != NULL)
1211 break;
1212 }
1213
1214 info->nr_samplers = i;
1215 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0, i,
1216 info->samplers);
1217 }
1218
1219
/*
 * If the function encounters any errors it will return the
 * last one.  Done to always try to set as many samplers
 * as possible.
 */
1225 enum pipe_error
1226 cso_set_samplers(struct cso_context *ctx,
1227 unsigned shader_stage,
1228 unsigned nr,
1229 const struct pipe_sampler_state **templates)
1230 {
1231 struct sampler_info *info = &ctx->samplers[shader_stage];
1232 unsigned i;
1233 enum pipe_error temp, error = PIPE_OK;
1234
1235 /* TODO: fastpath
1236 */
1237
1238 for (i = 0; i < nr; i++) {
1239 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1240 if (temp != PIPE_OK)
1241 error = temp;
1242 }
1243
1244 for ( ; i < info->nr_samplers; i++) {
1245 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1246 if (temp != PIPE_OK)
1247 error = temp;
1248 }
1249
1250 cso_single_sampler_done(ctx, shader_stage);
1251
1252 return error;
1253 }
1254
1255 static void
1256 cso_save_fragment_samplers(struct cso_context *ctx)
1257 {
1258 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1259
1260 ctx->nr_fragment_samplers_saved = info->nr_samplers;
1261 memcpy(ctx->fragment_samplers_saved, info->samplers,
1262 sizeof(info->samplers));
1263 }
1264
1265
1266 static void
1267 cso_restore_fragment_samplers(struct cso_context *ctx)
1268 {
1269 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1270
1271 info->nr_samplers = ctx->nr_fragment_samplers_saved;
1272 memcpy(info->samplers, ctx->fragment_samplers_saved,
1273 sizeof(info->samplers));
1274 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1275 }
1276
1277
/**
 * Bind 'count' sampler views for the given shader stage.
 *
 * For the fragment stage the views are tracked (with references) in
 * ctx->fragment_views so that save/restore works and the driver call can
 * be skipped when nothing actually changed.  Other stages are passed
 * straight through to the driver.
 */
void
cso_set_sampler_views(struct cso_context *ctx,
                      unsigned shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   if (shader_stage == PIPE_SHADER_FRAGMENT) {
      unsigned i;
      boolean any_change = FALSE;

      /* reference new views */
      for (i = 0; i < count; i++) {
         any_change |= ctx->fragment_views[i] != views[i];
         pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
      }
      /* unref extra old views, if any */
      for (; i < ctx->nr_fragment_views; i++) {
         any_change |= ctx->fragment_views[i] != NULL;
         pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      }

      /* bind the new sampler views */
      if (any_change) {
         /* Use the larger of old/new counts so stale trailing slots are
          * cleared in the driver as well. */
         ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
                                      MAX2(ctx->nr_fragment_views, count),
                                      ctx->fragment_views);
      }

      ctx->nr_fragment_views = count;
   }
   else
      ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
}
1311
1312
1313 static void
1314 cso_save_fragment_sampler_views(struct cso_context *ctx)
1315 {
1316 unsigned i;
1317
1318 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1319
1320 for (i = 0; i < ctx->nr_fragment_views; i++) {
1321 assert(!ctx->fragment_views_saved[i]);
1322 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1323 ctx->fragment_views[i]);
1324 }
1325 }
1326
1327
/**
 * Rebind the fragment sampler views saved by
 * cso_save_fragment_sampler_views(), releasing the views a meta op bound
 * in the meantime.
 */
static void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i, nr_saved = ctx->nr_fragment_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      /* drop the view currently occupying the slot */
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      /* move the reference from one pointer to another */
      ctx->fragment_views[i] = ctx->fragment_views_saved[i];
      ctx->fragment_views_saved[i] = NULL;
   }
   /* release current views beyond the saved count */
   for (; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }

   /* bind enough slots to also clear any now-unused trailing views */
   num = MAX2(ctx->nr_fragment_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
                                ctx->fragment_views);

   ctx->nr_fragment_views = nr_saved;
   ctx->nr_fragment_views_saved = 0;
}
1353
1354
1355 void
1356 cso_set_stream_outputs(struct cso_context *ctx,
1357 unsigned num_targets,
1358 struct pipe_stream_output_target **targets,
1359 const unsigned *offsets)
1360 {
1361 struct pipe_context *pipe = ctx->pipe;
1362 uint i;
1363
1364 if (!ctx->has_streamout) {
1365 assert(num_targets == 0);
1366 return;
1367 }
1368
1369 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1370 /* Nothing to do. */
1371 return;
1372 }
1373
1374 /* reference new targets */
1375 for (i = 0; i < num_targets; i++) {
1376 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1377 }
1378 /* unref extra old targets, if any */
1379 for (; i < ctx->nr_so_targets; i++) {
1380 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1381 }
1382
1383 pipe->set_stream_output_targets(pipe, num_targets, targets,
1384 offsets);
1385 ctx->nr_so_targets = num_targets;
1386 }
1387
1388 static void
1389 cso_save_stream_outputs(struct cso_context *ctx)
1390 {
1391 uint i;
1392
1393 if (!ctx->has_streamout) {
1394 return;
1395 }
1396
1397 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1398
1399 for (i = 0; i < ctx->nr_so_targets; i++) {
1400 assert(!ctx->so_targets_saved[i]);
1401 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1402 }
1403 }
1404
/**
 * Rebind the stream-output targets saved by cso_save_stream_outputs(),
 * releasing whatever a meta op bound in the meantime.  Restored targets
 * resume appending where they left off (offset -1).
 */
static void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;
   unsigned offset[PIPE_MAX_SO_BUFFERS];

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      /* drop the target currently occupying the slot */
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
      /* -1 means append */
      offset[i] = (unsigned)-1;
   }
   /* release current targets beyond the saved count */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, offset);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1440
1441 /* constant buffers */
1442
1443 void
1444 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1445 unsigned index, struct pipe_constant_buffer *cb)
1446 {
1447 struct pipe_context *pipe = cso->pipe;
1448
1449 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1450
1451 if (index == 0) {
1452 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1453 }
1454 }
1455
1456 void
1457 cso_set_constant_buffer_resource(struct cso_context *cso,
1458 unsigned shader_stage,
1459 unsigned index,
1460 struct pipe_resource *buffer)
1461 {
1462 if (buffer) {
1463 struct pipe_constant_buffer cb;
1464 cb.buffer = buffer;
1465 cb.buffer_offset = 0;
1466 cb.buffer_size = buffer->width0;
1467 cb.user_buffer = NULL;
1468 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1469 } else {
1470 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1471 }
1472 }
1473
/**
 * Save the constant buffer bound to slot 0 of the given shader stage
 * (tracked in aux_constbuf_current by cso_set_constant_buffer()).
 * util_copy_constant_buffer takes its own resource reference.
 */
void
cso_save_constant_buffer_slot0(struct cso_context *cso,
                               unsigned shader_stage)
{
   util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
                             &cso->aux_constbuf_current[shader_stage]);
}
1481
1482 void
1483 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1484 unsigned shader_stage)
1485 {
1486 cso_set_constant_buffer(cso, shader_stage, 0,
1487 &cso->aux_constbuf_saved[shader_stage]);
1488 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1489 NULL);
1490 }
1491
1492
1493 /**
1494 * Save all the CSO state items specified by the state_mask bitmask
1495 * of CSO_BIT_x flags.
1496 */
void
cso_save_state(struct cso_context *cso, unsigned state_mask)
{
   /* Only one save may be outstanding at a time; cso_restore_state()
    * clears saved_state again. */
   assert(cso->saved_state == 0);

   /* Remember the mask so cso_restore_state() restores the same items. */
   cso->saved_state = state_mask;

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_save_aux_vertex_buffer_slot(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_save_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_save_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_save_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_save_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_save_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_save_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_save_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_save_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_save_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_save_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_save_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_save_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_save_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_save_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_save_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_save_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_save_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_save_viewport(cso);
}
1543
1544
1545 /**
1546 * Restore the state which was saved by cso_save_state().
1547 */
void
cso_restore_state(struct cso_context *cso)
{
   /* The mask recorded by the matching cso_save_state() call. */
   unsigned state_mask = cso->saved_state;

   /* A cso_save_state() call must precede every restore. */
   assert(state_mask);

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_restore_aux_vertex_buffer_slot(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_restore_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_restore_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_restore_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_restore_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_restore_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_restore_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_restore_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_restore_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_restore_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_restore_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_restore_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_restore_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_restore_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_restore_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_restore_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_restore_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_restore_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_restore_viewport(cso);

   /* Allow the next cso_save_state() call. */
   cso->saved_state = 0;
}
1596
1597
1598
1599 /* drawing */
1600
1601 void
1602 cso_set_index_buffer(struct cso_context *cso,
1603 const struct pipe_index_buffer *ib)
1604 {
1605 struct u_vbuf *vbuf = cso->vbuf;
1606
1607 if (vbuf) {
1608 u_vbuf_set_index_buffer(vbuf, ib);
1609 } else {
1610 struct pipe_context *pipe = cso->pipe;
1611 pipe->set_index_buffer(pipe, ib);
1612 }
1613 }
1614
1615 void
1616 cso_draw_vbo(struct cso_context *cso,
1617 const struct pipe_draw_info *info)
1618 {
1619 struct u_vbuf *vbuf = cso->vbuf;
1620
1621 if (vbuf) {
1622 u_vbuf_draw_vbo(vbuf, info);
1623 } else {
1624 struct pipe_context *pipe = cso->pipe;
1625 pipe->draw_vbo(pipe, info);
1626 }
1627 }
1628
1629 void
1630 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1631 {
1632 struct pipe_draw_info info;
1633
1634 util_draw_init_info(&info);
1635
1636 info.mode = mode;
1637 info.start = start;
1638 info.count = count;
1639 info.min_index = start;
1640 info.max_index = start + count - 1;
1641
1642 cso_draw_vbo(cso, &info);
1643 }
1644
1645 void
1646 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1647 uint start, uint count,
1648 uint start_instance, uint instance_count)
1649 {
1650 struct pipe_draw_info info;
1651
1652 util_draw_init_info(&info);
1653
1654 info.mode = mode;
1655 info.start = start;
1656 info.count = count;
1657 info.min_index = start;
1658 info.max_index = start + count - 1;
1659 info.start_instance = start_instance;
1660 info.instance_count = instance_count;
1661
1662 cso_draw_vbo(cso, &info);
1663 }