gallium: Use enum pipe_shader_type in set_sampler_views()
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Per-shader-stage sampler information.
 * Holds the driver CSO handles currently bound for one shader stage.
 */
struct sampler_info
{
   void *samplers[PIPE_MAX_SAMPLERS];  /* driver sampler-state handles */
   unsigned nr_samplers;               /* count of valid entries above */
};
61
62
63
struct cso_context {
   struct pipe_context *pipe;   /* the wrapped driver context */
   struct cso_cache *cache;     /* hash cache of created driver CSOs */
   struct u_vbuf *vbuf;         /* optional vertex-buffer fallback module */

   /* Capabilities probed from the screen at creation time. */
   boolean has_geometry_shader;
   boolean has_tessellation;
   boolean has_compute_shader;
   boolean has_streamout;

   unsigned saved_state;  /**< bitmask of CSO_BIT_x flags */

   /* Fragment sampler views: current set plus a 1-deep save slot. */
   struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views;

   struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views_saved;

   /* Saved fragment sampler CSO handles (1-deep stack). */
   void *fragment_samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_fragment_samplers_saved;

   /* Currently bound sampler CSOs, per shader stage. */
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* Auxiliary vertex buffer slot used by meta operations; its contents
    * are tracked so they can be saved/restored around such operations. */
   struct pipe_vertex_buffer aux_vertex_buffer_current;
   struct pipe_vertex_buffer aux_vertex_buffer_saved;
   unsigned aux_vertex_buffer_index;  /* which VB slot is the aux slot */

   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   struct pipe_image_view fragment_image0_current;
   struct pipe_image_view fragment_image0_saved;

   /* Stream-output targets: current and saved sets. */
   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *tessctrl_shader, *tessctrl_shader_saved;
   void *tesseval_shader, *tesseval_shader_saved;
   void *compute_shader;                 /* no save slot for compute */
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   unsigned min_samples, min_samples_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
127
128
129 static boolean delete_blend_state(struct cso_context *ctx, void *state)
130 {
131 struct cso_blend *cso = (struct cso_blend *)state;
132
133 if (ctx->blend == cso->data)
134 return FALSE;
135
136 if (cso->delete_state)
137 cso->delete_state(cso->context, cso->data);
138 FREE(state);
139 return TRUE;
140 }
141
142 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
143 {
144 struct cso_depth_stencil_alpha *cso =
145 (struct cso_depth_stencil_alpha *)state;
146
147 if (ctx->depth_stencil == cso->data)
148 return FALSE;
149
150 if (cso->delete_state)
151 cso->delete_state(cso->context, cso->data);
152 FREE(state);
153
154 return TRUE;
155 }
156
157 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
158 {
159 struct cso_sampler *cso = (struct cso_sampler *)state;
160 if (cso->delete_state)
161 cso->delete_state(cso->context, cso->data);
162 FREE(state);
163 return TRUE;
164 }
165
166 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
167 {
168 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
169
170 if (ctx->rasterizer == cso->data)
171 return FALSE;
172 if (cso->delete_state)
173 cso->delete_state(cso->context, cso->data);
174 FREE(state);
175 return TRUE;
176 }
177
178 static boolean delete_vertex_elements(struct cso_context *ctx,
179 void *state)
180 {
181 struct cso_velements *cso = (struct cso_velements *)state;
182
183 if (ctx->velements == cso->data)
184 return FALSE;
185
186 if (cso->delete_state)
187 cso->delete_state(cso->context, cso->data);
188 FREE(state);
189 return TRUE;
190 }
191
192
193 static inline boolean delete_cso(struct cso_context *ctx,
194 void *state, enum cso_cache_type type)
195 {
196 switch (type) {
197 case CSO_BLEND:
198 return delete_blend_state(ctx, state);
199 case CSO_SAMPLER:
200 return delete_sampler_state(ctx, state);
201 case CSO_DEPTH_STENCIL_ALPHA:
202 return delete_depth_stencil_state(ctx, state);
203 case CSO_RASTERIZER:
204 return delete_rasterizer_state(ctx, state);
205 case CSO_VELEMENTS:
206 return delete_vertex_elements(ctx, state);
207 default:
208 assert(0);
209 FREE(state);
210 }
211 return FALSE;
212 }
213
/**
 * Cache sanitize callback: keeps a CSO hash table from growing without
 * bound.  When the table reaches its maximum size, roughly a quarter of
 * the entries (plus any overflow beyond max_size) are evicted so that
 * subsequent insertions don't trigger a sanitize pass every time.
 */
static inline void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the
    * entries; otherwise every subsequent call would go through this
    * same work again. */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   /* The boolean factor makes to_remove zero while under capacity. */
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* fixme: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      /* delete_cso() may refuse (currently-bound state): skip it then. */
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}
238
239 static void cso_init_vbuf(struct cso_context *cso)
240 {
241 struct u_vbuf_caps caps;
242
243 /* Install u_vbuf if there is anything unsupported. */
244 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
245 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
246 cso->aux_vertex_buffer_index);
247 }
248 }
249
/**
 * Create a CSO context wrapping the given pipe context.
 * Probes the screen for optional capabilities (geometry/tessellation/
 * compute shaders, stream output) so later calls can skip unsupported
 * stages.  Returns NULL on allocation failure.
 */
struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (!ctx)
      return NULL;

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask = ~0;

   ctx->aux_vertex_buffer_index = 0; /* 0 for now */

   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   /* A stage is considered present when it may contain instructions. */
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_tessellation = TRUE;
   }
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      /* Compute additionally requires TGSI support here. */
      int supported_irs =
         pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
                                        PIPE_SHADER_CAP_SUPPORTED_IRS);
      if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
         ctx->has_compute_shader = TRUE;
      }
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   /* cso_destroy_context() copes with ctx->pipe still being NULL here. */
   cso_destroy_context( ctx );
   return NULL;
}
301
/**
 * Free the CSO context.
 * First unbinds all state from the wrapped pipe context (so drivers never
 * see dangling bindings), then drops every reference this context holds,
 * and finally destroys the cache and the context itself.
 */
void cso_destroy_context( struct cso_context *ctx )
{
   unsigned i;

   if (ctx->pipe) {
      ctx->pipe->set_index_buffer(ctx->pipe, NULL);

      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );

      {
         /* Unbind all samplers and sampler views for every shader stage,
          * up to each stage's advertised limits. */
         static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
         static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
         struct pipe_screen *scr = ctx->pipe->screen;
         enum pipe_shader_type sh;
         for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
            int maxsam = scr->get_shader_param(scr, sh,
                                               PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
            int maxview = scr->get_shader_param(scr, sh,
                                                PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
            assert(maxsam <= PIPE_MAX_SAMPLERS);
            assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
            if (maxsam > 0) {
               ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
            }
            if (maxview > 0) {
               ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
            }
         }
      }

      /* Unbind shaders and their constant buffers; optional stages only
       * if the driver actually supports them. */
      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
      if (ctx->has_geometry_shader) {
         ctx->pipe->bind_gs_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
      }
      if (ctx->has_tessellation) {
         ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
         ctx->pipe->bind_tes_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
      }
      if (ctx->has_compute_shader) {
         ctx->pipe->bind_compute_state(ctx->pipe, NULL);
         ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
      }
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );

      if (ctx->has_streamout)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
   }

   /* Release every reference held by this context. */
   for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);

   for (i = 0; i < PIPE_SHADER_TYPES; i++) {
      pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
      pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
   }

   pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
   pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }

   if (ctx->vbuf)
      u_vbuf_destroy(ctx->vbuf);
   FREE( ctx );
}
394
395
/* These functions will either find the state matching the given template
 * in the cache, or create a new state from the template, insert it in
 * the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of the
 * cso is assigned to be the template itself.
 */
405
/**
 * Bind a blend state matching the given template.
 * Looks the template up in the CSO cache first; only on a miss is a new
 * driver blend state created and inserted.  The actual driver bind is
 * skipped when the resulting handle is already bound.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* When independent blend is disabled, only rt[0] is meaningful, so
    * hash just the template prefix up to rt[1]. */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and insert a new blend CSO. */
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Zero the whole stored state, then copy only key_size bytes so
       * future template comparisons see deterministic trailing bytes. */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
449
450 static void
451 cso_save_blend(struct cso_context *ctx)
452 {
453 assert(!ctx->blend_saved);
454 ctx->blend_saved = ctx->blend;
455 }
456
457 static void
458 cso_restore_blend(struct cso_context *ctx)
459 {
460 if (ctx->blend != ctx->blend_saved) {
461 ctx->blend = ctx->blend_saved;
462 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
463 }
464 ctx->blend_saved = NULL;
465 }
466
467
468
/**
 * Bind a depth/stencil/alpha state matching the given template.
 * Uses the CSO cache; creates and inserts a new driver state on a miss.
 * The driver bind is skipped when the handle is already bound.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and insert a new DSA CSO. */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
514
515 static void
516 cso_save_depth_stencil_alpha(struct cso_context *ctx)
517 {
518 assert(!ctx->depth_stencil_saved);
519 ctx->depth_stencil_saved = ctx->depth_stencil;
520 }
521
522 static void
523 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
524 {
525 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
526 ctx->depth_stencil = ctx->depth_stencil_saved;
527 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
528 ctx->depth_stencil_saved);
529 }
530 ctx->depth_stencil_saved = NULL;
531 }
532
533
534
/**
 * Bind a rasterizer state matching the given template.
 * Uses the CSO cache; creates and inserts a new driver state on a miss.
 * The driver bind is skipped when the handle is already bound.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and insert a new rasterizer CSO. */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
575
576 static void
577 cso_save_rasterizer(struct cso_context *ctx)
578 {
579 assert(!ctx->rasterizer_saved);
580 ctx->rasterizer_saved = ctx->rasterizer;
581 }
582
583 static void
584 cso_restore_rasterizer(struct cso_context *ctx)
585 {
586 if (ctx->rasterizer != ctx->rasterizer_saved) {
587 ctx->rasterizer = ctx->rasterizer_saved;
588 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
589 }
590 ctx->rasterizer_saved = NULL;
591 }
592
593
594 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
595 {
596 if (ctx->fragment_shader != handle) {
597 ctx->fragment_shader = handle;
598 ctx->pipe->bind_fs_state(ctx->pipe, handle);
599 }
600 }
601
602 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
603 {
604 if (handle == ctx->fragment_shader) {
605 /* unbind before deleting */
606 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
607 ctx->fragment_shader = NULL;
608 }
609 ctx->pipe->delete_fs_state(ctx->pipe, handle);
610 }
611
612 static void
613 cso_save_fragment_shader(struct cso_context *ctx)
614 {
615 assert(!ctx->fragment_shader_saved);
616 ctx->fragment_shader_saved = ctx->fragment_shader;
617 }
618
619 static void
620 cso_restore_fragment_shader(struct cso_context *ctx)
621 {
622 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
623 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
624 ctx->fragment_shader = ctx->fragment_shader_saved;
625 }
626 ctx->fragment_shader_saved = NULL;
627 }
628
629
630 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
631 {
632 if (ctx->vertex_shader != handle) {
633 ctx->vertex_shader = handle;
634 ctx->pipe->bind_vs_state(ctx->pipe, handle);
635 }
636 }
637
638 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
639 {
640 if (handle == ctx->vertex_shader) {
641 /* unbind before deleting */
642 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
643 ctx->vertex_shader = NULL;
644 }
645 ctx->pipe->delete_vs_state(ctx->pipe, handle);
646 }
647
648 static void
649 cso_save_vertex_shader(struct cso_context *ctx)
650 {
651 assert(!ctx->vertex_shader_saved);
652 ctx->vertex_shader_saved = ctx->vertex_shader;
653 }
654
655 static void
656 cso_restore_vertex_shader(struct cso_context *ctx)
657 {
658 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
659 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
660 ctx->vertex_shader = ctx->vertex_shader_saved;
661 }
662 ctx->vertex_shader_saved = NULL;
663 }
664
665
666 void cso_set_framebuffer(struct cso_context *ctx,
667 const struct pipe_framebuffer_state *fb)
668 {
669 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
670 util_copy_framebuffer_state(&ctx->fb, fb);
671 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
672 }
673 }
674
675 static void
676 cso_save_framebuffer(struct cso_context *ctx)
677 {
678 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
679 }
680
681 static void
682 cso_restore_framebuffer(struct cso_context *ctx)
683 {
684 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
685 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
686 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
687 util_unreference_framebuffer_state(&ctx->fb_saved);
688 }
689 }
690
691
692 void cso_set_viewport(struct cso_context *ctx,
693 const struct pipe_viewport_state *vp)
694 {
695 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
696 ctx->vp = *vp;
697 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
698 }
699 }
700
701 /**
702 * Setup viewport state for given width and height (position is always (0,0)).
703 * Invert the Y axis if 'invert' is true.
704 */
705 void
706 cso_set_viewport_dims(struct cso_context *ctx,
707 float width, float height, boolean invert)
708 {
709 struct pipe_viewport_state vp;
710 vp.scale[0] = width * 0.5f;
711 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
712 vp.scale[2] = 0.5f;
713 vp.translate[0] = 0.5f * width;
714 vp.translate[1] = 0.5f * height;
715 vp.translate[2] = 0.5f;
716 cso_set_viewport(ctx, &vp);
717 }
718
719 static void
720 cso_save_viewport(struct cso_context *ctx)
721 {
722 ctx->vp_saved = ctx->vp;
723 }
724
725
726 static void
727 cso_restore_viewport(struct cso_context *ctx)
728 {
729 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
730 ctx->vp = ctx->vp_saved;
731 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
732 }
733 }
734
735
736 void cso_set_blend_color(struct cso_context *ctx,
737 const struct pipe_blend_color *bc)
738 {
739 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
740 ctx->blend_color = *bc;
741 ctx->pipe->set_blend_color(ctx->pipe, bc);
742 }
743 }
744
745 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
746 {
747 if (ctx->sample_mask != sample_mask) {
748 ctx->sample_mask = sample_mask;
749 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
750 }
751 }
752
753 static void
754 cso_save_sample_mask(struct cso_context *ctx)
755 {
756 ctx->sample_mask_saved = ctx->sample_mask;
757 }
758
759 static void
760 cso_restore_sample_mask(struct cso_context *ctx)
761 {
762 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
763 }
764
765 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
766 {
767 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
768 ctx->min_samples = min_samples;
769 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
770 }
771 }
772
773 static void
774 cso_save_min_samples(struct cso_context *ctx)
775 {
776 ctx->min_samples_saved = ctx->min_samples;
777 }
778
779 static void
780 cso_restore_min_samples(struct cso_context *ctx)
781 {
782 cso_set_min_samples(ctx, ctx->min_samples_saved);
783 }
784
785 void cso_set_stencil_ref(struct cso_context *ctx,
786 const struct pipe_stencil_ref *sr)
787 {
788 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
789 ctx->stencil_ref = *sr;
790 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
791 }
792 }
793
794 static void
795 cso_save_stencil_ref(struct cso_context *ctx)
796 {
797 ctx->stencil_ref_saved = ctx->stencil_ref;
798 }
799
800
801 static void
802 cso_restore_stencil_ref(struct cso_context *ctx)
803 {
804 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
805 sizeof(ctx->stencil_ref))) {
806 ctx->stencil_ref = ctx->stencil_ref_saved;
807 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
808 }
809 }
810
811 void cso_set_render_condition(struct cso_context *ctx,
812 struct pipe_query *query,
813 boolean condition, uint mode)
814 {
815 struct pipe_context *pipe = ctx->pipe;
816
817 if (ctx->render_condition != query ||
818 ctx->render_condition_mode != mode ||
819 ctx->render_condition_cond != condition) {
820 pipe->render_condition(pipe, query, condition, mode);
821 ctx->render_condition = query;
822 ctx->render_condition_cond = condition;
823 ctx->render_condition_mode = mode;
824 }
825 }
826
827 static void
828 cso_save_render_condition(struct cso_context *ctx)
829 {
830 ctx->render_condition_saved = ctx->render_condition;
831 ctx->render_condition_cond_saved = ctx->render_condition_cond;
832 ctx->render_condition_mode_saved = ctx->render_condition_mode;
833 }
834
835 static void
836 cso_restore_render_condition(struct cso_context *ctx)
837 {
838 cso_set_render_condition(ctx, ctx->render_condition_saved,
839 ctx->render_condition_cond_saved,
840 ctx->render_condition_mode_saved);
841 }
842
843 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
844 {
845 assert(ctx->has_geometry_shader || !handle);
846
847 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
848 ctx->geometry_shader = handle;
849 ctx->pipe->bind_gs_state(ctx->pipe, handle);
850 }
851 }
852
853 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
854 {
855 if (handle == ctx->geometry_shader) {
856 /* unbind before deleting */
857 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
858 ctx->geometry_shader = NULL;
859 }
860 ctx->pipe->delete_gs_state(ctx->pipe, handle);
861 }
862
863 static void
864 cso_save_geometry_shader(struct cso_context *ctx)
865 {
866 if (!ctx->has_geometry_shader) {
867 return;
868 }
869
870 assert(!ctx->geometry_shader_saved);
871 ctx->geometry_shader_saved = ctx->geometry_shader;
872 }
873
874 static void
875 cso_restore_geometry_shader(struct cso_context *ctx)
876 {
877 if (!ctx->has_geometry_shader) {
878 return;
879 }
880
881 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
882 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
883 ctx->geometry_shader = ctx->geometry_shader_saved;
884 }
885 ctx->geometry_shader_saved = NULL;
886 }
887
888 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
889 {
890 assert(ctx->has_tessellation || !handle);
891
892 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
893 ctx->tessctrl_shader = handle;
894 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
895 }
896 }
897
898 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
899 {
900 if (handle == ctx->tessctrl_shader) {
901 /* unbind before deleting */
902 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
903 ctx->tessctrl_shader = NULL;
904 }
905 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
906 }
907
908 static void
909 cso_save_tessctrl_shader(struct cso_context *ctx)
910 {
911 if (!ctx->has_tessellation) {
912 return;
913 }
914
915 assert(!ctx->tessctrl_shader_saved);
916 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
917 }
918
919 static void
920 cso_restore_tessctrl_shader(struct cso_context *ctx)
921 {
922 if (!ctx->has_tessellation) {
923 return;
924 }
925
926 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
927 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
928 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
929 }
930 ctx->tessctrl_shader_saved = NULL;
931 }
932
933 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
934 {
935 assert(ctx->has_tessellation || !handle);
936
937 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
938 ctx->tesseval_shader = handle;
939 ctx->pipe->bind_tes_state(ctx->pipe, handle);
940 }
941 }
942
943 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
944 {
945 if (handle == ctx->tesseval_shader) {
946 /* unbind before deleting */
947 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
948 ctx->tesseval_shader = NULL;
949 }
950 ctx->pipe->delete_tes_state(ctx->pipe, handle);
951 }
952
953 static void
954 cso_save_tesseval_shader(struct cso_context *ctx)
955 {
956 if (!ctx->has_tessellation) {
957 return;
958 }
959
960 assert(!ctx->tesseval_shader_saved);
961 ctx->tesseval_shader_saved = ctx->tesseval_shader;
962 }
963
964 static void
965 cso_restore_tesseval_shader(struct cso_context *ctx)
966 {
967 if (!ctx->has_tessellation) {
968 return;
969 }
970
971 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
972 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
973 ctx->tesseval_shader = ctx->tesseval_shader_saved;
974 }
975 ctx->tesseval_shader_saved = NULL;
976 }
977
978 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
979 {
980 assert(ctx->has_compute_shader || !handle);
981
982 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
983 ctx->compute_shader = handle;
984 ctx->pipe->bind_compute_state(ctx->pipe, handle);
985 }
986 }
987
988 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
989 {
990 if (handle == ctx->compute_shader) {
991 /* unbind before deleting */
992 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
993 ctx->compute_shader = NULL;
994 }
995 ctx->pipe->delete_compute_state(ctx->pipe, handle);
996 }
997
/**
 * Bind a vertex-elements state for the given array of elements.
 * If u_vbuf is active it takes over completely; otherwise the CSO cache
 * is consulted and a new driver state is created on a miss.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create and insert a new vertex-elements CSO. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Only key_size bytes of the stored state are meaningful. */
      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                      &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
1057
1058 static void
1059 cso_save_vertex_elements(struct cso_context *ctx)
1060 {
1061 struct u_vbuf *vbuf = ctx->vbuf;
1062
1063 if (vbuf) {
1064 u_vbuf_save_vertex_elements(vbuf);
1065 return;
1066 }
1067
1068 assert(!ctx->velements_saved);
1069 ctx->velements_saved = ctx->velements;
1070 }
1071
1072 static void
1073 cso_restore_vertex_elements(struct cso_context *ctx)
1074 {
1075 struct u_vbuf *vbuf = ctx->vbuf;
1076
1077 if (vbuf) {
1078 u_vbuf_restore_vertex_elements(vbuf);
1079 return;
1080 }
1081
1082 if (ctx->velements != ctx->velements_saved) {
1083 ctx->velements = ctx->velements_saved;
1084 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1085 }
1086 ctx->velements_saved = NULL;
1087 }
1088
1089 /* vertex buffers */
1090
1091 void cso_set_vertex_buffers(struct cso_context *ctx,
1092 unsigned start_slot, unsigned count,
1093 const struct pipe_vertex_buffer *buffers)
1094 {
1095 struct u_vbuf *vbuf = ctx->vbuf;
1096
1097 if (vbuf) {
1098 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1099 return;
1100 }
1101
1102 /* Save what's in the auxiliary slot, so that we can save and restore it
1103 * for meta ops. */
1104 if (start_slot <= ctx->aux_vertex_buffer_index &&
1105 start_slot+count > ctx->aux_vertex_buffer_index) {
1106 if (buffers) {
1107 const struct pipe_vertex_buffer *vb =
1108 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1109
1110 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1111 vb->buffer);
1112 memcpy(&ctx->aux_vertex_buffer_current, vb,
1113 sizeof(struct pipe_vertex_buffer));
1114 }
1115 else {
1116 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1117 NULL);
1118 ctx->aux_vertex_buffer_current.user_buffer = NULL;
1119 }
1120 }
1121
1122 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1123 }
1124
1125 static void
1126 cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1127 {
1128 struct u_vbuf *vbuf = ctx->vbuf;
1129
1130 if (vbuf) {
1131 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1132 return;
1133 }
1134
1135 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1136 ctx->aux_vertex_buffer_current.buffer);
1137 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1138 sizeof(struct pipe_vertex_buffer));
1139 }
1140
1141 static void
1142 cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1143 {
1144 struct u_vbuf *vbuf = ctx->vbuf;
1145
1146 if (vbuf) {
1147 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1148 return;
1149 }
1150
1151 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1152 &ctx->aux_vertex_buffer_saved);
1153 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1154 }
1155
/**
 * Return the index of the "auxiliary" vertex buffer slot — the slot
 * whose contents cso_set_vertex_buffers() mirrors for save/restore.
 */
unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
{
   return ctx->aux_vertex_buffer_index;
}
1160
1161
1162
1163 enum pipe_error
1164 cso_single_sampler(struct cso_context *ctx, unsigned shader_stage,
1165 unsigned idx, const struct pipe_sampler_state *templ)
1166 {
1167 void *handle = NULL;
1168
1169 if (templ) {
1170 unsigned key_size = sizeof(struct pipe_sampler_state);
1171 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1172 struct cso_hash_iter iter =
1173 cso_find_state_template(ctx->cache,
1174 hash_key, CSO_SAMPLER,
1175 (void *) templ, key_size);
1176
1177 if (cso_hash_iter_is_null(iter)) {
1178 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
1179 if (!cso)
1180 return PIPE_ERROR_OUT_OF_MEMORY;
1181
1182 memcpy(&cso->state, templ, sizeof(*templ));
1183 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1184 cso->delete_state =
1185 (cso_state_callback) ctx->pipe->delete_sampler_state;
1186 cso->context = ctx->pipe;
1187
1188 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1189 if (cso_hash_iter_is_null(iter)) {
1190 FREE(cso);
1191 return PIPE_ERROR_OUT_OF_MEMORY;
1192 }
1193
1194 handle = cso->data;
1195 }
1196 else {
1197 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
1198 }
1199 }
1200
1201 ctx->samplers[shader_stage].samplers[idx] = handle;
1202 return PIPE_OK;
1203 }
1204
1205
1206 /**
1207 * Send staged sampler state to the driver.
1208 */
1209 void
1210 cso_single_sampler_done(struct cso_context *ctx,
1211 enum pipe_shader_type shader_stage)
1212 {
1213 struct sampler_info *info = &ctx->samplers[shader_stage];
1214 const unsigned old_nr_samplers = info->nr_samplers;
1215 unsigned i;
1216
1217 /* find highest non-null sampler */
1218 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1219 if (info->samplers[i - 1] != NULL)
1220 break;
1221 }
1222
1223 info->nr_samplers = i;
1224 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1225 MAX2(old_nr_samplers, info->nr_samplers),
1226 info->samplers);
1227 }
1228
1229
1230 /*
1231 * If the function encouters any errors it will return the
1232 * last one. Done to always try to set as many samplers
1233 * as possible.
1234 */
1235 enum pipe_error
1236 cso_set_samplers(struct cso_context *ctx,
1237 enum pipe_shader_type shader_stage,
1238 unsigned nr,
1239 const struct pipe_sampler_state **templates)
1240 {
1241 struct sampler_info *info = &ctx->samplers[shader_stage];
1242 unsigned i;
1243 enum pipe_error temp, error = PIPE_OK;
1244
1245 for (i = 0; i < nr; i++) {
1246 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1247 if (temp != PIPE_OK)
1248 error = temp;
1249 }
1250
1251 for ( ; i < info->nr_samplers; i++) {
1252 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1253 if (temp != PIPE_OK)
1254 error = temp;
1255 }
1256
1257 cso_single_sampler_done(ctx, shader_stage);
1258
1259 return error;
1260 }
1261
/**
 * Save the fragment-stage sampler handles and count so they can be
 * re-bound later by cso_restore_fragment_samplers().
 */
static void
cso_save_fragment_samplers(struct cso_context *ctx)
{
   struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];

   ctx->nr_fragment_samplers_saved = info->nr_samplers;
   /* copy the whole fixed-size array, not just nr_samplers entries */
   memcpy(ctx->fragment_samplers_saved, info->samplers,
          sizeof(info->samplers));
}
1271
1272
/**
 * Restore the fragment-stage samplers saved by cso_save_fragment_samplers()
 * and send them to the driver.
 */
static void
cso_restore_fragment_samplers(struct cso_context *ctx)
{
   struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];

   info->nr_samplers = ctx->nr_fragment_samplers_saved;
   memcpy(info->samplers, ctx->fragment_samplers_saved,
          sizeof(info->samplers));
   /* push the restored set to the driver */
   cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
}
1283
1284
1285 void
1286 cso_set_sampler_views(struct cso_context *ctx,
1287 enum pipe_shader_type shader_stage,
1288 unsigned count,
1289 struct pipe_sampler_view **views)
1290 {
1291 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1292 unsigned i;
1293 boolean any_change = FALSE;
1294
1295 /* reference new views */
1296 for (i = 0; i < count; i++) {
1297 any_change |= ctx->fragment_views[i] != views[i];
1298 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1299 }
1300 /* unref extra old views, if any */
1301 for (; i < ctx->nr_fragment_views; i++) {
1302 any_change |= ctx->fragment_views[i] != NULL;
1303 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1304 }
1305
1306 /* bind the new sampler views */
1307 if (any_change) {
1308 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1309 MAX2(ctx->nr_fragment_views, count),
1310 ctx->fragment_views);
1311 }
1312
1313 ctx->nr_fragment_views = count;
1314 }
1315 else
1316 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1317 }
1318
1319
/**
 * Take an extra reference on each bound fragment sampler view so the
 * current set can be restored by cso_restore_fragment_sampler_views().
 */
static void
cso_save_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i;

   ctx->nr_fragment_views_saved = ctx->nr_fragment_views;

   for (i = 0; i < ctx->nr_fragment_views; i++) {
      /* nested save without restore is not supported */
      assert(!ctx->fragment_views_saved[i]);
      pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
                                  ctx->fragment_views[i]);
   }
}
1333
1334
/**
 * Re-bind the fragment sampler views saved by
 * cso_save_fragment_sampler_views(), transferring the saved references
 * back and unreferencing any views bound since the save.
 */
static void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i, nr_saved = ctx->nr_fragment_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      /* drop the currently-bound view first... */
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      /* move the reference from one pointer to another */
      ctx->fragment_views[i] = ctx->fragment_views_saved[i];
      ctx->fragment_views_saved[i] = NULL;
   }
   /* unreference views bound beyond the saved count */
   for (; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }

   /* bind enough entries to also unbind the now-NULL extras */
   num = MAX2(ctx->nr_fragment_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
                                ctx->fragment_views);

   ctx->nr_fragment_views = nr_saved;
   ctx->nr_fragment_views_saved = 0;
}
1360
1361
1362 void
1363 cso_set_shader_images(struct cso_context *ctx, unsigned shader_stage,
1364 unsigned start, unsigned count,
1365 struct pipe_image_view *images)
1366 {
1367 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1368 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1369 }
1370
1371 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1372 }
1373
1374
/**
 * Save the mirrored copy of fragment image slot 0 for later restore.
 */
static void
cso_save_fragment_image0(struct cso_context *ctx)
{
   util_copy_image_view(&ctx->fragment_image0_saved,
                        &ctx->fragment_image0_current);
}
1381
1382
/**
 * Re-bind the fragment image slot 0 saved by cso_save_fragment_image0().
 */
static void
cso_restore_fragment_image0(struct cso_context *ctx)
{
   cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
                         &ctx->fragment_image0_saved);
}
1389
1390
1391 void
1392 cso_set_stream_outputs(struct cso_context *ctx,
1393 unsigned num_targets,
1394 struct pipe_stream_output_target **targets,
1395 const unsigned *offsets)
1396 {
1397 struct pipe_context *pipe = ctx->pipe;
1398 uint i;
1399
1400 if (!ctx->has_streamout) {
1401 assert(num_targets == 0);
1402 return;
1403 }
1404
1405 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1406 /* Nothing to do. */
1407 return;
1408 }
1409
1410 /* reference new targets */
1411 for (i = 0; i < num_targets; i++) {
1412 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1413 }
1414 /* unref extra old targets, if any */
1415 for (; i < ctx->nr_so_targets; i++) {
1416 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1417 }
1418
1419 pipe->set_stream_output_targets(pipe, num_targets, targets,
1420 offsets);
1421 ctx->nr_so_targets = num_targets;
1422 }
1423
/**
 * Take an extra reference on each bound stream output target so the
 * current set can be restored by cso_restore_stream_outputs().
 */
static void
cso_save_stream_outputs(struct cso_context *ctx)
{
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   ctx->nr_so_targets_saved = ctx->nr_so_targets;

   for (i = 0; i < ctx->nr_so_targets; i++) {
      /* nested save without restore is not supported */
      assert(!ctx->so_targets_saved[i]);
      pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
   }
}
1440
/**
 * Re-bind the stream output targets saved by cso_save_stream_outputs(),
 * transferring the saved references back and appending to each target's
 * existing contents (offset = -1).
 */
static void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;
   unsigned offset[PIPE_MAX_SO_BUFFERS];

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      /* drop the currently-bound target first */
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
      /* -1 means append */
      offset[i] = (unsigned)-1;
   }
   /* unreference targets bound beyond the saved count */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, offset);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1476
1477 /* constant buffers */
1478
1479 void
1480 cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1481 unsigned index, struct pipe_constant_buffer *cb)
1482 {
1483 struct pipe_context *pipe = cso->pipe;
1484
1485 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1486
1487 if (index == 0) {
1488 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1489 }
1490 }
1491
1492 void
1493 cso_set_constant_buffer_resource(struct cso_context *cso,
1494 unsigned shader_stage,
1495 unsigned index,
1496 struct pipe_resource *buffer)
1497 {
1498 if (buffer) {
1499 struct pipe_constant_buffer cb;
1500 cb.buffer = buffer;
1501 cb.buffer_offset = 0;
1502 cb.buffer_size = buffer->width0;
1503 cb.user_buffer = NULL;
1504 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1505 } else {
1506 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1507 }
1508 }
1509
/**
 * Save the mirrored copy of constant buffer slot 0 for the given stage,
 * for later restore by cso_restore_constant_buffer_slot0().
 */
void
cso_save_constant_buffer_slot0(struct cso_context *cso,
                               unsigned shader_stage)
{
   util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
                             &cso->aux_constbuf_current[shader_stage]);
}
1517
/**
 * Re-bind the constant buffer saved by cso_save_constant_buffer_slot0()
 * and drop the saved reference.
 */
void
cso_restore_constant_buffer_slot0(struct cso_context *cso,
                                  unsigned shader_stage)
{
   cso_set_constant_buffer(cso, shader_stage, 0,
                           &cso->aux_constbuf_saved[shader_stage]);
   /* release our saved reference; the bind above took its own */
   pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
                           NULL);
}
1527
1528
1529 /**
1530 * Save all the CSO state items specified by the state_mask bitmask
1531 * of CSO_BIT_x flags.
1532 */
1533 void
1534 cso_save_state(struct cso_context *cso, unsigned state_mask)
1535 {
1536 assert(cso->saved_state == 0);
1537
1538 cso->saved_state = state_mask;
1539
1540 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1541 cso_save_aux_vertex_buffer_slot(cso);
1542 if (state_mask & CSO_BIT_BLEND)
1543 cso_save_blend(cso);
1544 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1545 cso_save_depth_stencil_alpha(cso);
1546 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1547 cso_save_fragment_samplers(cso);
1548 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1549 cso_save_fragment_sampler_views(cso);
1550 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1551 cso_save_fragment_shader(cso);
1552 if (state_mask & CSO_BIT_FRAMEBUFFER)
1553 cso_save_framebuffer(cso);
1554 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1555 cso_save_geometry_shader(cso);
1556 if (state_mask & CSO_BIT_MIN_SAMPLES)
1557 cso_save_min_samples(cso);
1558 if (state_mask & CSO_BIT_RASTERIZER)
1559 cso_save_rasterizer(cso);
1560 if (state_mask & CSO_BIT_RENDER_CONDITION)
1561 cso_save_render_condition(cso);
1562 if (state_mask & CSO_BIT_SAMPLE_MASK)
1563 cso_save_sample_mask(cso);
1564 if (state_mask & CSO_BIT_STENCIL_REF)
1565 cso_save_stencil_ref(cso);
1566 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1567 cso_save_stream_outputs(cso);
1568 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1569 cso_save_tessctrl_shader(cso);
1570 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1571 cso_save_tesseval_shader(cso);
1572 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1573 cso_save_vertex_elements(cso);
1574 if (state_mask & CSO_BIT_VERTEX_SHADER)
1575 cso_save_vertex_shader(cso);
1576 if (state_mask & CSO_BIT_VIEWPORT)
1577 cso_save_viewport(cso);
1578 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1579 cso->pipe->set_active_query_state(cso->pipe, false);
1580 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1581 cso_save_fragment_image0(cso);
1582 }
1583
1584
1585 /**
1586 * Restore the state which was saved by cso_save_state().
1587 */
1588 void
1589 cso_restore_state(struct cso_context *cso)
1590 {
1591 unsigned state_mask = cso->saved_state;
1592
1593 assert(state_mask);
1594
1595 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1596 cso_restore_aux_vertex_buffer_slot(cso);
1597 if (state_mask & CSO_BIT_BLEND)
1598 cso_restore_blend(cso);
1599 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1600 cso_restore_depth_stencil_alpha(cso);
1601 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1602 cso_restore_fragment_samplers(cso);
1603 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1604 cso_restore_fragment_sampler_views(cso);
1605 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1606 cso_restore_fragment_shader(cso);
1607 if (state_mask & CSO_BIT_FRAMEBUFFER)
1608 cso_restore_framebuffer(cso);
1609 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1610 cso_restore_geometry_shader(cso);
1611 if (state_mask & CSO_BIT_MIN_SAMPLES)
1612 cso_restore_min_samples(cso);
1613 if (state_mask & CSO_BIT_RASTERIZER)
1614 cso_restore_rasterizer(cso);
1615 if (state_mask & CSO_BIT_RENDER_CONDITION)
1616 cso_restore_render_condition(cso);
1617 if (state_mask & CSO_BIT_SAMPLE_MASK)
1618 cso_restore_sample_mask(cso);
1619 if (state_mask & CSO_BIT_STENCIL_REF)
1620 cso_restore_stencil_ref(cso);
1621 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1622 cso_restore_stream_outputs(cso);
1623 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1624 cso_restore_tessctrl_shader(cso);
1625 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1626 cso_restore_tesseval_shader(cso);
1627 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1628 cso_restore_vertex_elements(cso);
1629 if (state_mask & CSO_BIT_VERTEX_SHADER)
1630 cso_restore_vertex_shader(cso);
1631 if (state_mask & CSO_BIT_VIEWPORT)
1632 cso_restore_viewport(cso);
1633 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1634 cso->pipe->set_active_query_state(cso->pipe, true);
1635 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1636 cso_restore_fragment_image0(cso);
1637
1638 cso->saved_state = 0;
1639 }
1640
1641
1642
1643 /* drawing */
1644
1645 void
1646 cso_set_index_buffer(struct cso_context *cso,
1647 const struct pipe_index_buffer *ib)
1648 {
1649 struct u_vbuf *vbuf = cso->vbuf;
1650
1651 if (vbuf) {
1652 u_vbuf_set_index_buffer(vbuf, ib);
1653 } else {
1654 struct pipe_context *pipe = cso->pipe;
1655 pipe->set_index_buffer(pipe, ib);
1656 }
1657 }
1658
1659 void
1660 cso_draw_vbo(struct cso_context *cso,
1661 const struct pipe_draw_info *info)
1662 {
1663 struct u_vbuf *vbuf = cso->vbuf;
1664
1665 if (vbuf) {
1666 u_vbuf_draw_vbo(vbuf, info);
1667 } else {
1668 struct pipe_context *pipe = cso->pipe;
1669 pipe->draw_vbo(pipe, info);
1670 }
1671 }
1672
1673 void
1674 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1675 {
1676 struct pipe_draw_info info;
1677
1678 util_draw_init_info(&info);
1679
1680 info.mode = mode;
1681 info.start = start;
1682 info.count = count;
1683 info.min_index = start;
1684 info.max_index = start + count - 1;
1685
1686 cso_draw_vbo(cso, &info);
1687 }
1688
1689 void
1690 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1691 uint start, uint count,
1692 uint start_instance, uint instance_count)
1693 {
1694 struct pipe_draw_info info;
1695
1696 util_draw_init_info(&info);
1697
1698 info.mode = mode;
1699 info.start = start;
1700 info.count = count;
1701 info.min_index = start;
1702 info.max_index = start + count - 1;
1703 info.start_instance = start_instance;
1704 info.instance_count = instance_count;
1705
1706 cso_draw_vbo(cso, &info);
1707 }