cso: add cso_get_pipe_context
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Per-shader sampler information.
55 */
56 struct sampler_info
57 {
58 struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
59 void *samplers[PIPE_MAX_SAMPLERS];
60 };
61
62
63
64 struct cso_context {
65 struct pipe_context *pipe;
66 struct cso_cache *cache;
67 struct u_vbuf *vbuf;
68
69 boolean has_geometry_shader;
70 boolean has_tessellation;
71 boolean has_compute_shader;
72 boolean has_streamout;
73
74 unsigned saved_state; /**< bitmask of CSO_BIT_x flags */
75
76 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
77 unsigned nr_fragment_views;
78
79 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
80 unsigned nr_fragment_views_saved;
81
82 struct sampler_info fragment_samplers_saved;
83 struct sampler_info samplers[PIPE_SHADER_TYPES];
84
85 /* Temporary number until cso_single_sampler_done is called.
86 * It tracks the highest sampler seen in cso_single_sampler.
87 */
88 int max_sampler_seen;
89
90 struct pipe_vertex_buffer aux_vertex_buffer_current;
91 struct pipe_vertex_buffer aux_vertex_buffer_saved;
92 unsigned aux_vertex_buffer_index;
93
94 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
95 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
96
97 struct pipe_image_view fragment_image0_current;
98 struct pipe_image_view fragment_image0_saved;
99
100 unsigned nr_so_targets;
101 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
102
103 unsigned nr_so_targets_saved;
104 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
105
106 /** Current and saved state.
107 * The saved state is used as a 1-deep stack.
108 */
109 void *blend, *blend_saved;
110 void *depth_stencil, *depth_stencil_saved;
111 void *rasterizer, *rasterizer_saved;
112 void *fragment_shader, *fragment_shader_saved;
113 void *vertex_shader, *vertex_shader_saved;
114 void *geometry_shader, *geometry_shader_saved;
115 void *tessctrl_shader, *tessctrl_shader_saved;
116 void *tesseval_shader, *tesseval_shader_saved;
117 void *compute_shader;
118 void *velements, *velements_saved;
119 struct pipe_query *render_condition, *render_condition_saved;
120 uint render_condition_mode, render_condition_mode_saved;
121 boolean render_condition_cond, render_condition_cond_saved;
122
123 struct pipe_framebuffer_state fb, fb_saved;
124 struct pipe_viewport_state vp, vp_saved;
125 struct pipe_blend_color blend_color;
126 unsigned sample_mask, sample_mask_saved;
127 unsigned min_samples, min_samples_saved;
128 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
129 };
130
131 struct pipe_context *cso_get_pipe_context(struct cso_context *cso)
132 {
133 return cso->pipe;
134 }
135
136 static boolean delete_blend_state(struct cso_context *ctx, void *state)
137 {
138 struct cso_blend *cso = (struct cso_blend *)state;
139
140 if (ctx->blend == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146 return TRUE;
147 }
148
149 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
150 {
151 struct cso_depth_stencil_alpha *cso =
152 (struct cso_depth_stencil_alpha *)state;
153
154 if (ctx->depth_stencil == cso->data)
155 return FALSE;
156
157 if (cso->delete_state)
158 cso->delete_state(cso->context, cso->data);
159 FREE(state);
160
161 return TRUE;
162 }
163
164 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
165 {
166 struct cso_sampler *cso = (struct cso_sampler *)state;
167 if (cso->delete_state)
168 cso->delete_state(cso->context, cso->data);
169 FREE(state);
170 return TRUE;
171 }
172
173 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
174 {
175 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
176
177 if (ctx->rasterizer == cso->data)
178 return FALSE;
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185 static boolean delete_vertex_elements(struct cso_context *ctx,
186 void *state)
187 {
188 struct cso_velements *cso = (struct cso_velements *)state;
189
190 if (ctx->velements == cso->data)
191 return FALSE;
192
193 if (cso->delete_state)
194 cso->delete_state(cso->context, cso->data);
195 FREE(state);
196 return TRUE;
197 }
198
199
200 static inline boolean delete_cso(struct cso_context *ctx,
201 void *state, enum cso_cache_type type)
202 {
203 switch (type) {
204 case CSO_BLEND:
205 return delete_blend_state(ctx, state);
206 case CSO_SAMPLER:
207 return delete_sampler_state(ctx, state);
208 case CSO_DEPTH_STENCIL_ALPHA:
209 return delete_depth_stencil_state(ctx, state);
210 case CSO_RASTERIZER:
211 return delete_rasterizer_state(ctx, state);
212 case CSO_VELEMENTS:
213 return delete_vertex_elements(ctx, state);
214 default:
215 assert(0);
216 FREE(state);
217 }
218 return FALSE;
219 }
220
221 static inline void
222 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
223 int max_size, void *user_data)
224 {
225 struct cso_context *ctx = (struct cso_context *)user_data;
226    /* If we're approaching the maximum size, remove a fourth of the entries;
227     * otherwise every subsequent call will go through the same work. */
228 int hash_size = cso_hash_size(hash);
229 int max_entries = (max_size > hash_size) ? max_size : hash_size;
230 int to_remove = (max_size < max_entries) * max_entries/4;
231 struct cso_hash_iter iter;
232 struct cso_sampler **samplers_to_restore = NULL;
233 unsigned to_restore = 0;
234
235 if (hash_size > max_size)
236 to_remove += hash_size - max_size;
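   /* Worked example (illustrative, not in the original comment): with
    * max_size = 32 and hash_size = 35, max_entries is 35, so the two steps
    * above give to_remove = 35/4 + (35 - 32) = 11, i.e. the overflow plus
    * roughly a quarter of the table gets evicted.
    */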
237
238 if (to_remove == 0)
239 return;
240
241 if (type == CSO_SAMPLER) {
242 int i, j;
243
244 samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
245 sizeof(*samplers_to_restore));
246
247 /* Temporarily remove currently bound sampler states from the hash
248 * table, to prevent them from being deleted
249 */
250 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
251 for (j = 0; j < PIPE_MAX_SAMPLERS; j++) {
252 struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
253
254 if (sampler && cso_hash_take(hash, sampler->hash_key))
255 samplers_to_restore[to_restore++] = sampler;
256 }
257 }
258 }
259
260 iter = cso_hash_first_node(hash);
261 while (to_remove) {
262       /* remove elements until we're good */
263       /* FIXME: currently we pick the nodes to remove at random */
264 void *cso = cso_hash_iter_data(iter);
265
266 if (!cso)
267 break;
268
269 if (delete_cso(ctx, cso, type)) {
270 iter = cso_hash_erase(hash, iter);
271 --to_remove;
272 } else
273 iter = cso_hash_iter_next(iter);
274 }
275
276 if (type == CSO_SAMPLER) {
277 /* Put currently bound sampler states back into the hash table */
278 while (to_restore--) {
279 struct cso_sampler *sampler = samplers_to_restore[to_restore];
280
281 cso_hash_insert(hash, sampler->hash_key, sampler);
282 }
283
284 FREE(samplers_to_restore);
285 }
286 }
287
288 static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
289 {
290 struct u_vbuf_caps caps;
291
292 /* Install u_vbuf if there is anything unsupported. */
293 if (u_vbuf_get_caps(cso->pipe->screen, &caps, flags)) {
294 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
295 cso->aux_vertex_buffer_index);
296 }
297 }
298
299 struct cso_context *
300 cso_create_context(struct pipe_context *pipe, unsigned u_vbuf_flags)
301 {
302 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
303 if (!ctx)
304 return NULL;
305
306 ctx->cache = cso_cache_create();
307 if (ctx->cache == NULL)
308 goto out;
309 cso_cache_set_sanitize_callback(ctx->cache,
310 sanitize_hash,
311 ctx);
312
313 ctx->pipe = pipe;
314 ctx->sample_mask = ~0;
315
316 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
317
318 cso_init_vbuf(ctx, u_vbuf_flags);
319
320 /* Enable for testing: */
321 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
322
323 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
324 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
325 ctx->has_geometry_shader = TRUE;
326 }
327 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
328 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
329 ctx->has_tessellation = TRUE;
330 }
331 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
332 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
333 int supported_irs =
334 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
335 PIPE_SHADER_CAP_SUPPORTED_IRS);
336 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
337 ctx->has_compute_shader = TRUE;
338 }
339 }
340 if (pipe->screen->get_param(pipe->screen,
341 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
342 ctx->has_streamout = TRUE;
343 }
344
345 ctx->max_sampler_seen = -1;
346 return ctx;
347
348 out:
349 cso_destroy_context( ctx );
350 return NULL;
351 }
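/* Illustrative sketch (not part of the original file): the typical way a state
 * tracker wraps its pipe_context.  The function name and the 0 passed for the
 * u_vbuf flags are assumptions made for the example.
 */
static void
example_cso_lifecycle(struct pipe_context *pipe)
{
   /* Wrap the driver context; 0 requests default u_vbuf handling. */
   struct cso_context *cso = cso_create_context(pipe, 0);
   if (!cso)
      return;   /* cache or context allocation failed */

   /* ... set state and draw through the cso_* entry points ... */

   /* Unbinds everything from the pipe context and frees the caches. */
   cso_destroy_context(cso);
}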
352
353 /**
354 * Free the CSO context.
355 */
356 void cso_destroy_context( struct cso_context *ctx )
357 {
358 unsigned i;
359
360 if (ctx->pipe) {
361 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
362 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
363
364 {
365 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
366 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
367 struct pipe_screen *scr = ctx->pipe->screen;
368 enum pipe_shader_type sh;
369 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
370 int maxsam = scr->get_shader_param(scr, sh,
371 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
372 int maxview = scr->get_shader_param(scr, sh,
373 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
374 assert(maxsam <= PIPE_MAX_SAMPLERS);
375 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
376 if (maxsam > 0) {
377 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
378 }
379 if (maxview > 0) {
380 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
381 }
382 }
383 }
384
385 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
386 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
387 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
388 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
389 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
390 if (ctx->has_geometry_shader) {
391 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
392 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
393 }
394 if (ctx->has_tessellation) {
395 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
396 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
397 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
398 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
399 }
400 if (ctx->has_compute_shader) {
401 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
402 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
403 }
404 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
405
406 if (ctx->has_streamout)
407 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
408 }
409
410 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
411 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
412 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
413 }
414
415 util_unreference_framebuffer_state(&ctx->fb);
416 util_unreference_framebuffer_state(&ctx->fb_saved);
417
418 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_current);
419 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_saved);
420
421 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
422 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
423 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
424 }
425
426 pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
427 pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);
428
429 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
430 pipe_so_target_reference(&ctx->so_targets[i], NULL);
431 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
432 }
433
434 if (ctx->cache) {
435 cso_cache_delete( ctx->cache );
436 ctx->cache = NULL;
437 }
438
439 if (ctx->vbuf)
440 u_vbuf_destroy(ctx->vbuf);
441 FREE( ctx );
442 }
443
444
445 /* These functions will either find the state matching the given template
446 * in the cache, or they will create a new state from the given template,
447 * insert it into the cache and return it.
448 */
449
450 /*
451 * If the driver returns 0 from the create method, the data member of the
452 * cso will be the template itself.
453 */
454
455 enum pipe_error cso_set_blend(struct cso_context *ctx,
456 const struct pipe_blend_state *templ)
457 {
458 unsigned key_size, hash_key;
459 struct cso_hash_iter iter;
460 void *handle;
461
462 key_size = templ->independent_blend_enable ?
463 sizeof(struct pipe_blend_state) :
464 (char *)&(templ->rt[1]) - (char *)templ;
465 hash_key = cso_construct_key((void*)templ, key_size);
466 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
467 (void*)templ, key_size);
468
469 if (cso_hash_iter_is_null(iter)) {
470 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
471 if (!cso)
472 return PIPE_ERROR_OUT_OF_MEMORY;
473
474 memset(&cso->state, 0, sizeof cso->state);
475 memcpy(&cso->state, templ, key_size);
476 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
477 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
478 cso->context = ctx->pipe;
479
480 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
481 if (cso_hash_iter_is_null(iter)) {
482 FREE(cso);
483 return PIPE_ERROR_OUT_OF_MEMORY;
484 }
485
486 handle = cso->data;
487 }
488 else {
489 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
490 }
491
492 if (ctx->blend != handle) {
493 ctx->blend = handle;
494 ctx->pipe->bind_blend_state(ctx->pipe, handle);
495 }
496 return PIPE_OK;
497 }
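/* Illustrative sketch (not part of the original file): as described in the
 * comment above, callers hand cso_set_blend() a template and the function
 * either reuses a cached driver object or creates and caches a new one.
 * The function name and the blend settings below are arbitrary example values.
 */
static void
example_enable_additive_blend(struct cso_context *cso)
{
   struct pipe_blend_state blend;

   memset(&blend, 0, sizeof(blend));
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].colormask = PIPE_MASK_RGBA;

   /* A repeat call with an identical template hits the hash cache and only
    * rebinds if the driver handle differs from the currently bound one.
    */
   cso_set_blend(cso, &blend);
}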
498
499 static void
500 cso_save_blend(struct cso_context *ctx)
501 {
502 assert(!ctx->blend_saved);
503 ctx->blend_saved = ctx->blend;
504 }
505
506 static void
507 cso_restore_blend(struct cso_context *ctx)
508 {
509 if (ctx->blend != ctx->blend_saved) {
510 ctx->blend = ctx->blend_saved;
511 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
512 }
513 ctx->blend_saved = NULL;
514 }
515
516
517
518 enum pipe_error
519 cso_set_depth_stencil_alpha(struct cso_context *ctx,
520 const struct pipe_depth_stencil_alpha_state *templ)
521 {
522 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
523 unsigned hash_key = cso_construct_key((void*)templ, key_size);
524 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
525 hash_key,
526 CSO_DEPTH_STENCIL_ALPHA,
527 (void*)templ, key_size);
528 void *handle;
529
530 if (cso_hash_iter_is_null(iter)) {
531 struct cso_depth_stencil_alpha *cso =
532 MALLOC(sizeof(struct cso_depth_stencil_alpha));
533 if (!cso)
534 return PIPE_ERROR_OUT_OF_MEMORY;
535
536 memcpy(&cso->state, templ, sizeof(*templ));
537 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
538 &cso->state);
539 cso->delete_state =
540 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
541 cso->context = ctx->pipe;
542
543 iter = cso_insert_state(ctx->cache, hash_key,
544 CSO_DEPTH_STENCIL_ALPHA, cso);
545 if (cso_hash_iter_is_null(iter)) {
546 FREE(cso);
547 return PIPE_ERROR_OUT_OF_MEMORY;
548 }
549
550 handle = cso->data;
551 }
552 else {
553 handle = ((struct cso_depth_stencil_alpha *)
554 cso_hash_iter_data(iter))->data;
555 }
556
557 if (ctx->depth_stencil != handle) {
558 ctx->depth_stencil = handle;
559 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
560 }
561 return PIPE_OK;
562 }
563
564 static void
565 cso_save_depth_stencil_alpha(struct cso_context *ctx)
566 {
567 assert(!ctx->depth_stencil_saved);
568 ctx->depth_stencil_saved = ctx->depth_stencil;
569 }
570
571 static void
572 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
573 {
574 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
575 ctx->depth_stencil = ctx->depth_stencil_saved;
576 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
577 ctx->depth_stencil_saved);
578 }
579 ctx->depth_stencil_saved = NULL;
580 }
581
582
583
584 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
585 const struct pipe_rasterizer_state *templ)
586 {
587 unsigned key_size = sizeof(struct pipe_rasterizer_state);
588 unsigned hash_key = cso_construct_key((void*)templ, key_size);
589 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
590 hash_key,
591 CSO_RASTERIZER,
592 (void*)templ, key_size);
593 void *handle = NULL;
594
595 if (cso_hash_iter_is_null(iter)) {
596 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
597 if (!cso)
598 return PIPE_ERROR_OUT_OF_MEMORY;
599
600 memcpy(&cso->state, templ, sizeof(*templ));
601 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
602 cso->delete_state =
603 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
604 cso->context = ctx->pipe;
605
606 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
607 if (cso_hash_iter_is_null(iter)) {
608 FREE(cso);
609 return PIPE_ERROR_OUT_OF_MEMORY;
610 }
611
612 handle = cso->data;
613 }
614 else {
615 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
616 }
617
618 if (ctx->rasterizer != handle) {
619 ctx->rasterizer = handle;
620 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
621 }
622 return PIPE_OK;
623 }
624
625 static void
626 cso_save_rasterizer(struct cso_context *ctx)
627 {
628 assert(!ctx->rasterizer_saved);
629 ctx->rasterizer_saved = ctx->rasterizer;
630 }
631
632 static void
633 cso_restore_rasterizer(struct cso_context *ctx)
634 {
635 if (ctx->rasterizer != ctx->rasterizer_saved) {
636 ctx->rasterizer = ctx->rasterizer_saved;
637 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
638 }
639 ctx->rasterizer_saved = NULL;
640 }
641
642
643 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
644 {
645 if (ctx->fragment_shader != handle) {
646 ctx->fragment_shader = handle;
647 ctx->pipe->bind_fs_state(ctx->pipe, handle);
648 }
649 }
650
651 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
652 {
653 if (handle == ctx->fragment_shader) {
654 /* unbind before deleting */
655 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
656 ctx->fragment_shader = NULL;
657 }
658 ctx->pipe->delete_fs_state(ctx->pipe, handle);
659 }
660
661 static void
662 cso_save_fragment_shader(struct cso_context *ctx)
663 {
664 assert(!ctx->fragment_shader_saved);
665 ctx->fragment_shader_saved = ctx->fragment_shader;
666 }
667
668 static void
669 cso_restore_fragment_shader(struct cso_context *ctx)
670 {
671 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
672 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
673 ctx->fragment_shader = ctx->fragment_shader_saved;
674 }
675 ctx->fragment_shader_saved = NULL;
676 }
677
678
679 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
680 {
681 if (ctx->vertex_shader != handle) {
682 ctx->vertex_shader = handle;
683 ctx->pipe->bind_vs_state(ctx->pipe, handle);
684 }
685 }
686
687 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
688 {
689 if (handle == ctx->vertex_shader) {
690 /* unbind before deleting */
691 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
692 ctx->vertex_shader = NULL;
693 }
694 ctx->pipe->delete_vs_state(ctx->pipe, handle);
695 }
696
697 static void
698 cso_save_vertex_shader(struct cso_context *ctx)
699 {
700 assert(!ctx->vertex_shader_saved);
701 ctx->vertex_shader_saved = ctx->vertex_shader;
702 }
703
704 static void
705 cso_restore_vertex_shader(struct cso_context *ctx)
706 {
707 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
708 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
709 ctx->vertex_shader = ctx->vertex_shader_saved;
710 }
711 ctx->vertex_shader_saved = NULL;
712 }
713
714
715 void cso_set_framebuffer(struct cso_context *ctx,
716 const struct pipe_framebuffer_state *fb)
717 {
718 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
719 util_copy_framebuffer_state(&ctx->fb, fb);
720 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
721 }
722 }
723
724 static void
725 cso_save_framebuffer(struct cso_context *ctx)
726 {
727 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
728 }
729
730 static void
731 cso_restore_framebuffer(struct cso_context *ctx)
732 {
733 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
734 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
735 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
736 util_unreference_framebuffer_state(&ctx->fb_saved);
737 }
738 }
739
740
741 void cso_set_viewport(struct cso_context *ctx,
742 const struct pipe_viewport_state *vp)
743 {
744 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
745 ctx->vp = *vp;
746 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
747 }
748 }
749
750 /**
751 * Set up viewport state for the given width and height (position is always (0,0)).
752 * Invert the Y axis if 'invert' is true.
753 */
754 void
755 cso_set_viewport_dims(struct cso_context *ctx,
756 float width, float height, boolean invert)
757 {
758 struct pipe_viewport_state vp;
759 vp.scale[0] = width * 0.5f;
760 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
761 vp.scale[2] = 0.5f;
762 vp.translate[0] = 0.5f * width;
763 vp.translate[1] = 0.5f * height;
764 vp.translate[2] = 0.5f;
765 cso_set_viewport(ctx, &vp);
766 }
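/* Illustrative note (not part of the original file): with scale = (w/2, +-h/2,
 * 1/2) and translate = (w/2, h/2, 1/2), the viewport transform
 * window = ndc * scale + translate maps NDC x,y in [-1,1] to [0,w] x [0,h]
 * and z in [-1,1] to [0,1]; invert = TRUE flips the Y axis.  A hypothetical
 * caller covering a whole framebuffer might look like this:
 */
static void
example_full_surface_viewport(struct cso_context *cso,
                              const struct pipe_framebuffer_state *fb)
{
   cso_set_viewport_dims(cso, (float) fb->width, (float) fb->height, FALSE);
}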
767
768 static void
769 cso_save_viewport(struct cso_context *ctx)
770 {
771 ctx->vp_saved = ctx->vp;
772 }
773
774
775 static void
776 cso_restore_viewport(struct cso_context *ctx)
777 {
778 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
779 ctx->vp = ctx->vp_saved;
780 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
781 }
782 }
783
784
785 void cso_set_blend_color(struct cso_context *ctx,
786 const struct pipe_blend_color *bc)
787 {
788 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
789 ctx->blend_color = *bc;
790 ctx->pipe->set_blend_color(ctx->pipe, bc);
791 }
792 }
793
794 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
795 {
796 if (ctx->sample_mask != sample_mask) {
797 ctx->sample_mask = sample_mask;
798 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
799 }
800 }
801
802 static void
803 cso_save_sample_mask(struct cso_context *ctx)
804 {
805 ctx->sample_mask_saved = ctx->sample_mask;
806 }
807
808 static void
809 cso_restore_sample_mask(struct cso_context *ctx)
810 {
811 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
812 }
813
814 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
815 {
816 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
817 ctx->min_samples = min_samples;
818 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
819 }
820 }
821
822 static void
823 cso_save_min_samples(struct cso_context *ctx)
824 {
825 ctx->min_samples_saved = ctx->min_samples;
826 }
827
828 static void
829 cso_restore_min_samples(struct cso_context *ctx)
830 {
831 cso_set_min_samples(ctx, ctx->min_samples_saved);
832 }
833
834 void cso_set_stencil_ref(struct cso_context *ctx,
835 const struct pipe_stencil_ref *sr)
836 {
837 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
838 ctx->stencil_ref = *sr;
839 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
840 }
841 }
842
843 static void
844 cso_save_stencil_ref(struct cso_context *ctx)
845 {
846 ctx->stencil_ref_saved = ctx->stencil_ref;
847 }
848
849
850 static void
851 cso_restore_stencil_ref(struct cso_context *ctx)
852 {
853 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
854 sizeof(ctx->stencil_ref))) {
855 ctx->stencil_ref = ctx->stencil_ref_saved;
856 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
857 }
858 }
859
860 void cso_set_render_condition(struct cso_context *ctx,
861 struct pipe_query *query,
862 boolean condition,
863 enum pipe_render_cond_flag mode)
864 {
865 struct pipe_context *pipe = ctx->pipe;
866
867 if (ctx->render_condition != query ||
868 ctx->render_condition_mode != mode ||
869 ctx->render_condition_cond != condition) {
870 pipe->render_condition(pipe, query, condition, mode);
871 ctx->render_condition = query;
872 ctx->render_condition_cond = condition;
873 ctx->render_condition_mode = mode;
874 }
875 }
876
877 static void
878 cso_save_render_condition(struct cso_context *ctx)
879 {
880 ctx->render_condition_saved = ctx->render_condition;
881 ctx->render_condition_cond_saved = ctx->render_condition_cond;
882 ctx->render_condition_mode_saved = ctx->render_condition_mode;
883 }
884
885 static void
886 cso_restore_render_condition(struct cso_context *ctx)
887 {
888 cso_set_render_condition(ctx, ctx->render_condition_saved,
889 ctx->render_condition_cond_saved,
890 ctx->render_condition_mode_saved);
891 }
892
893 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
894 {
895 assert(ctx->has_geometry_shader || !handle);
896
897 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
898 ctx->geometry_shader = handle;
899 ctx->pipe->bind_gs_state(ctx->pipe, handle);
900 }
901 }
902
903 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
904 {
905 if (handle == ctx->geometry_shader) {
906 /* unbind before deleting */
907 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
908 ctx->geometry_shader = NULL;
909 }
910 ctx->pipe->delete_gs_state(ctx->pipe, handle);
911 }
912
913 static void
914 cso_save_geometry_shader(struct cso_context *ctx)
915 {
916 if (!ctx->has_geometry_shader) {
917 return;
918 }
919
920 assert(!ctx->geometry_shader_saved);
921 ctx->geometry_shader_saved = ctx->geometry_shader;
922 }
923
924 static void
925 cso_restore_geometry_shader(struct cso_context *ctx)
926 {
927 if (!ctx->has_geometry_shader) {
928 return;
929 }
930
931 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
932 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
933 ctx->geometry_shader = ctx->geometry_shader_saved;
934 }
935 ctx->geometry_shader_saved = NULL;
936 }
937
938 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
939 {
940 assert(ctx->has_tessellation || !handle);
941
942 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
943 ctx->tessctrl_shader = handle;
944 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
945 }
946 }
947
948 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
949 {
950 if (handle == ctx->tessctrl_shader) {
951 /* unbind before deleting */
952 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
953 ctx->tessctrl_shader = NULL;
954 }
955 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
956 }
957
958 static void
959 cso_save_tessctrl_shader(struct cso_context *ctx)
960 {
961 if (!ctx->has_tessellation) {
962 return;
963 }
964
965 assert(!ctx->tessctrl_shader_saved);
966 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
967 }
968
969 static void
970 cso_restore_tessctrl_shader(struct cso_context *ctx)
971 {
972 if (!ctx->has_tessellation) {
973 return;
974 }
975
976 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
977 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
978 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
979 }
980 ctx->tessctrl_shader_saved = NULL;
981 }
982
983 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
984 {
985 assert(ctx->has_tessellation || !handle);
986
987 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
988 ctx->tesseval_shader = handle;
989 ctx->pipe->bind_tes_state(ctx->pipe, handle);
990 }
991 }
992
993 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
994 {
995 if (handle == ctx->tesseval_shader) {
996 /* unbind before deleting */
997 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
998 ctx->tesseval_shader = NULL;
999 }
1000 ctx->pipe->delete_tes_state(ctx->pipe, handle);
1001 }
1002
1003 static void
1004 cso_save_tesseval_shader(struct cso_context *ctx)
1005 {
1006 if (!ctx->has_tessellation) {
1007 return;
1008 }
1009
1010 assert(!ctx->tesseval_shader_saved);
1011 ctx->tesseval_shader_saved = ctx->tesseval_shader;
1012 }
1013
1014 static void
1015 cso_restore_tesseval_shader(struct cso_context *ctx)
1016 {
1017 if (!ctx->has_tessellation) {
1018 return;
1019 }
1020
1021 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
1022 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
1023 ctx->tesseval_shader = ctx->tesseval_shader_saved;
1024 }
1025 ctx->tesseval_shader_saved = NULL;
1026 }
1027
1028 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
1029 {
1030 assert(ctx->has_compute_shader || !handle);
1031
1032 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1033 ctx->compute_shader = handle;
1034 ctx->pipe->bind_compute_state(ctx->pipe, handle);
1035 }
1036 }
1037
1038 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
1039 {
1040 if (handle == ctx->compute_shader) {
1041 /* unbind before deleting */
1042 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
1043 ctx->compute_shader = NULL;
1044 }
1045 ctx->pipe->delete_compute_state(ctx->pipe, handle);
1046 }
1047
1048 enum pipe_error
1049 cso_set_vertex_elements(struct cso_context *ctx,
1050 unsigned count,
1051 const struct pipe_vertex_element *states)
1052 {
1053 struct u_vbuf *vbuf = ctx->vbuf;
1054 unsigned key_size, hash_key;
1055 struct cso_hash_iter iter;
1056 void *handle;
1057 struct cso_velems_state velems_state;
1058
1059 if (vbuf) {
1060 u_vbuf_set_vertex_elements(vbuf, count, states);
1061 return PIPE_OK;
1062 }
1063
1064    /* Need to include the count in the stored state data too.
1065     * Otherwise the first few pipe_vertex_elements could be identical
1066     * even if the counts differ, and there's no guarantee the hash would
1067     * be different in that case either.
1068     */
1069 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
1070 velems_state.count = count;
1071 memcpy(velems_state.velems, states,
1072 sizeof(struct pipe_vertex_element) * count);
1073 hash_key = cso_construct_key((void*)&velems_state, key_size);
1074 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
1075 (void*)&velems_state, key_size);
1076
1077 if (cso_hash_iter_is_null(iter)) {
1078 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
1079 if (!cso)
1080 return PIPE_ERROR_OUT_OF_MEMORY;
1081
1082 memcpy(&cso->state, &velems_state, key_size);
1083 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
1084 &cso->state.velems[0]);
1085 cso->delete_state =
1086 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
1087 cso->context = ctx->pipe;
1088
1089 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1090 if (cso_hash_iter_is_null(iter)) {
1091 FREE(cso);
1092 return PIPE_ERROR_OUT_OF_MEMORY;
1093 }
1094
1095 handle = cso->data;
1096 }
1097 else {
1098 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1099 }
1100
1101 if (ctx->velements != handle) {
1102 ctx->velements = handle;
1103 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1104 }
1105 return PIPE_OK;
1106 }
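/* Illustrative sketch (not part of the original file): describing a single
 * float4 position attribute.  As the comment above notes, the element count is
 * part of the cache key, so layouts sharing a prefix but differing in count
 * still get distinct cache entries.  The function name, buffer slot and format
 * are example choices.
 */
static enum pipe_error
example_set_position_layout(struct cso_context *cso)
{
   struct pipe_vertex_element velem;

   memset(&velem, 0, sizeof(velem));
   velem.src_offset = 0;
   velem.vertex_buffer_index = 0;
   velem.src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;

   return cso_set_vertex_elements(cso, 1, &velem);
}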
1107
1108 static void
1109 cso_save_vertex_elements(struct cso_context *ctx)
1110 {
1111 struct u_vbuf *vbuf = ctx->vbuf;
1112
1113 if (vbuf) {
1114 u_vbuf_save_vertex_elements(vbuf);
1115 return;
1116 }
1117
1118 assert(!ctx->velements_saved);
1119 ctx->velements_saved = ctx->velements;
1120 }
1121
1122 static void
1123 cso_restore_vertex_elements(struct cso_context *ctx)
1124 {
1125 struct u_vbuf *vbuf = ctx->vbuf;
1126
1127 if (vbuf) {
1128 u_vbuf_restore_vertex_elements(vbuf);
1129 return;
1130 }
1131
1132 if (ctx->velements != ctx->velements_saved) {
1133 ctx->velements = ctx->velements_saved;
1134 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1135 }
1136 ctx->velements_saved = NULL;
1137 }
1138
1139 /* vertex buffers */
1140
1141 void cso_set_vertex_buffers(struct cso_context *ctx,
1142 unsigned start_slot, unsigned count,
1143 const struct pipe_vertex_buffer *buffers)
1144 {
1145 struct u_vbuf *vbuf = ctx->vbuf;
1146
1147 if (vbuf) {
1148 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1149 return;
1150 }
1151
1152 /* Save what's in the auxiliary slot, so that we can save and restore it
1153 * for meta ops. */
1154 if (start_slot <= ctx->aux_vertex_buffer_index &&
1155 start_slot+count > ctx->aux_vertex_buffer_index) {
1156 if (buffers) {
1157 const struct pipe_vertex_buffer *vb =
1158 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1159
1160 pipe_vertex_buffer_reference(&ctx->aux_vertex_buffer_current, vb);
1161 } else {
1162 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_current);
1163 }
1164 }
1165
1166 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1167 }
1168
1169 static void
1170 cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1171 {
1172 struct u_vbuf *vbuf = ctx->vbuf;
1173
1174 if (vbuf) {
1175 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1176 return;
1177 }
1178
1179 pipe_vertex_buffer_reference(&ctx->aux_vertex_buffer_saved,
1180 &ctx->aux_vertex_buffer_current);
1181 }
1182
1183 static void
1184 cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1185 {
1186 struct u_vbuf *vbuf = ctx->vbuf;
1187
1188 if (vbuf) {
1189 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1190 return;
1191 }
1192
1193 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1194 &ctx->aux_vertex_buffer_saved);
1195 pipe_vertex_buffer_unreference(&ctx->aux_vertex_buffer_saved);
1196 }
1197
1198 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1199 {
1200 return ctx->aux_vertex_buffer_index;
1201 }
1202
1203
1204
1205 void
1206 cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
1207 unsigned idx, const struct pipe_sampler_state *templ)
1208 {
1209 if (templ) {
1210 unsigned key_size = sizeof(struct pipe_sampler_state);
1211 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1212 struct cso_sampler *cso;
1213 struct cso_hash_iter iter =
1214 cso_find_state_template(ctx->cache,
1215 hash_key, CSO_SAMPLER,
1216 (void *) templ, key_size);
1217
1218 if (cso_hash_iter_is_null(iter)) {
1219 cso = MALLOC(sizeof(struct cso_sampler));
1220 if (!cso)
1221 return;
1222
1223 memcpy(&cso->state, templ, sizeof(*templ));
1224 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1225 cso->delete_state =
1226 (cso_state_callback) ctx->pipe->delete_sampler_state;
1227 cso->context = ctx->pipe;
1228 cso->hash_key = hash_key;
1229
1230 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1231 if (cso_hash_iter_is_null(iter)) {
1232 FREE(cso);
1233 return;
1234 }
1235 }
1236 else {
1237 cso = cso_hash_iter_data(iter);
1238 }
1239
1240 ctx->samplers[shader_stage].cso_samplers[idx] = cso;
1241 ctx->samplers[shader_stage].samplers[idx] = cso->data;
1242 ctx->max_sampler_seen = MAX2(ctx->max_sampler_seen, (int)idx);
1243 }
1244 }
1245
1246
1247 /**
1248 * Send staged sampler state to the driver.
1249 */
1250 void
1251 cso_single_sampler_done(struct cso_context *ctx,
1252 enum pipe_shader_type shader_stage)
1253 {
1254 struct sampler_info *info = &ctx->samplers[shader_stage];
1255
1256 if (ctx->max_sampler_seen == -1)
1257 return;
1258
1259 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1260 ctx->max_sampler_seen + 1,
1261 info->samplers);
1262 ctx->max_sampler_seen = -1;
1263 }
1264
1265
1266 /*
1267 * If creating any of the sampler states fails we keep going, so that
1268 * we always try to set as many samplers as possible.
1269 */
1271 void
1272 cso_set_samplers(struct cso_context *ctx,
1273 enum pipe_shader_type shader_stage,
1274 unsigned nr,
1275 const struct pipe_sampler_state **templates)
1276 {
1277 for (unsigned i = 0; i < nr; i++)
1278 cso_single_sampler(ctx, shader_stage, i, templates[i]);
1279
1280 cso_single_sampler_done(ctx, shader_stage);
1281 }
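/* Illustrative sketch (not part of the original file): the staged path that
 * cso_set_samplers() uses internally, spelled out for a caller updating two
 * fragment sampler slots.  The function name and sampler settings are
 * arbitrary example values.
 */
static void
example_bind_two_linear_samplers(struct cso_context *cso)
{
   struct pipe_sampler_state sampler;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.normalized_coords = 1;

   /* Stage the CSOs; nothing is sent to the driver yet. */
   cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &sampler);
   cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 1, &sampler);

   /* One bind_sampler_states() call covering slots 0..max_sampler_seen. */
   cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
}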
1282
1283 static void
1284 cso_save_fragment_samplers(struct cso_context *ctx)
1285 {
1286 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1287 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1288
1289 memcpy(saved->cso_samplers, info->cso_samplers,
1290 sizeof(info->cso_samplers));
1291 memcpy(saved->samplers, info->samplers, sizeof(info->samplers));
1292 }
1293
1294
1295 static void
1296 cso_restore_fragment_samplers(struct cso_context *ctx)
1297 {
1298 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1299 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1300
1301 memcpy(info->cso_samplers, saved->cso_samplers,
1302 sizeof(info->cso_samplers));
1303 memcpy(info->samplers, saved->samplers, sizeof(info->samplers));
1304
1305 for (int i = PIPE_MAX_SAMPLERS - 1; i >= 0; i--) {
1306 if (info->samplers[i]) {
1307 ctx->max_sampler_seen = i;
1308 break;
1309 }
1310 }
1311
1312 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1313 }
1314
1315
1316 void
1317 cso_set_sampler_views(struct cso_context *ctx,
1318 enum pipe_shader_type shader_stage,
1319 unsigned count,
1320 struct pipe_sampler_view **views)
1321 {
1322 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1323 unsigned i;
1324 boolean any_change = FALSE;
1325
1326 /* reference new views */
1327 for (i = 0; i < count; i++) {
1328 any_change |= ctx->fragment_views[i] != views[i];
1329 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1330 }
1331 /* unref extra old views, if any */
1332 for (; i < ctx->nr_fragment_views; i++) {
1333 any_change |= ctx->fragment_views[i] != NULL;
1334 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1335 }
1336
1337 /* bind the new sampler views */
1338 if (any_change) {
1339 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1340 MAX2(ctx->nr_fragment_views, count),
1341 ctx->fragment_views);
1342 }
1343
1344 ctx->nr_fragment_views = count;
1345 }
1346 else
1347 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1348 }
1349
1350
1351 static void
1352 cso_save_fragment_sampler_views(struct cso_context *ctx)
1353 {
1354 unsigned i;
1355
1356 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1357
1358 for (i = 0; i < ctx->nr_fragment_views; i++) {
1359 assert(!ctx->fragment_views_saved[i]);
1360 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1361 ctx->fragment_views[i]);
1362 }
1363 }
1364
1365
1366 static void
1367 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1368 {
1369 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1370 unsigned num;
1371
1372 for (i = 0; i < nr_saved; i++) {
1373 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1374 /* move the reference from one pointer to another */
1375 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1376 ctx->fragment_views_saved[i] = NULL;
1377 }
1378 for (; i < ctx->nr_fragment_views; i++) {
1379 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1380 }
1381
1382 num = MAX2(ctx->nr_fragment_views, nr_saved);
1383
1384 /* bind the old/saved sampler views */
1385 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1386 ctx->fragment_views);
1387
1388 ctx->nr_fragment_views = nr_saved;
1389 ctx->nr_fragment_views_saved = 0;
1390 }
1391
1392
1393 void
1394 cso_set_shader_images(struct cso_context *ctx,
1395 enum pipe_shader_type shader_stage,
1396 unsigned start, unsigned count,
1397 struct pipe_image_view *images)
1398 {
1399 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1400 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1401 }
1402
1403 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1404 }
1405
1406
1407 static void
1408 cso_save_fragment_image0(struct cso_context *ctx)
1409 {
1410 util_copy_image_view(&ctx->fragment_image0_saved,
1411 &ctx->fragment_image0_current);
1412 }
1413
1414
1415 static void
1416 cso_restore_fragment_image0(struct cso_context *ctx)
1417 {
1418 cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1419 &ctx->fragment_image0_saved);
1420 }
1421
1422
1423 void
1424 cso_set_stream_outputs(struct cso_context *ctx,
1425 unsigned num_targets,
1426 struct pipe_stream_output_target **targets,
1427 const unsigned *offsets)
1428 {
1429 struct pipe_context *pipe = ctx->pipe;
1430 uint i;
1431
1432 if (!ctx->has_streamout) {
1433 assert(num_targets == 0);
1434 return;
1435 }
1436
1437 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1438 /* Nothing to do. */
1439 return;
1440 }
1441
1442 /* reference new targets */
1443 for (i = 0; i < num_targets; i++) {
1444 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1445 }
1446 /* unref extra old targets, if any */
1447 for (; i < ctx->nr_so_targets; i++) {
1448 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1449 }
1450
1451 pipe->set_stream_output_targets(pipe, num_targets, targets,
1452 offsets);
1453 ctx->nr_so_targets = num_targets;
1454 }
1455
1456 static void
1457 cso_save_stream_outputs(struct cso_context *ctx)
1458 {
1459 uint i;
1460
1461 if (!ctx->has_streamout) {
1462 return;
1463 }
1464
1465 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1466
1467 for (i = 0; i < ctx->nr_so_targets; i++) {
1468 assert(!ctx->so_targets_saved[i]);
1469 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1470 }
1471 }
1472
1473 static void
1474 cso_restore_stream_outputs(struct cso_context *ctx)
1475 {
1476 struct pipe_context *pipe = ctx->pipe;
1477 uint i;
1478 unsigned offset[PIPE_MAX_SO_BUFFERS];
1479
1480 if (!ctx->has_streamout) {
1481 return;
1482 }
1483
1484 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1485 /* Nothing to do. */
1486 return;
1487 }
1488
1489 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1490 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1491 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1492 /* move the reference from one pointer to another */
1493 ctx->so_targets[i] = ctx->so_targets_saved[i];
1494 ctx->so_targets_saved[i] = NULL;
1495 /* -1 means append */
1496 offset[i] = (unsigned)-1;
1497 }
1498 for (; i < ctx->nr_so_targets; i++) {
1499 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1500 }
1501
1502 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1503 ctx->so_targets, offset);
1504
1505 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1506 ctx->nr_so_targets_saved = 0;
1507 }
1508
1509 /* constant buffers */
1510
1511 void
1512 cso_set_constant_buffer(struct cso_context *cso,
1513 enum pipe_shader_type shader_stage,
1514 unsigned index, struct pipe_constant_buffer *cb)
1515 {
1516 struct pipe_context *pipe = cso->pipe;
1517
1518 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1519
1520 if (index == 0) {
1521 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1522 }
1523 }
1524
1525 void
1526 cso_set_constant_buffer_resource(struct cso_context *cso,
1527 enum pipe_shader_type shader_stage,
1528 unsigned index,
1529 struct pipe_resource *buffer)
1530 {
1531 if (buffer) {
1532 struct pipe_constant_buffer cb;
1533 cb.buffer = buffer;
1534 cb.buffer_offset = 0;
1535 cb.buffer_size = buffer->width0;
1536 cb.user_buffer = NULL;
1537 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1538 } else {
1539 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1540 }
1541 }
1542
1543 void
1544 cso_save_constant_buffer_slot0(struct cso_context *cso,
1545 enum pipe_shader_type shader_stage)
1546 {
1547 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1548 &cso->aux_constbuf_current[shader_stage]);
1549 }
1550
1551 void
1552 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1553 enum pipe_shader_type shader_stage)
1554 {
1555 cso_set_constant_buffer(cso, shader_stage, 0,
1556 &cso->aux_constbuf_saved[shader_stage]);
1557 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1558 NULL);
1559 }
1560
1561
1562 /**
1563 * Save all the CSO state items specified by the state_mask bitmask
1564 * of CSO_BIT_x flags.
1565 */
1566 void
1567 cso_save_state(struct cso_context *cso, unsigned state_mask)
1568 {
1569 assert(cso->saved_state == 0);
1570
1571 cso->saved_state = state_mask;
1572
1573 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1574 cso_save_aux_vertex_buffer_slot(cso);
1575 if (state_mask & CSO_BIT_BLEND)
1576 cso_save_blend(cso);
1577 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1578 cso_save_depth_stencil_alpha(cso);
1579 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1580 cso_save_fragment_samplers(cso);
1581 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1582 cso_save_fragment_sampler_views(cso);
1583 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1584 cso_save_fragment_shader(cso);
1585 if (state_mask & CSO_BIT_FRAMEBUFFER)
1586 cso_save_framebuffer(cso);
1587 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1588 cso_save_geometry_shader(cso);
1589 if (state_mask & CSO_BIT_MIN_SAMPLES)
1590 cso_save_min_samples(cso);
1591 if (state_mask & CSO_BIT_RASTERIZER)
1592 cso_save_rasterizer(cso);
1593 if (state_mask & CSO_BIT_RENDER_CONDITION)
1594 cso_save_render_condition(cso);
1595 if (state_mask & CSO_BIT_SAMPLE_MASK)
1596 cso_save_sample_mask(cso);
1597 if (state_mask & CSO_BIT_STENCIL_REF)
1598 cso_save_stencil_ref(cso);
1599 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1600 cso_save_stream_outputs(cso);
1601 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1602 cso_save_tessctrl_shader(cso);
1603 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1604 cso_save_tesseval_shader(cso);
1605 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1606 cso_save_vertex_elements(cso);
1607 if (state_mask & CSO_BIT_VERTEX_SHADER)
1608 cso_save_vertex_shader(cso);
1609 if (state_mask & CSO_BIT_VIEWPORT)
1610 cso_save_viewport(cso);
1611 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1612 cso->pipe->set_active_query_state(cso->pipe, false);
1613 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1614 cso_save_fragment_image0(cso);
1615 }
1616
1617
1618 /**
1619 * Restore the state which was saved by cso_save_state().
1620 */
1621 void
1622 cso_restore_state(struct cso_context *cso)
1623 {
1624 unsigned state_mask = cso->saved_state;
1625
1626 assert(state_mask);
1627
1628 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1629 cso_restore_aux_vertex_buffer_slot(cso);
1630 if (state_mask & CSO_BIT_BLEND)
1631 cso_restore_blend(cso);
1632 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1633 cso_restore_depth_stencil_alpha(cso);
1634 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1635 cso_restore_fragment_samplers(cso);
1636 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1637 cso_restore_fragment_sampler_views(cso);
1638 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1639 cso_restore_fragment_shader(cso);
1640 if (state_mask & CSO_BIT_FRAMEBUFFER)
1641 cso_restore_framebuffer(cso);
1642 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1643 cso_restore_geometry_shader(cso);
1644 if (state_mask & CSO_BIT_MIN_SAMPLES)
1645 cso_restore_min_samples(cso);
1646 if (state_mask & CSO_BIT_RASTERIZER)
1647 cso_restore_rasterizer(cso);
1648 if (state_mask & CSO_BIT_RENDER_CONDITION)
1649 cso_restore_render_condition(cso);
1650 if (state_mask & CSO_BIT_SAMPLE_MASK)
1651 cso_restore_sample_mask(cso);
1652 if (state_mask & CSO_BIT_STENCIL_REF)
1653 cso_restore_stencil_ref(cso);
1654 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1655 cso_restore_stream_outputs(cso);
1656 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1657 cso_restore_tessctrl_shader(cso);
1658 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1659 cso_restore_tesseval_shader(cso);
1660 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1661 cso_restore_vertex_elements(cso);
1662 if (state_mask & CSO_BIT_VERTEX_SHADER)
1663 cso_restore_vertex_shader(cso);
1664 if (state_mask & CSO_BIT_VIEWPORT)
1665 cso_restore_viewport(cso);
1666 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1667 cso->pipe->set_active_query_state(cso->pipe, true);
1668 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1669 cso_restore_fragment_image0(cso);
1670
1671 cso->saved_state = 0;
1672 }
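/* Illustrative sketch (not part of the original file): cso_save_state() and
 * cso_restore_state() are meant to bracket a meta operation so the caller's
 * bindings survive unchanged.  The function name and the particular bit
 * selection are example choices.
 */
static void
example_meta_op(struct cso_context *cso)
{
   cso_save_state(cso, CSO_BIT_BLEND |
                       CSO_BIT_FRAGMENT_SHADER |
                       CSO_BIT_VERTEX_SHADER |
                       CSO_BIT_FRAMEBUFFER |
                       CSO_BIT_VIEWPORT);

   /* ... bind temporary state via cso_set_*() and draw ... */

   /* Rebinds only what actually changed, then clears cso->saved_state. */
   cso_restore_state(cso);
}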
1673
1674
1675
1676 /* drawing */
1677
1678 void
1679 cso_draw_vbo(struct cso_context *cso,
1680 const struct pipe_draw_info *info)
1681 {
1682 struct u_vbuf *vbuf = cso->vbuf;
1683
1684 if (vbuf) {
1685 u_vbuf_draw_vbo(vbuf, info);
1686 } else {
1687 struct pipe_context *pipe = cso->pipe;
1688 pipe->draw_vbo(pipe, info);
1689 }
1690 }
1691
1692 void
1693 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1694 {
1695 struct pipe_draw_info info;
1696
1697 util_draw_init_info(&info);
1698
1699 info.mode = mode;
1700 info.start = start;
1701 info.count = count;
1702 info.min_index = start;
1703 info.max_index = start + count - 1;
1704
1705 cso_draw_vbo(cso, &info);
1706 }
1707
1708 void
1709 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1710 uint start, uint count,
1711 uint start_instance, uint instance_count)
1712 {
1713 struct pipe_draw_info info;
1714
1715 util_draw_init_info(&info);
1716
1717 info.mode = mode;
1718 info.start = start;
1719 info.count = count;
1720 info.min_index = start;
1721 info.max_index = start + count - 1;
1722 info.start_instance = start_instance;
1723 info.instance_count = instance_count;
1724
1725 cso_draw_vbo(cso, &info);
1726 }