cso: s/unsigned/enum pipe_shader_type/
[mesa.git] src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Per-shader sampler information.
55 */
56 struct sampler_info
57 {
58 struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
59 void *samplers[PIPE_MAX_SAMPLERS];
60 unsigned nr_samplers;
61 };
62
63
64
65 struct cso_context {
66 struct pipe_context *pipe;
67 struct cso_cache *cache;
68 struct u_vbuf *vbuf;
69
70 boolean has_geometry_shader;
71 boolean has_tessellation;
72 boolean has_compute_shader;
73 boolean has_streamout;
74
75 unsigned saved_state; /**< bitmask of CSO_BIT_x flags */
76
77 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
78 unsigned nr_fragment_views;
79
80 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
81 unsigned nr_fragment_views_saved;
82
83 struct sampler_info fragment_samplers_saved;
84 struct sampler_info samplers[PIPE_SHADER_TYPES];
85
86 struct pipe_vertex_buffer aux_vertex_buffer_current;
87 struct pipe_vertex_buffer aux_vertex_buffer_saved;
88 unsigned aux_vertex_buffer_index;
89
90 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
91 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
92
93 struct pipe_image_view fragment_image0_current;
94 struct pipe_image_view fragment_image0_saved;
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved;
109 void *vertex_shader, *vertex_shader_saved;
110 void *geometry_shader, *geometry_shader_saved;
111 void *tessctrl_shader, *tessctrl_shader_saved;
112 void *tesseval_shader, *tesseval_shader_saved;
113 void *compute_shader;
114 void *velements, *velements_saved;
115 struct pipe_query *render_condition, *render_condition_saved;
116 uint render_condition_mode, render_condition_mode_saved;
117 boolean render_condition_cond, render_condition_cond_saved;
118
119 struct pipe_framebuffer_state fb, fb_saved;
120 struct pipe_viewport_state vp, vp_saved;
121 struct pipe_blend_color blend_color;
122 unsigned sample_mask, sample_mask_saved;
123 unsigned min_samples, min_samples_saved;
124 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
125 };
126
127
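/*
 * Helpers for deleting cached state objects.  Apart from samplers, each
 * returns FALSE (leaving the object alone) when the state is currently
 * bound; bound sampler states are instead protected by sanitize_hash(),
 * which temporarily pulls them out of the hash table.
 */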
128 static boolean delete_blend_state(struct cso_context *ctx, void *state)
129 {
130 struct cso_blend *cso = (struct cso_blend *)state;
131
132 if (ctx->blend == cso->data)
133 return FALSE;
134
135 if (cso->delete_state)
136 cso->delete_state(cso->context, cso->data);
137 FREE(state);
138 return TRUE;
139 }
140
141 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
142 {
143 struct cso_depth_stencil_alpha *cso =
144 (struct cso_depth_stencil_alpha *)state;
145
146 if (ctx->depth_stencil == cso->data)
147 return FALSE;
148
149 if (cso->delete_state)
150 cso->delete_state(cso->context, cso->data);
151 FREE(state);
152
153 return TRUE;
154 }
155
156 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
157 {
158 struct cso_sampler *cso = (struct cso_sampler *)state;
159 if (cso->delete_state)
160 cso->delete_state(cso->context, cso->data);
161 FREE(state);
162 return TRUE;
163 }
164
165 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
166 {
167 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
168
169 if (ctx->rasterizer == cso->data)
170 return FALSE;
171 if (cso->delete_state)
172 cso->delete_state(cso->context, cso->data);
173 FREE(state);
174 return TRUE;
175 }
176
177 static boolean delete_vertex_elements(struct cso_context *ctx,
178 void *state)
179 {
180 struct cso_velements *cso = (struct cso_velements *)state;
181
182 if (ctx->velements == cso->data)
183 return FALSE;
184
185 if (cso->delete_state)
186 cso->delete_state(cso->context, cso->data);
187 FREE(state);
188 return TRUE;
189 }
190
191
192 static inline boolean delete_cso(struct cso_context *ctx,
193 void *state, enum cso_cache_type type)
194 {
195 switch (type) {
196 case CSO_BLEND:
197 return delete_blend_state(ctx, state);
198 case CSO_SAMPLER:
199 return delete_sampler_state(ctx, state);
200 case CSO_DEPTH_STENCIL_ALPHA:
201 return delete_depth_stencil_state(ctx, state);
202 case CSO_RASTERIZER:
203 return delete_rasterizer_state(ctx, state);
204 case CSO_VELEMENTS:
205 return delete_vertex_elements(ctx, state);
206 default:
207 assert(0);
208 FREE(state);
209 }
210 return FALSE;
211 }
212
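/*
 * Sanitize callback for the cso cache: when a hash table grows close to its
 * maximum size, delete roughly a fourth of its entries (skipping any state
 * that is currently bound), so later insertions don't keep hitting the limit.
 */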
213 static inline void
214 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
215 int max_size, void *user_data)
216 {
217 struct cso_context *ctx = (struct cso_context *)user_data;
218    /* If we're approaching the maximum size, remove a fourth of the entries;
219     * otherwise every subsequent call will go through the same thing. */
220 int hash_size = cso_hash_size(hash);
221 int max_entries = (max_size > hash_size) ? max_size : hash_size;
222 int to_remove = (max_size < max_entries) * max_entries/4;
223 struct cso_hash_iter iter;
224 struct cso_sampler **samplers_to_restore = NULL;
225 unsigned to_restore = 0;
226
227 if (hash_size > max_size)
228 to_remove += hash_size - max_size;
229
230 if (to_remove == 0)
231 return;
232
233 if (type == CSO_SAMPLER) {
234 int i, j;
235
236 samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
237 sizeof(*samplers_to_restore));
238
239 /* Temporarily remove currently bound sampler states from the hash
240 * table, to prevent them from being deleted
241 */
242 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
243 for (j = 0; j < ctx->samplers[i].nr_samplers; j++) {
244 struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
245
246 if (sampler && cso_hash_take(hash, sampler->hash_key))
247 samplers_to_restore[to_restore++] = sampler;
248 }
249 }
250 }
251
252 iter = cso_hash_first_node(hash);
253 while (to_remove) {
254       /* remove elements until we're good */
255       /* FIXME: currently we pick the nodes to remove at random */
256 void *cso = cso_hash_iter_data(iter);
257
258 if (!cso)
259 break;
260
261 if (delete_cso(ctx, cso, type)) {
262 iter = cso_hash_erase(hash, iter);
263 --to_remove;
264 } else
265 iter = cso_hash_iter_next(iter);
266 }
267
268 if (type == CSO_SAMPLER) {
269 /* Put currently bound sampler states back into the hash table */
270 while (to_restore--) {
271 struct cso_sampler *sampler = samplers_to_restore[to_restore];
272
273 cso_hash_insert(hash, sampler->hash_key, sampler);
274 }
275
276 FREE(samplers_to_restore);
277 }
278 }
279
280 static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
281 {
282 struct u_vbuf_caps caps;
283
284 /* Install u_vbuf if there is anything unsupported. */
285 if (u_vbuf_get_caps(cso->pipe->screen, &caps, flags)) {
286 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
287 cso->aux_vertex_buffer_index);
288 }
289 }
290
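/**
 * Create a CSO context wrapping the given pipe context.  This probes the
 * driver for optional capabilities (geometry/tessellation/compute shaders,
 * stream output) and installs u_vbuf when vertex-buffer translation is
 * needed.
 */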
291 struct cso_context *
292 cso_create_context(struct pipe_context *pipe, unsigned u_vbuf_flags)
293 {
294 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
295 if (!ctx)
296 return NULL;
297
298 ctx->cache = cso_cache_create();
299 if (ctx->cache == NULL)
300 goto out;
301 cso_cache_set_sanitize_callback(ctx->cache,
302 sanitize_hash,
303 ctx);
304
305 ctx->pipe = pipe;
306 ctx->sample_mask = ~0;
307
308 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
309
310 cso_init_vbuf(ctx, u_vbuf_flags);
311
312 /* Enable for testing: */
313 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
314
315 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
316 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
317 ctx->has_geometry_shader = TRUE;
318 }
319 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
320 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
321 ctx->has_tessellation = TRUE;
322 }
323 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
324 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
325 int supported_irs =
326 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
327 PIPE_SHADER_CAP_SUPPORTED_IRS);
328 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
329 ctx->has_compute_shader = TRUE;
330 }
331 }
332 if (pipe->screen->get_param(pipe->screen,
333 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
334 ctx->has_streamout = TRUE;
335 }
336
337 return ctx;
338
339 out:
340 cso_destroy_context( ctx );
341 return NULL;
342 }
343
344 /**
345 * Free the CSO context.
346 */
347 void cso_destroy_context( struct cso_context *ctx )
348 {
349 unsigned i;
350
351 if (ctx->pipe) {
352 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
353
354 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
355 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
356
357 {
358 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
359 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
360 struct pipe_screen *scr = ctx->pipe->screen;
361 enum pipe_shader_type sh;
362 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
363 int maxsam = scr->get_shader_param(scr, sh,
364 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
365 int maxview = scr->get_shader_param(scr, sh,
366 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
367 assert(maxsam <= PIPE_MAX_SAMPLERS);
368 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
369 if (maxsam > 0) {
370 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
371 }
372 if (maxview > 0) {
373 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
374 }
375 }
376 }
377
378 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
379 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
380 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
381 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
382 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
383 if (ctx->has_geometry_shader) {
384 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
385 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
386 }
387 if (ctx->has_tessellation) {
388 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
389 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
390 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
391 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
392 }
393 if (ctx->has_compute_shader) {
394 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
395 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
396 }
397 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
398
399 if (ctx->has_streamout)
400 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
401 }
402
403 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
404 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
405 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
406 }
407
408 util_unreference_framebuffer_state(&ctx->fb);
409 util_unreference_framebuffer_state(&ctx->fb_saved);
410
411 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
412 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
413
414 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
415 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
416 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
417 }
418
419 pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
420 pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);
421
422 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
423 pipe_so_target_reference(&ctx->so_targets[i], NULL);
424 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
425 }
426
427 if (ctx->cache) {
428 cso_cache_delete( ctx->cache );
429 ctx->cache = NULL;
430 }
431
432 if (ctx->vbuf)
433 u_vbuf_destroy(ctx->vbuf);
434 FREE( ctx );
435 }
436
437
438 /* These functions will either find the state matching the given
439  * template in the cache, or create a new state from the template,
440  * insert it in the cache and return it.
441  */
442
443 /*
444  * If the driver returns 0 from the create method, the data member of
445  * the cso is set to the template itself.
446  */
447
448 enum pipe_error cso_set_blend(struct cso_context *ctx,
449 const struct pipe_blend_state *templ)
450 {
451 unsigned key_size, hash_key;
452 struct cso_hash_iter iter;
453 void *handle;
454
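   /* When independent blend is disabled only rt[0] is significant, so only
    * the prefix of the template up to rt[1] is hashed and compared.
    */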
455 key_size = templ->independent_blend_enable ?
456 sizeof(struct pipe_blend_state) :
457 (char *)&(templ->rt[1]) - (char *)templ;
458 hash_key = cso_construct_key((void*)templ, key_size);
459 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
460 (void*)templ, key_size);
461
462 if (cso_hash_iter_is_null(iter)) {
463 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
464 if (!cso)
465 return PIPE_ERROR_OUT_OF_MEMORY;
466
467 memset(&cso->state, 0, sizeof cso->state);
468 memcpy(&cso->state, templ, key_size);
469 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
470 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
471 cso->context = ctx->pipe;
472
473 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
474 if (cso_hash_iter_is_null(iter)) {
475 FREE(cso);
476 return PIPE_ERROR_OUT_OF_MEMORY;
477 }
478
479 handle = cso->data;
480 }
481 else {
482 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
483 }
484
485 if (ctx->blend != handle) {
486 ctx->blend = handle;
487 ctx->pipe->bind_blend_state(ctx->pipe, handle);
488 }
489 return PIPE_OK;
490 }
491
492 static void
493 cso_save_blend(struct cso_context *ctx)
494 {
495 assert(!ctx->blend_saved);
496 ctx->blend_saved = ctx->blend;
497 }
498
499 static void
500 cso_restore_blend(struct cso_context *ctx)
501 {
502 if (ctx->blend != ctx->blend_saved) {
503 ctx->blend = ctx->blend_saved;
504 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
505 }
506 ctx->blend_saved = NULL;
507 }
508
509
510
511 enum pipe_error
512 cso_set_depth_stencil_alpha(struct cso_context *ctx,
513 const struct pipe_depth_stencil_alpha_state *templ)
514 {
515 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
516 unsigned hash_key = cso_construct_key((void*)templ, key_size);
517 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
518 hash_key,
519 CSO_DEPTH_STENCIL_ALPHA,
520 (void*)templ, key_size);
521 void *handle;
522
523 if (cso_hash_iter_is_null(iter)) {
524 struct cso_depth_stencil_alpha *cso =
525 MALLOC(sizeof(struct cso_depth_stencil_alpha));
526 if (!cso)
527 return PIPE_ERROR_OUT_OF_MEMORY;
528
529 memcpy(&cso->state, templ, sizeof(*templ));
530 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
531 &cso->state);
532 cso->delete_state =
533 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
534 cso->context = ctx->pipe;
535
536 iter = cso_insert_state(ctx->cache, hash_key,
537 CSO_DEPTH_STENCIL_ALPHA, cso);
538 if (cso_hash_iter_is_null(iter)) {
539 FREE(cso);
540 return PIPE_ERROR_OUT_OF_MEMORY;
541 }
542
543 handle = cso->data;
544 }
545 else {
546 handle = ((struct cso_depth_stencil_alpha *)
547 cso_hash_iter_data(iter))->data;
548 }
549
550 if (ctx->depth_stencil != handle) {
551 ctx->depth_stencil = handle;
552 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
553 }
554 return PIPE_OK;
555 }
556
557 static void
558 cso_save_depth_stencil_alpha(struct cso_context *ctx)
559 {
560 assert(!ctx->depth_stencil_saved);
561 ctx->depth_stencil_saved = ctx->depth_stencil;
562 }
563
564 static void
565 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
566 {
567 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
568 ctx->depth_stencil = ctx->depth_stencil_saved;
569 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
570 ctx->depth_stencil_saved);
571 }
572 ctx->depth_stencil_saved = NULL;
573 }
574
575
576
577 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
578 const struct pipe_rasterizer_state *templ)
579 {
580 unsigned key_size = sizeof(struct pipe_rasterizer_state);
581 unsigned hash_key = cso_construct_key((void*)templ, key_size);
582 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
583 hash_key,
584 CSO_RASTERIZER,
585 (void*)templ, key_size);
586 void *handle = NULL;
587
588 if (cso_hash_iter_is_null(iter)) {
589 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
590 if (!cso)
591 return PIPE_ERROR_OUT_OF_MEMORY;
592
593 memcpy(&cso->state, templ, sizeof(*templ));
594 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
595 cso->delete_state =
596 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
597 cso->context = ctx->pipe;
598
599 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
600 if (cso_hash_iter_is_null(iter)) {
601 FREE(cso);
602 return PIPE_ERROR_OUT_OF_MEMORY;
603 }
604
605 handle = cso->data;
606 }
607 else {
608 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
609 }
610
611 if (ctx->rasterizer != handle) {
612 ctx->rasterizer = handle;
613 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
614 }
615 return PIPE_OK;
616 }
617
618 static void
619 cso_save_rasterizer(struct cso_context *ctx)
620 {
621 assert(!ctx->rasterizer_saved);
622 ctx->rasterizer_saved = ctx->rasterizer;
623 }
624
625 static void
626 cso_restore_rasterizer(struct cso_context *ctx)
627 {
628 if (ctx->rasterizer != ctx->rasterizer_saved) {
629 ctx->rasterizer = ctx->rasterizer_saved;
630 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
631 }
632 ctx->rasterizer_saved = NULL;
633 }
634
635
636 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
637 {
638 if (ctx->fragment_shader != handle) {
639 ctx->fragment_shader = handle;
640 ctx->pipe->bind_fs_state(ctx->pipe, handle);
641 }
642 }
643
644 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
645 {
646 if (handle == ctx->fragment_shader) {
647 /* unbind before deleting */
648 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
649 ctx->fragment_shader = NULL;
650 }
651 ctx->pipe->delete_fs_state(ctx->pipe, handle);
652 }
653
654 static void
655 cso_save_fragment_shader(struct cso_context *ctx)
656 {
657 assert(!ctx->fragment_shader_saved);
658 ctx->fragment_shader_saved = ctx->fragment_shader;
659 }
660
661 static void
662 cso_restore_fragment_shader(struct cso_context *ctx)
663 {
664 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
665 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
666 ctx->fragment_shader = ctx->fragment_shader_saved;
667 }
668 ctx->fragment_shader_saved = NULL;
669 }
670
671
672 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
673 {
674 if (ctx->vertex_shader != handle) {
675 ctx->vertex_shader = handle;
676 ctx->pipe->bind_vs_state(ctx->pipe, handle);
677 }
678 }
679
680 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
681 {
682 if (handle == ctx->vertex_shader) {
683 /* unbind before deleting */
684 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
685 ctx->vertex_shader = NULL;
686 }
687 ctx->pipe->delete_vs_state(ctx->pipe, handle);
688 }
689
690 static void
691 cso_save_vertex_shader(struct cso_context *ctx)
692 {
693 assert(!ctx->vertex_shader_saved);
694 ctx->vertex_shader_saved = ctx->vertex_shader;
695 }
696
697 static void
698 cso_restore_vertex_shader(struct cso_context *ctx)
699 {
700 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
701 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
702 ctx->vertex_shader = ctx->vertex_shader_saved;
703 }
704 ctx->vertex_shader_saved = NULL;
705 }
706
707
708 void cso_set_framebuffer(struct cso_context *ctx,
709 const struct pipe_framebuffer_state *fb)
710 {
711 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
712 util_copy_framebuffer_state(&ctx->fb, fb);
713 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
714 }
715 }
716
717 static void
718 cso_save_framebuffer(struct cso_context *ctx)
719 {
720 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
721 }
722
723 static void
724 cso_restore_framebuffer(struct cso_context *ctx)
725 {
726 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
727 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
728 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
729 util_unreference_framebuffer_state(&ctx->fb_saved);
730 }
731 }
732
733
734 void cso_set_viewport(struct cso_context *ctx,
735 const struct pipe_viewport_state *vp)
736 {
737 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
738 ctx->vp = *vp;
739 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
740 }
741 }
742
743 /**
744  * Set up viewport state for the given width and height (position is always (0,0)).
745 * Invert the Y axis if 'invert' is true.
746 */
747 void
748 cso_set_viewport_dims(struct cso_context *ctx,
749 float width, float height, boolean invert)
750 {
751 struct pipe_viewport_state vp;
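   /* Map NDC [-1,1] to window coords [0,width] x [0,height]:
    * x_win = x_ndc * width/2 + width/2, and likewise for y (with the sign of
    * the y scale flipped when 'invert' is set) and for z mapped to [0,1].
    */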
752 vp.scale[0] = width * 0.5f;
753 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
754 vp.scale[2] = 0.5f;
755 vp.translate[0] = 0.5f * width;
756 vp.translate[1] = 0.5f * height;
757 vp.translate[2] = 0.5f;
758 cso_set_viewport(ctx, &vp);
759 }
760
761 static void
762 cso_save_viewport(struct cso_context *ctx)
763 {
764 ctx->vp_saved = ctx->vp;
765 }
766
767
768 static void
769 cso_restore_viewport(struct cso_context *ctx)
770 {
771 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
772 ctx->vp = ctx->vp_saved;
773 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
774 }
775 }
776
777
778 void cso_set_blend_color(struct cso_context *ctx,
779 const struct pipe_blend_color *bc)
780 {
781 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
782 ctx->blend_color = *bc;
783 ctx->pipe->set_blend_color(ctx->pipe, bc);
784 }
785 }
786
787 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
788 {
789 if (ctx->sample_mask != sample_mask) {
790 ctx->sample_mask = sample_mask;
791 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
792 }
793 }
794
795 static void
796 cso_save_sample_mask(struct cso_context *ctx)
797 {
798 ctx->sample_mask_saved = ctx->sample_mask;
799 }
800
801 static void
802 cso_restore_sample_mask(struct cso_context *ctx)
803 {
804 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
805 }
806
807 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
808 {
809 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
810 ctx->min_samples = min_samples;
811 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
812 }
813 }
814
815 static void
816 cso_save_min_samples(struct cso_context *ctx)
817 {
818 ctx->min_samples_saved = ctx->min_samples;
819 }
820
821 static void
822 cso_restore_min_samples(struct cso_context *ctx)
823 {
824 cso_set_min_samples(ctx, ctx->min_samples_saved);
825 }
826
827 void cso_set_stencil_ref(struct cso_context *ctx,
828 const struct pipe_stencil_ref *sr)
829 {
830 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
831 ctx->stencil_ref = *sr;
832 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
833 }
834 }
835
836 static void
837 cso_save_stencil_ref(struct cso_context *ctx)
838 {
839 ctx->stencil_ref_saved = ctx->stencil_ref;
840 }
841
842
843 static void
844 cso_restore_stencil_ref(struct cso_context *ctx)
845 {
846 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
847 sizeof(ctx->stencil_ref))) {
848 ctx->stencil_ref = ctx->stencil_ref_saved;
849 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
850 }
851 }
852
853 void cso_set_render_condition(struct cso_context *ctx,
854 struct pipe_query *query,
855 boolean condition, uint mode)
856 {
857 struct pipe_context *pipe = ctx->pipe;
858
859 if (ctx->render_condition != query ||
860 ctx->render_condition_mode != mode ||
861 ctx->render_condition_cond != condition) {
862 pipe->render_condition(pipe, query, condition, mode);
863 ctx->render_condition = query;
864 ctx->render_condition_cond = condition;
865 ctx->render_condition_mode = mode;
866 }
867 }
868
869 static void
870 cso_save_render_condition(struct cso_context *ctx)
871 {
872 ctx->render_condition_saved = ctx->render_condition;
873 ctx->render_condition_cond_saved = ctx->render_condition_cond;
874 ctx->render_condition_mode_saved = ctx->render_condition_mode;
875 }
876
877 static void
878 cso_restore_render_condition(struct cso_context *ctx)
879 {
880 cso_set_render_condition(ctx, ctx->render_condition_saved,
881 ctx->render_condition_cond_saved,
882 ctx->render_condition_mode_saved);
883 }
884
885 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
886 {
887 assert(ctx->has_geometry_shader || !handle);
888
889 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
890 ctx->geometry_shader = handle;
891 ctx->pipe->bind_gs_state(ctx->pipe, handle);
892 }
893 }
894
895 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
896 {
897 if (handle == ctx->geometry_shader) {
898 /* unbind before deleting */
899 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
900 ctx->geometry_shader = NULL;
901 }
902 ctx->pipe->delete_gs_state(ctx->pipe, handle);
903 }
904
905 static void
906 cso_save_geometry_shader(struct cso_context *ctx)
907 {
908 if (!ctx->has_geometry_shader) {
909 return;
910 }
911
912 assert(!ctx->geometry_shader_saved);
913 ctx->geometry_shader_saved = ctx->geometry_shader;
914 }
915
916 static void
917 cso_restore_geometry_shader(struct cso_context *ctx)
918 {
919 if (!ctx->has_geometry_shader) {
920 return;
921 }
922
923 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
924 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
925 ctx->geometry_shader = ctx->geometry_shader_saved;
926 }
927 ctx->geometry_shader_saved = NULL;
928 }
929
930 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
931 {
932 assert(ctx->has_tessellation || !handle);
933
934 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
935 ctx->tessctrl_shader = handle;
936 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
937 }
938 }
939
940 void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
941 {
942 if (handle == ctx->tessctrl_shader) {
943 /* unbind before deleting */
944 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
945 ctx->tessctrl_shader = NULL;
946 }
947 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
948 }
949
950 static void
951 cso_save_tessctrl_shader(struct cso_context *ctx)
952 {
953 if (!ctx->has_tessellation) {
954 return;
955 }
956
957 assert(!ctx->tessctrl_shader_saved);
958 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
959 }
960
961 static void
962 cso_restore_tessctrl_shader(struct cso_context *ctx)
963 {
964 if (!ctx->has_tessellation) {
965 return;
966 }
967
968 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
969 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
970 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
971 }
972 ctx->tessctrl_shader_saved = NULL;
973 }
974
975 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
976 {
977 assert(ctx->has_tessellation || !handle);
978
979 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
980 ctx->tesseval_shader = handle;
981 ctx->pipe->bind_tes_state(ctx->pipe, handle);
982 }
983 }
984
985 void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
986 {
987 if (handle == ctx->tesseval_shader) {
988 /* unbind before deleting */
989 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
990 ctx->tesseval_shader = NULL;
991 }
992 ctx->pipe->delete_tes_state(ctx->pipe, handle);
993 }
994
995 static void
996 cso_save_tesseval_shader(struct cso_context *ctx)
997 {
998 if (!ctx->has_tessellation) {
999 return;
1000 }
1001
1002 assert(!ctx->tesseval_shader_saved);
1003 ctx->tesseval_shader_saved = ctx->tesseval_shader;
1004 }
1005
1006 static void
1007 cso_restore_tesseval_shader(struct cso_context *ctx)
1008 {
1009 if (!ctx->has_tessellation) {
1010 return;
1011 }
1012
1013 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
1014 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
1015 ctx->tesseval_shader = ctx->tesseval_shader_saved;
1016 }
1017 ctx->tesseval_shader_saved = NULL;
1018 }
1019
1020 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
1021 {
1022 assert(ctx->has_compute_shader || !handle);
1023
1024 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1025 ctx->compute_shader = handle;
1026 ctx->pipe->bind_compute_state(ctx->pipe, handle);
1027 }
1028 }
1029
1030 void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
1031 {
1032 if (handle == ctx->compute_shader) {
1033 /* unbind before deleting */
1034 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
1035 ctx->compute_shader = NULL;
1036 }
1037 ctx->pipe->delete_compute_state(ctx->pipe, handle);
1038 }
1039
1040 enum pipe_error
1041 cso_set_vertex_elements(struct cso_context *ctx,
1042 unsigned count,
1043 const struct pipe_vertex_element *states)
1044 {
1045 struct u_vbuf *vbuf = ctx->vbuf;
1046 unsigned key_size, hash_key;
1047 struct cso_hash_iter iter;
1048 void *handle;
1049 struct cso_velems_state velems_state;
1050
1051 if (vbuf) {
1052 u_vbuf_set_vertex_elements(vbuf, count, states);
1053 return PIPE_OK;
1054 }
1055
1056    /* Need to include the count in the stored state data too.
1057     * Otherwise the first 'count' pipe_vertex_elements could be identical
1058     * even if the counts differ, and there's no guarantee the hashes
1059     * would differ in that case either.
1060     */
1061 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
1062 velems_state.count = count;
1063 memcpy(velems_state.velems, states,
1064 sizeof(struct pipe_vertex_element) * count);
1065 hash_key = cso_construct_key((void*)&velems_state, key_size);
1066 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
1067 (void*)&velems_state, key_size);
1068
1069 if (cso_hash_iter_is_null(iter)) {
1070 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
1071 if (!cso)
1072 return PIPE_ERROR_OUT_OF_MEMORY;
1073
1074 memcpy(&cso->state, &velems_state, key_size);
1075 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
1076 &cso->state.velems[0]);
1077 cso->delete_state =
1078 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
1079 cso->context = ctx->pipe;
1080
1081 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1082 if (cso_hash_iter_is_null(iter)) {
1083 FREE(cso);
1084 return PIPE_ERROR_OUT_OF_MEMORY;
1085 }
1086
1087 handle = cso->data;
1088 }
1089 else {
1090 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1091 }
1092
1093 if (ctx->velements != handle) {
1094 ctx->velements = handle;
1095 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1096 }
1097 return PIPE_OK;
1098 }
1099
1100 static void
1101 cso_save_vertex_elements(struct cso_context *ctx)
1102 {
1103 struct u_vbuf *vbuf = ctx->vbuf;
1104
1105 if (vbuf) {
1106 u_vbuf_save_vertex_elements(vbuf);
1107 return;
1108 }
1109
1110 assert(!ctx->velements_saved);
1111 ctx->velements_saved = ctx->velements;
1112 }
1113
1114 static void
1115 cso_restore_vertex_elements(struct cso_context *ctx)
1116 {
1117 struct u_vbuf *vbuf = ctx->vbuf;
1118
1119 if (vbuf) {
1120 u_vbuf_restore_vertex_elements(vbuf);
1121 return;
1122 }
1123
1124 if (ctx->velements != ctx->velements_saved) {
1125 ctx->velements = ctx->velements_saved;
1126 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1127 }
1128 ctx->velements_saved = NULL;
1129 }
1130
1131 /* vertex buffers */
1132
1133 void cso_set_vertex_buffers(struct cso_context *ctx,
1134 unsigned start_slot, unsigned count,
1135 const struct pipe_vertex_buffer *buffers)
1136 {
1137 struct u_vbuf *vbuf = ctx->vbuf;
1138
1139 if (vbuf) {
1140 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1141 return;
1142 }
1143
1144    /* Keep track of what's in the auxiliary slot, so that we can save and
1145     * restore it for meta ops. */
1146 if (start_slot <= ctx->aux_vertex_buffer_index &&
1147 start_slot+count > ctx->aux_vertex_buffer_index) {
1148 if (buffers) {
1149 const struct pipe_vertex_buffer *vb =
1150 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1151
1152 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1153 vb->buffer);
1154 memcpy(&ctx->aux_vertex_buffer_current, vb,
1155 sizeof(struct pipe_vertex_buffer));
1156 }
1157 else {
1158 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1159 NULL);
1160 ctx->aux_vertex_buffer_current.user_buffer = NULL;
1161 }
1162 }
1163
1164 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1165 }
1166
1167 static void
1168 cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1169 {
1170 struct u_vbuf *vbuf = ctx->vbuf;
1171
1172 if (vbuf) {
1173 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1174 return;
1175 }
1176
1177 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1178 ctx->aux_vertex_buffer_current.buffer);
1179 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1180 sizeof(struct pipe_vertex_buffer));
1181 }
1182
1183 static void
1184 cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1185 {
1186 struct u_vbuf *vbuf = ctx->vbuf;
1187
1188 if (vbuf) {
1189 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1190 return;
1191 }
1192
1193 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1194 &ctx->aux_vertex_buffer_saved);
1195 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1196 }
1197
1198 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1199 {
1200 return ctx->aux_vertex_buffer_index;
1201 }
1202
1203
1204
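/**
 * Stage a single sampler state for the given shader stage and sampler index.
 * The state object is looked up in (or added to) the CSO cache; nothing is
 * bound until cso_single_sampler_done() is called.
 */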
1205 enum pipe_error
1206 cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
1207 unsigned idx, const struct pipe_sampler_state *templ)
1208 {
1209 if (templ) {
1210 unsigned key_size = sizeof(struct pipe_sampler_state);
1211 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1212 struct cso_sampler *cso;
1213 struct cso_hash_iter iter =
1214 cso_find_state_template(ctx->cache,
1215 hash_key, CSO_SAMPLER,
1216 (void *) templ, key_size);
1217
1218 if (cso_hash_iter_is_null(iter)) {
1219 cso = MALLOC(sizeof(struct cso_sampler));
1220 if (!cso)
1221 return PIPE_ERROR_OUT_OF_MEMORY;
1222
1223 memcpy(&cso->state, templ, sizeof(*templ));
1224 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1225 cso->delete_state =
1226 (cso_state_callback) ctx->pipe->delete_sampler_state;
1227 cso->context = ctx->pipe;
1228 cso->hash_key = hash_key;
1229
1230 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1231 if (cso_hash_iter_is_null(iter)) {
1232 FREE(cso);
1233 return PIPE_ERROR_OUT_OF_MEMORY;
1234 }
1235 }
1236 else {
1237 cso = cso_hash_iter_data(iter);
1238 }
1239
1240 ctx->samplers[shader_stage].cso_samplers[idx] = cso;
1241 ctx->samplers[shader_stage].samplers[idx] = cso->data;
1242 } else {
1243 ctx->samplers[shader_stage].cso_samplers[idx] = NULL;
1244 ctx->samplers[shader_stage].samplers[idx] = NULL;
1245 }
1246
1247 return PIPE_OK;
1248 }
1249
1250
1251 /**
1252 * Send staged sampler state to the driver.
1253 */
1254 void
1255 cso_single_sampler_done(struct cso_context *ctx,
1256 enum pipe_shader_type shader_stage)
1257 {
1258 struct sampler_info *info = &ctx->samplers[shader_stage];
1259 const unsigned old_nr_samplers = info->nr_samplers;
1260 unsigned i;
1261
1262 /* find highest non-null sampler */
1263 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1264 if (info->samplers[i - 1] != NULL)
1265 break;
1266 }
1267
1268 info->nr_samplers = i;
1269 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1270 MAX2(old_nr_samplers, info->nr_samplers),
1271 info->samplers);
1272 }
1273
1274
1275 /*
1276  * If the function encounters any errors it will return the
1277  * last one, so that we always try to set as many samplers
1278  * as possible.
1279  */
1280 enum pipe_error
1281 cso_set_samplers(struct cso_context *ctx,
1282 enum pipe_shader_type shader_stage,
1283 unsigned nr,
1284 const struct pipe_sampler_state **templates)
1285 {
1286 struct sampler_info *info = &ctx->samplers[shader_stage];
1287 unsigned i;
1288 enum pipe_error temp, error = PIPE_OK;
1289
1290 for (i = 0; i < nr; i++) {
1291 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1292 if (temp != PIPE_OK)
1293 error = temp;
1294 }
1295
1296 for ( ; i < info->nr_samplers; i++) {
1297 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1298 if (temp != PIPE_OK)
1299 error = temp;
1300 }
1301
1302 cso_single_sampler_done(ctx, shader_stage);
1303
1304 return error;
1305 }
1306
1307 static void
1308 cso_save_fragment_samplers(struct cso_context *ctx)
1309 {
1310 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1311 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1312
1313 saved->nr_samplers = info->nr_samplers;
1314 memcpy(saved->cso_samplers, info->cso_samplers, info->nr_samplers *
1315 sizeof(*info->cso_samplers));
1316 memcpy(saved->samplers, info->samplers, info->nr_samplers *
1317 sizeof(*info->samplers));
1318 }
1319
1320
1321 static void
1322 cso_restore_fragment_samplers(struct cso_context *ctx)
1323 {
1324 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1325 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1326 int delta = (int)info->nr_samplers - saved->nr_samplers;
1327
1328 memcpy(info->cso_samplers, saved->cso_samplers,
1329 saved->nr_samplers * sizeof(*info->cso_samplers));
1330 memcpy(info->samplers, saved->samplers,
1331 saved->nr_samplers * sizeof(*info->samplers));
1332
1333 if (delta > 0) {
1334 memset(&info->cso_samplers[saved->nr_samplers], 0,
1335 delta * sizeof(*info->cso_samplers));
1336 memset(&info->samplers[saved->nr_samplers], 0,
1337 delta * sizeof(*info->samplers));
1338 }
1339
1340 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1341 }
1342
1343
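/**
 * Set sampler views for a shader stage.  Fragment-stage views are also
 * reference-counted and tracked here so they can be saved and restored
 * around meta operations; other stages are passed straight to the driver.
 */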
1344 void
1345 cso_set_sampler_views(struct cso_context *ctx,
1346 enum pipe_shader_type shader_stage,
1347 unsigned count,
1348 struct pipe_sampler_view **views)
1349 {
1350 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1351 unsigned i;
1352 boolean any_change = FALSE;
1353
1354 /* reference new views */
1355 for (i = 0; i < count; i++) {
1356 any_change |= ctx->fragment_views[i] != views[i];
1357 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1358 }
1359 /* unref extra old views, if any */
1360 for (; i < ctx->nr_fragment_views; i++) {
1361 any_change |= ctx->fragment_views[i] != NULL;
1362 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1363 }
1364
1365 /* bind the new sampler views */
1366 if (any_change) {
1367 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1368 MAX2(ctx->nr_fragment_views, count),
1369 ctx->fragment_views);
1370 }
1371
1372 ctx->nr_fragment_views = count;
1373 }
1374 else
1375 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1376 }
1377
1378
1379 static void
1380 cso_save_fragment_sampler_views(struct cso_context *ctx)
1381 {
1382 unsigned i;
1383
1384 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1385
1386 for (i = 0; i < ctx->nr_fragment_views; i++) {
1387 assert(!ctx->fragment_views_saved[i]);
1388 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1389 ctx->fragment_views[i]);
1390 }
1391 }
1392
1393
1394 static void
1395 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1396 {
1397 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1398 unsigned num;
1399
1400 for (i = 0; i < nr_saved; i++) {
1401 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1402 /* move the reference from one pointer to another */
1403 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1404 ctx->fragment_views_saved[i] = NULL;
1405 }
1406 for (; i < ctx->nr_fragment_views; i++) {
1407 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1408 }
1409
1410 num = MAX2(ctx->nr_fragment_views, nr_saved);
1411
1412 /* bind the old/saved sampler views */
1413 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1414 ctx->fragment_views);
1415
1416 ctx->nr_fragment_views = nr_saved;
1417 ctx->nr_fragment_views_saved = 0;
1418 }
1419
1420
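/**
 * Set shader images.  Only fragment image slot 0 is tracked here, so only
 * that slot can be saved/restored via CSO_BIT_FRAGMENT_IMAGE0.
 */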
1421 void
1422 cso_set_shader_images(struct cso_context *ctx,
1423 enum pipe_shader_type shader_stage,
1424 unsigned start, unsigned count,
1425 struct pipe_image_view *images)
1426 {
1427 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1428 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1429 }
1430
1431 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1432 }
1433
1434
1435 static void
1436 cso_save_fragment_image0(struct cso_context *ctx)
1437 {
1438 util_copy_image_view(&ctx->fragment_image0_saved,
1439 &ctx->fragment_image0_current);
1440 }
1441
1442
1443 static void
1444 cso_restore_fragment_image0(struct cso_context *ctx)
1445 {
1446 cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1447 &ctx->fragment_image0_saved);
1448 }
1449
1450
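/**
 * Set stream output targets.  The targets are reference-counted so that the
 * currently bound set can be saved and restored around meta operations.
 */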
1451 void
1452 cso_set_stream_outputs(struct cso_context *ctx,
1453 unsigned num_targets,
1454 struct pipe_stream_output_target **targets,
1455 const unsigned *offsets)
1456 {
1457 struct pipe_context *pipe = ctx->pipe;
1458 uint i;
1459
1460 if (!ctx->has_streamout) {
1461 assert(num_targets == 0);
1462 return;
1463 }
1464
1465 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1466 /* Nothing to do. */
1467 return;
1468 }
1469
1470 /* reference new targets */
1471 for (i = 0; i < num_targets; i++) {
1472 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1473 }
1474 /* unref extra old targets, if any */
1475 for (; i < ctx->nr_so_targets; i++) {
1476 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1477 }
1478
1479 pipe->set_stream_output_targets(pipe, num_targets, targets,
1480 offsets);
1481 ctx->nr_so_targets = num_targets;
1482 }
1483
1484 static void
1485 cso_save_stream_outputs(struct cso_context *ctx)
1486 {
1487 uint i;
1488
1489 if (!ctx->has_streamout) {
1490 return;
1491 }
1492
1493 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1494
1495 for (i = 0; i < ctx->nr_so_targets; i++) {
1496 assert(!ctx->so_targets_saved[i]);
1497 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1498 }
1499 }
1500
1501 static void
1502 cso_restore_stream_outputs(struct cso_context *ctx)
1503 {
1504 struct pipe_context *pipe = ctx->pipe;
1505 uint i;
1506 unsigned offset[PIPE_MAX_SO_BUFFERS];
1507
1508 if (!ctx->has_streamout) {
1509 return;
1510 }
1511
1512 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1513 /* Nothing to do. */
1514 return;
1515 }
1516
1517 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1518 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1519 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1520 /* move the reference from one pointer to another */
1521 ctx->so_targets[i] = ctx->so_targets_saved[i];
1522 ctx->so_targets_saved[i] = NULL;
1523 /* -1 means append */
1524 offset[i] = (unsigned)-1;
1525 }
1526 for (; i < ctx->nr_so_targets; i++) {
1527 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1528 }
1529
1530 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1531 ctx->so_targets, offset);
1532
1533 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1534 ctx->nr_so_targets_saved = 0;
1535 }
1536
1537 /* constant buffers */
1538
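/**
 * Set a constant buffer for a shader stage.  Slot 0 is additionally tracked
 * in aux_constbuf_current[] so it can be saved and restored with
 * cso_save_constant_buffer_slot0() / cso_restore_constant_buffer_slot0().
 */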
1539 void
1540 cso_set_constant_buffer(struct cso_context *cso,
1541 enum pipe_shader_type shader_stage,
1542 unsigned index, struct pipe_constant_buffer *cb)
1543 {
1544 struct pipe_context *pipe = cso->pipe;
1545
1546 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1547
1548 if (index == 0) {
1549 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1550 }
1551 }
1552
1553 void
1554 cso_set_constant_buffer_resource(struct cso_context *cso,
1555 enum pipe_shader_type shader_stage,
1556 unsigned index,
1557 struct pipe_resource *buffer)
1558 {
1559 if (buffer) {
1560 struct pipe_constant_buffer cb;
1561 cb.buffer = buffer;
1562 cb.buffer_offset = 0;
1563 cb.buffer_size = buffer->width0;
1564 cb.user_buffer = NULL;
1565 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1566 } else {
1567 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1568 }
1569 }
1570
1571 void
1572 cso_save_constant_buffer_slot0(struct cso_context *cso,
1573 enum pipe_shader_type shader_stage)
1574 {
1575 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1576 &cso->aux_constbuf_current[shader_stage]);
1577 }
1578
1579 void
1580 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1581 enum pipe_shader_type shader_stage)
1582 {
1583 cso_set_constant_buffer(cso, shader_stage, 0,
1584 &cso->aux_constbuf_saved[shader_stage]);
1585 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1586 NULL);
1587 }
1588
1589
1590 /**
1591 * Save all the CSO state items specified by the state_mask bitmask
1592 * of CSO_BIT_x flags.
1593 */
1594 void
1595 cso_save_state(struct cso_context *cso, unsigned state_mask)
1596 {
1597 assert(cso->saved_state == 0);
1598
1599 cso->saved_state = state_mask;
1600
1601 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1602 cso_save_aux_vertex_buffer_slot(cso);
1603 if (state_mask & CSO_BIT_BLEND)
1604 cso_save_blend(cso);
1605 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1606 cso_save_depth_stencil_alpha(cso);
1607 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1608 cso_save_fragment_samplers(cso);
1609 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1610 cso_save_fragment_sampler_views(cso);
1611 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1612 cso_save_fragment_shader(cso);
1613 if (state_mask & CSO_BIT_FRAMEBUFFER)
1614 cso_save_framebuffer(cso);
1615 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1616 cso_save_geometry_shader(cso);
1617 if (state_mask & CSO_BIT_MIN_SAMPLES)
1618 cso_save_min_samples(cso);
1619 if (state_mask & CSO_BIT_RASTERIZER)
1620 cso_save_rasterizer(cso);
1621 if (state_mask & CSO_BIT_RENDER_CONDITION)
1622 cso_save_render_condition(cso);
1623 if (state_mask & CSO_BIT_SAMPLE_MASK)
1624 cso_save_sample_mask(cso);
1625 if (state_mask & CSO_BIT_STENCIL_REF)
1626 cso_save_stencil_ref(cso);
1627 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1628 cso_save_stream_outputs(cso);
1629 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1630 cso_save_tessctrl_shader(cso);
1631 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1632 cso_save_tesseval_shader(cso);
1633 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1634 cso_save_vertex_elements(cso);
1635 if (state_mask & CSO_BIT_VERTEX_SHADER)
1636 cso_save_vertex_shader(cso);
1637 if (state_mask & CSO_BIT_VIEWPORT)
1638 cso_save_viewport(cso);
1639 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1640 cso->pipe->set_active_query_state(cso->pipe, false);
1641 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1642 cso_save_fragment_image0(cso);
1643 }
1644
1645
1646 /**
1647 * Restore the state which was saved by cso_save_state().
1648 */
1649 void
1650 cso_restore_state(struct cso_context *cso)
1651 {
1652 unsigned state_mask = cso->saved_state;
1653
1654 assert(state_mask);
1655
1656 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1657 cso_restore_aux_vertex_buffer_slot(cso);
1658 if (state_mask & CSO_BIT_BLEND)
1659 cso_restore_blend(cso);
1660 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1661 cso_restore_depth_stencil_alpha(cso);
1662 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1663 cso_restore_fragment_samplers(cso);
1664 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1665 cso_restore_fragment_sampler_views(cso);
1666 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1667 cso_restore_fragment_shader(cso);
1668 if (state_mask & CSO_BIT_FRAMEBUFFER)
1669 cso_restore_framebuffer(cso);
1670 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1671 cso_restore_geometry_shader(cso);
1672 if (state_mask & CSO_BIT_MIN_SAMPLES)
1673 cso_restore_min_samples(cso);
1674 if (state_mask & CSO_BIT_RASTERIZER)
1675 cso_restore_rasterizer(cso);
1676 if (state_mask & CSO_BIT_RENDER_CONDITION)
1677 cso_restore_render_condition(cso);
1678 if (state_mask & CSO_BIT_SAMPLE_MASK)
1679 cso_restore_sample_mask(cso);
1680 if (state_mask & CSO_BIT_STENCIL_REF)
1681 cso_restore_stencil_ref(cso);
1682 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1683 cso_restore_stream_outputs(cso);
1684 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1685 cso_restore_tessctrl_shader(cso);
1686 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1687 cso_restore_tesseval_shader(cso);
1688 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1689 cso_restore_vertex_elements(cso);
1690 if (state_mask & CSO_BIT_VERTEX_SHADER)
1691 cso_restore_vertex_shader(cso);
1692 if (state_mask & CSO_BIT_VIEWPORT)
1693 cso_restore_viewport(cso);
1694 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1695 cso->pipe->set_active_query_state(cso->pipe, true);
1696 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1697 cso_restore_fragment_image0(cso);
1698
1699 cso->saved_state = 0;
1700 }
1701
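/*
 * A minimal usage sketch (hypothetical caller; the exact bit set depends on
 * which state the meta operation clobbers):
 *
 *    cso_save_state(cso, CSO_BIT_BLEND |
 *                        CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_FRAMEBUFFER |
 *                        CSO_BIT_VIEWPORT);
 *    ... bind temporary state and draw ...
 *    cso_restore_state(cso);
 */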
1702
1703
1704 /* drawing */
1705
1706 void
1707 cso_set_index_buffer(struct cso_context *cso,
1708 const struct pipe_index_buffer *ib)
1709 {
1710 struct u_vbuf *vbuf = cso->vbuf;
1711
1712 if (vbuf) {
1713 u_vbuf_set_index_buffer(vbuf, ib);
1714 } else {
1715 struct pipe_context *pipe = cso->pipe;
1716 pipe->set_index_buffer(pipe, ib);
1717 }
1718 }
1719
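/**
 * Draw.  The call is routed through u_vbuf when it is installed, so that any
 * unsupported vertex buffer/element setups are translated first.
 */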
1720 void
1721 cso_draw_vbo(struct cso_context *cso,
1722 const struct pipe_draw_info *info)
1723 {
1724 struct u_vbuf *vbuf = cso->vbuf;
1725
1726 if (vbuf) {
1727 u_vbuf_draw_vbo(vbuf, info);
1728 } else {
1729 struct pipe_context *pipe = cso->pipe;
1730 pipe->draw_vbo(pipe, info);
1731 }
1732 }
1733
1734 void
1735 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1736 {
1737 struct pipe_draw_info info;
1738
1739 util_draw_init_info(&info);
1740
1741 info.mode = mode;
1742 info.start = start;
1743 info.count = count;
1744 info.min_index = start;
1745 info.max_index = start + count - 1;
1746
1747 cso_draw_vbo(cso, &info);
1748 }
1749
1750 void
1751 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1752 uint start, uint count,
1753 uint start_instance, uint instance_count)
1754 {
1755 struct pipe_draw_info info;
1756
1757 util_draw_init_info(&info);
1758
1759 info.mode = mode;
1760 info.start = start;
1761 info.count = count;
1762 info.min_index = start;
1763 info.max_index = start + count - 1;
1764 info.start_instance = start_instance;
1765 info.instance_count = instance_count;
1766
1767 cso_draw_vbo(cso, &info);
1768 }