1 /**************************************************************************
3 * Copyright 2007 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
54 * Per-shader sampler information.
58 struct cso_sampler
*cso_samplers
[PIPE_MAX_SAMPLERS
];
59 void *samplers
[PIPE_MAX_SAMPLERS
];
65 struct pipe_context
*pipe
;
66 struct cso_cache
*cache
;
69 boolean has_geometry_shader
;
70 boolean has_tessellation
;
71 boolean has_compute_shader
;
72 boolean has_streamout
;
74 unsigned saved_state
; /**< bitmask of CSO_BIT_x flags */
76 struct pipe_sampler_view
*fragment_views
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
77 unsigned nr_fragment_views
;
79 struct pipe_sampler_view
*fragment_views_saved
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
80 unsigned nr_fragment_views_saved
;
82 struct sampler_info fragment_samplers_saved
;
83 struct sampler_info samplers
[PIPE_SHADER_TYPES
];
85 /* Temporary number until cso_single_sampler_done is called.
86 * It tracks the highest sampler seen in cso_single_sampler.
90 struct pipe_vertex_buffer vertex_buffer0_current
;
91 struct pipe_vertex_buffer vertex_buffer0_saved
;
93 struct pipe_constant_buffer aux_constbuf_current
[PIPE_SHADER_TYPES
];
94 struct pipe_constant_buffer aux_constbuf_saved
[PIPE_SHADER_TYPES
];
96 struct pipe_image_view fragment_image0_current
;
97 struct pipe_image_view fragment_image0_saved
;
99 unsigned nr_so_targets
;
100 struct pipe_stream_output_target
*so_targets
[PIPE_MAX_SO_BUFFERS
];
102 unsigned nr_so_targets_saved
;
103 struct pipe_stream_output_target
*so_targets_saved
[PIPE_MAX_SO_BUFFERS
];
105 /** Current and saved state.
106 * The saved state is used as a 1-deep stack.
108 void *blend
, *blend_saved
;
109 void *depth_stencil
, *depth_stencil_saved
;
110 void *rasterizer
, *rasterizer_saved
;
111 void *fragment_shader
, *fragment_shader_saved
;
112 void *vertex_shader
, *vertex_shader_saved
;
113 void *geometry_shader
, *geometry_shader_saved
;
114 void *tessctrl_shader
, *tessctrl_shader_saved
;
115 void *tesseval_shader
, *tesseval_shader_saved
;
116 void *compute_shader
;
117 void *velements
, *velements_saved
;
118 struct pipe_query
*render_condition
, *render_condition_saved
;
119 uint render_condition_mode
, render_condition_mode_saved
;
120 boolean render_condition_cond
, render_condition_cond_saved
;
122 struct pipe_framebuffer_state fb
, fb_saved
;
123 struct pipe_viewport_state vp
, vp_saved
;
124 struct pipe_blend_color blend_color
;
125 unsigned sample_mask
, sample_mask_saved
;
126 unsigned min_samples
, min_samples_saved
;
127 struct pipe_stencil_ref stencil_ref
, stencil_ref_saved
;
130 struct pipe_context
*cso_get_pipe_context(struct cso_context
*cso
)
135 static boolean
delete_blend_state(struct cso_context
*ctx
, void *state
)
137 struct cso_blend
*cso
= (struct cso_blend
*)state
;
139 if (ctx
->blend
== cso
->data
)
142 if (cso
->delete_state
)
143 cso
->delete_state(cso
->context
, cso
->data
);
148 static boolean
delete_depth_stencil_state(struct cso_context
*ctx
, void *state
)
150 struct cso_depth_stencil_alpha
*cso
=
151 (struct cso_depth_stencil_alpha
*)state
;
153 if (ctx
->depth_stencil
== cso
->data
)
156 if (cso
->delete_state
)
157 cso
->delete_state(cso
->context
, cso
->data
);
163 static boolean
delete_sampler_state(UNUSED
struct cso_context
*ctx
, void *state
)
165 struct cso_sampler
*cso
= (struct cso_sampler
*)state
;
166 if (cso
->delete_state
)
167 cso
->delete_state(cso
->context
, cso
->data
);
172 static boolean
delete_rasterizer_state(struct cso_context
*ctx
, void *state
)
174 struct cso_rasterizer
*cso
= (struct cso_rasterizer
*)state
;
176 if (ctx
->rasterizer
== cso
->data
)
178 if (cso
->delete_state
)
179 cso
->delete_state(cso
->context
, cso
->data
);
184 static boolean
delete_vertex_elements(struct cso_context
*ctx
,
187 struct cso_velements
*cso
= (struct cso_velements
*)state
;
189 if (ctx
->velements
== cso
->data
)
192 if (cso
->delete_state
)
193 cso
->delete_state(cso
->context
, cso
->data
);
199 static inline boolean
delete_cso(struct cso_context
*ctx
,
200 void *state
, enum cso_cache_type type
)
204 return delete_blend_state(ctx
, state
);
206 return delete_sampler_state(ctx
, state
);
207 case CSO_DEPTH_STENCIL_ALPHA
:
208 return delete_depth_stencil_state(ctx
, state
);
210 return delete_rasterizer_state(ctx
, state
);
212 return delete_vertex_elements(ctx
, state
);
221 sanitize_hash(struct cso_hash
*hash
, enum cso_cache_type type
,
222 int max_size
, void *user_data
)
224 struct cso_context
*ctx
= (struct cso_context
*)user_data
;
225 /* if we're approach the maximum size, remove fourth of the entries
226 * otherwise every subsequent call will go through the same */
227 int hash_size
= cso_hash_size(hash
);
228 int max_entries
= (max_size
> hash_size
) ? max_size
: hash_size
;
229 int to_remove
= (max_size
< max_entries
) * max_entries
/4;
230 struct cso_hash_iter iter
;
231 struct cso_sampler
**samplers_to_restore
= NULL
;
232 unsigned to_restore
= 0;
234 if (hash_size
> max_size
)
235 to_remove
+= hash_size
- max_size
;
240 if (type
== CSO_SAMPLER
) {
243 samplers_to_restore
= MALLOC(PIPE_SHADER_TYPES
* PIPE_MAX_SAMPLERS
*
244 sizeof(*samplers_to_restore
));
246 /* Temporarily remove currently bound sampler states from the hash
247 * table, to prevent them from being deleted
249 for (i
= 0; i
< PIPE_SHADER_TYPES
; i
++) {
250 for (j
= 0; j
< PIPE_MAX_SAMPLERS
; j
++) {
251 struct cso_sampler
*sampler
= ctx
->samplers
[i
].cso_samplers
[j
];
253 if (sampler
&& cso_hash_take(hash
, sampler
->hash_key
))
254 samplers_to_restore
[to_restore
++] = sampler
;
259 iter
= cso_hash_first_node(hash
);
261 /*remove elements until we're good */
262 /*fixme: currently we pick the nodes to remove at random*/
263 void *cso
= cso_hash_iter_data(iter
);
268 if (delete_cso(ctx
, cso
, type
)) {
269 iter
= cso_hash_erase(hash
, iter
);
272 iter
= cso_hash_iter_next(iter
);
275 if (type
== CSO_SAMPLER
) {
276 /* Put currently bound sampler states back into the hash table */
277 while (to_restore
--) {
278 struct cso_sampler
*sampler
= samplers_to_restore
[to_restore
];
280 cso_hash_insert(hash
, sampler
->hash_key
, sampler
);
283 FREE(samplers_to_restore
);
287 static void cso_init_vbuf(struct cso_context
*cso
, unsigned flags
)
289 struct u_vbuf_caps caps
;
290 bool uses_user_vertex_buffers
= !(flags
& CSO_NO_USER_VERTEX_BUFFERS
);
292 u_vbuf_get_caps(cso
->pipe
->screen
, &caps
);
294 /* Enable u_vbuf if needed. */
295 if (caps
.fallback_always
||
296 (uses_user_vertex_buffers
&&
297 caps
.fallback_only_for_user_vbuffers
)) {
298 cso
->vbuf
= u_vbuf_create(cso
->pipe
, &caps
);
303 cso_create_context(struct pipe_context
*pipe
, unsigned flags
)
305 struct cso_context
*ctx
= CALLOC_STRUCT(cso_context
);
309 ctx
->cache
= cso_cache_create();
310 if (ctx
->cache
== NULL
)
312 cso_cache_set_sanitize_callback(ctx
->cache
,
317 ctx
->sample_mask
= ~0;
319 cso_init_vbuf(ctx
, flags
);
321 /* Enable for testing: */
322 if (0) cso_set_maximum_cache_size( ctx
->cache
, 4 );
324 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_GEOMETRY
,
325 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
326 ctx
->has_geometry_shader
= TRUE
;
328 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_TESS_CTRL
,
329 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
330 ctx
->has_tessellation
= TRUE
;
332 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_COMPUTE
,
333 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
335 pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_COMPUTE
,
336 PIPE_SHADER_CAP_SUPPORTED_IRS
);
337 if (supported_irs
& ((1 << PIPE_SHADER_IR_TGSI
) |
338 (1 << PIPE_SHADER_IR_NIR
))) {
339 ctx
->has_compute_shader
= TRUE
;
342 if (pipe
->screen
->get_param(pipe
->screen
,
343 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS
) != 0) {
344 ctx
->has_streamout
= TRUE
;
347 ctx
->max_sampler_seen
= -1;
351 cso_destroy_context( ctx
);
356 * Free the CSO context.
358 void cso_destroy_context( struct cso_context
*ctx
)
363 ctx
->pipe
->bind_blend_state( ctx
->pipe
, NULL
);
364 ctx
->pipe
->bind_rasterizer_state( ctx
->pipe
, NULL
);
367 static struct pipe_sampler_view
*views
[PIPE_MAX_SHADER_SAMPLER_VIEWS
] = { NULL
};
368 static void *zeros
[PIPE_MAX_SAMPLERS
] = { NULL
};
369 struct pipe_screen
*scr
= ctx
->pipe
->screen
;
370 enum pipe_shader_type sh
;
371 for (sh
= 0; sh
< PIPE_SHADER_TYPES
; sh
++) {
372 int maxsam
= scr
->get_shader_param(scr
, sh
,
373 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS
);
374 int maxview
= scr
->get_shader_param(scr
, sh
,
375 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS
);
376 assert(maxsam
<= PIPE_MAX_SAMPLERS
);
377 assert(maxview
<= PIPE_MAX_SHADER_SAMPLER_VIEWS
);
379 ctx
->pipe
->bind_sampler_states(ctx
->pipe
, sh
, 0, maxsam
, zeros
);
382 ctx
->pipe
->set_sampler_views(ctx
->pipe
, sh
, 0, maxview
, views
);
387 ctx
->pipe
->bind_depth_stencil_alpha_state( ctx
->pipe
, NULL
);
388 ctx
->pipe
->bind_fs_state( ctx
->pipe
, NULL
);
389 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_FRAGMENT
, 0, NULL
);
390 ctx
->pipe
->bind_vs_state( ctx
->pipe
, NULL
);
391 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_VERTEX
, 0, NULL
);
392 if (ctx
->has_geometry_shader
) {
393 ctx
->pipe
->bind_gs_state(ctx
->pipe
, NULL
);
394 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_GEOMETRY
, 0, NULL
);
396 if (ctx
->has_tessellation
) {
397 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, NULL
);
398 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_TESS_CTRL
, 0, NULL
);
399 ctx
->pipe
->bind_tes_state(ctx
->pipe
, NULL
);
400 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_TESS_EVAL
, 0, NULL
);
402 if (ctx
->has_compute_shader
) {
403 ctx
->pipe
->bind_compute_state(ctx
->pipe
, NULL
);
404 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_COMPUTE
, 0, NULL
);
406 ctx
->pipe
->bind_vertex_elements_state( ctx
->pipe
, NULL
);
408 if (ctx
->has_streamout
)
409 ctx
->pipe
->set_stream_output_targets(ctx
->pipe
, 0, NULL
, NULL
);
412 for (i
= 0; i
< ctx
->nr_fragment_views
; i
++) {
413 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
415 for (i
= 0; i
< ctx
->nr_fragment_views_saved
; i
++) {
416 pipe_sampler_view_reference(&ctx
->fragment_views_saved
[i
], NULL
);
419 util_unreference_framebuffer_state(&ctx
->fb
);
420 util_unreference_framebuffer_state(&ctx
->fb_saved
);
422 pipe_vertex_buffer_unreference(&ctx
->vertex_buffer0_current
);
423 pipe_vertex_buffer_unreference(&ctx
->vertex_buffer0_saved
);
425 for (i
= 0; i
< PIPE_SHADER_TYPES
; i
++) {
426 pipe_resource_reference(&ctx
->aux_constbuf_current
[i
].buffer
, NULL
);
427 pipe_resource_reference(&ctx
->aux_constbuf_saved
[i
].buffer
, NULL
);
430 pipe_resource_reference(&ctx
->fragment_image0_current
.resource
, NULL
);
431 pipe_resource_reference(&ctx
->fragment_image0_saved
.resource
, NULL
);
433 for (i
= 0; i
< PIPE_MAX_SO_BUFFERS
; i
++) {
434 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
435 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], NULL
);
439 cso_cache_delete( ctx
->cache
);
444 u_vbuf_destroy(ctx
->vbuf
);
/* These functions either find the state matching the given template in
 * the cache, or create a new state from the template, insert it into the
 * cache and return it.
 *
 * If the driver returns 0 from the create method, the data member of the
 * cso is assigned to be the template itself.
 */
459 enum pipe_error
cso_set_blend(struct cso_context
*ctx
,
460 const struct pipe_blend_state
*templ
)
462 unsigned key_size
, hash_key
;
463 struct cso_hash_iter iter
;
466 key_size
= templ
->independent_blend_enable
?
467 sizeof(struct pipe_blend_state
) :
468 (char *)&(templ
->rt
[1]) - (char *)templ
;
469 hash_key
= cso_construct_key((void*)templ
, key_size
);
470 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_BLEND
,
471 (void*)templ
, key_size
);
473 if (cso_hash_iter_is_null(iter
)) {
474 struct cso_blend
*cso
= MALLOC(sizeof(struct cso_blend
));
476 return PIPE_ERROR_OUT_OF_MEMORY
;
478 memset(&cso
->state
, 0, sizeof cso
->state
);
479 memcpy(&cso
->state
, templ
, key_size
);
480 cso
->data
= ctx
->pipe
->create_blend_state(ctx
->pipe
, &cso
->state
);
481 cso
->delete_state
= (cso_state_callback
)ctx
->pipe
->delete_blend_state
;
482 cso
->context
= ctx
->pipe
;
484 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_BLEND
, cso
);
485 if (cso_hash_iter_is_null(iter
)) {
487 return PIPE_ERROR_OUT_OF_MEMORY
;
493 handle
= ((struct cso_blend
*)cso_hash_iter_data(iter
))->data
;
496 if (ctx
->blend
!= handle
) {
498 ctx
->pipe
->bind_blend_state(ctx
->pipe
, handle
);
504 cso_save_blend(struct cso_context
*ctx
)
506 assert(!ctx
->blend_saved
);
507 ctx
->blend_saved
= ctx
->blend
;
511 cso_restore_blend(struct cso_context
*ctx
)
513 if (ctx
->blend
!= ctx
->blend_saved
) {
514 ctx
->blend
= ctx
->blend_saved
;
515 ctx
->pipe
->bind_blend_state(ctx
->pipe
, ctx
->blend_saved
);
517 ctx
->blend_saved
= NULL
;
523 cso_set_depth_stencil_alpha(struct cso_context
*ctx
,
524 const struct pipe_depth_stencil_alpha_state
*templ
)
526 unsigned key_size
= sizeof(struct pipe_depth_stencil_alpha_state
);
527 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
528 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
530 CSO_DEPTH_STENCIL_ALPHA
,
531 (void*)templ
, key_size
);
534 if (cso_hash_iter_is_null(iter
)) {
535 struct cso_depth_stencil_alpha
*cso
=
536 MALLOC(sizeof(struct cso_depth_stencil_alpha
));
538 return PIPE_ERROR_OUT_OF_MEMORY
;
540 memcpy(&cso
->state
, templ
, sizeof(*templ
));
541 cso
->data
= ctx
->pipe
->create_depth_stencil_alpha_state(ctx
->pipe
,
544 (cso_state_callback
)ctx
->pipe
->delete_depth_stencil_alpha_state
;
545 cso
->context
= ctx
->pipe
;
547 iter
= cso_insert_state(ctx
->cache
, hash_key
,
548 CSO_DEPTH_STENCIL_ALPHA
, cso
);
549 if (cso_hash_iter_is_null(iter
)) {
551 return PIPE_ERROR_OUT_OF_MEMORY
;
557 handle
= ((struct cso_depth_stencil_alpha
*)
558 cso_hash_iter_data(iter
))->data
;
561 if (ctx
->depth_stencil
!= handle
) {
562 ctx
->depth_stencil
= handle
;
563 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
, handle
);
569 cso_save_depth_stencil_alpha(struct cso_context
*ctx
)
571 assert(!ctx
->depth_stencil_saved
);
572 ctx
->depth_stencil_saved
= ctx
->depth_stencil
;
576 cso_restore_depth_stencil_alpha(struct cso_context
*ctx
)
578 if (ctx
->depth_stencil
!= ctx
->depth_stencil_saved
) {
579 ctx
->depth_stencil
= ctx
->depth_stencil_saved
;
580 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
,
581 ctx
->depth_stencil_saved
);
583 ctx
->depth_stencil_saved
= NULL
;
588 enum pipe_error
cso_set_rasterizer(struct cso_context
*ctx
,
589 const struct pipe_rasterizer_state
*templ
)
591 unsigned key_size
= sizeof(struct pipe_rasterizer_state
);
592 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
593 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
596 (void*)templ
, key_size
);
599 /* We can't have both point_quad_rasterization (sprites) and point_smooth
600 * (round AA points) enabled at the same time.
602 assert(!(templ
->point_quad_rasterization
&& templ
->point_smooth
));
604 if (cso_hash_iter_is_null(iter
)) {
605 struct cso_rasterizer
*cso
= MALLOC(sizeof(struct cso_rasterizer
));
607 return PIPE_ERROR_OUT_OF_MEMORY
;
609 memcpy(&cso
->state
, templ
, sizeof(*templ
));
610 cso
->data
= ctx
->pipe
->create_rasterizer_state(ctx
->pipe
, &cso
->state
);
612 (cso_state_callback
)ctx
->pipe
->delete_rasterizer_state
;
613 cso
->context
= ctx
->pipe
;
615 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_RASTERIZER
, cso
);
616 if (cso_hash_iter_is_null(iter
)) {
618 return PIPE_ERROR_OUT_OF_MEMORY
;
624 handle
= ((struct cso_rasterizer
*)cso_hash_iter_data(iter
))->data
;
627 if (ctx
->rasterizer
!= handle
) {
628 ctx
->rasterizer
= handle
;
629 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, handle
);
635 cso_save_rasterizer(struct cso_context
*ctx
)
637 assert(!ctx
->rasterizer_saved
);
638 ctx
->rasterizer_saved
= ctx
->rasterizer
;
642 cso_restore_rasterizer(struct cso_context
*ctx
)
644 if (ctx
->rasterizer
!= ctx
->rasterizer_saved
) {
645 ctx
->rasterizer
= ctx
->rasterizer_saved
;
646 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, ctx
->rasterizer_saved
);
648 ctx
->rasterizer_saved
= NULL
;
652 void cso_set_fragment_shader_handle(struct cso_context
*ctx
, void *handle
)
654 if (ctx
->fragment_shader
!= handle
) {
655 ctx
->fragment_shader
= handle
;
656 ctx
->pipe
->bind_fs_state(ctx
->pipe
, handle
);
660 void cso_delete_fragment_shader(struct cso_context
*ctx
, void *handle
)
662 if (handle
== ctx
->fragment_shader
) {
663 /* unbind before deleting */
664 ctx
->pipe
->bind_fs_state(ctx
->pipe
, NULL
);
665 ctx
->fragment_shader
= NULL
;
667 ctx
->pipe
->delete_fs_state(ctx
->pipe
, handle
);
671 cso_save_fragment_shader(struct cso_context
*ctx
)
673 assert(!ctx
->fragment_shader_saved
);
674 ctx
->fragment_shader_saved
= ctx
->fragment_shader
;
678 cso_restore_fragment_shader(struct cso_context
*ctx
)
680 if (ctx
->fragment_shader_saved
!= ctx
->fragment_shader
) {
681 ctx
->pipe
->bind_fs_state(ctx
->pipe
, ctx
->fragment_shader_saved
);
682 ctx
->fragment_shader
= ctx
->fragment_shader_saved
;
684 ctx
->fragment_shader_saved
= NULL
;
688 void cso_set_vertex_shader_handle(struct cso_context
*ctx
, void *handle
)
690 if (ctx
->vertex_shader
!= handle
) {
691 ctx
->vertex_shader
= handle
;
692 ctx
->pipe
->bind_vs_state(ctx
->pipe
, handle
);
696 void cso_delete_vertex_shader(struct cso_context
*ctx
, void *handle
)
698 if (handle
== ctx
->vertex_shader
) {
699 /* unbind before deleting */
700 ctx
->pipe
->bind_vs_state(ctx
->pipe
, NULL
);
701 ctx
->vertex_shader
= NULL
;
703 ctx
->pipe
->delete_vs_state(ctx
->pipe
, handle
);
707 cso_save_vertex_shader(struct cso_context
*ctx
)
709 assert(!ctx
->vertex_shader_saved
);
710 ctx
->vertex_shader_saved
= ctx
->vertex_shader
;
714 cso_restore_vertex_shader(struct cso_context
*ctx
)
716 if (ctx
->vertex_shader_saved
!= ctx
->vertex_shader
) {
717 ctx
->pipe
->bind_vs_state(ctx
->pipe
, ctx
->vertex_shader_saved
);
718 ctx
->vertex_shader
= ctx
->vertex_shader_saved
;
720 ctx
->vertex_shader_saved
= NULL
;
724 void cso_set_framebuffer(struct cso_context
*ctx
,
725 const struct pipe_framebuffer_state
*fb
)
727 if (memcmp(&ctx
->fb
, fb
, sizeof(*fb
)) != 0) {
728 util_copy_framebuffer_state(&ctx
->fb
, fb
);
729 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, fb
);
734 cso_save_framebuffer(struct cso_context
*ctx
)
736 util_copy_framebuffer_state(&ctx
->fb_saved
, &ctx
->fb
);
740 cso_restore_framebuffer(struct cso_context
*ctx
)
742 if (memcmp(&ctx
->fb
, &ctx
->fb_saved
, sizeof(ctx
->fb
))) {
743 util_copy_framebuffer_state(&ctx
->fb
, &ctx
->fb_saved
);
744 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, &ctx
->fb
);
745 util_unreference_framebuffer_state(&ctx
->fb_saved
);
750 void cso_set_viewport(struct cso_context
*ctx
,
751 const struct pipe_viewport_state
*vp
)
753 if (memcmp(&ctx
->vp
, vp
, sizeof(*vp
))) {
755 ctx
->pipe
->set_viewport_states(ctx
->pipe
, 0, 1, vp
);
760 * Setup viewport state for given width and height (position is always (0,0)).
761 * Invert the Y axis if 'invert' is true.
764 cso_set_viewport_dims(struct cso_context
*ctx
,
765 float width
, float height
, boolean invert
)
767 struct pipe_viewport_state vp
;
768 vp
.scale
[0] = width
* 0.5f
;
769 vp
.scale
[1] = height
* (invert
? -0.5f
: 0.5f
);
771 vp
.translate
[0] = 0.5f
* width
;
772 vp
.translate
[1] = 0.5f
* height
;
773 vp
.translate
[2] = 0.5f
;
774 cso_set_viewport(ctx
, &vp
);
778 cso_save_viewport(struct cso_context
*ctx
)
780 ctx
->vp_saved
= ctx
->vp
;
785 cso_restore_viewport(struct cso_context
*ctx
)
787 if (memcmp(&ctx
->vp
, &ctx
->vp_saved
, sizeof(ctx
->vp
))) {
788 ctx
->vp
= ctx
->vp_saved
;
789 ctx
->pipe
->set_viewport_states(ctx
->pipe
, 0, 1, &ctx
->vp
);
794 void cso_set_blend_color(struct cso_context
*ctx
,
795 const struct pipe_blend_color
*bc
)
797 if (memcmp(&ctx
->blend_color
, bc
, sizeof(ctx
->blend_color
))) {
798 ctx
->blend_color
= *bc
;
799 ctx
->pipe
->set_blend_color(ctx
->pipe
, bc
);
803 void cso_set_sample_mask(struct cso_context
*ctx
, unsigned sample_mask
)
805 if (ctx
->sample_mask
!= sample_mask
) {
806 ctx
->sample_mask
= sample_mask
;
807 ctx
->pipe
->set_sample_mask(ctx
->pipe
, sample_mask
);
812 cso_save_sample_mask(struct cso_context
*ctx
)
814 ctx
->sample_mask_saved
= ctx
->sample_mask
;
818 cso_restore_sample_mask(struct cso_context
*ctx
)
820 cso_set_sample_mask(ctx
, ctx
->sample_mask_saved
);
823 void cso_set_min_samples(struct cso_context
*ctx
, unsigned min_samples
)
825 if (ctx
->min_samples
!= min_samples
&& ctx
->pipe
->set_min_samples
) {
826 ctx
->min_samples
= min_samples
;
827 ctx
->pipe
->set_min_samples(ctx
->pipe
, min_samples
);
832 cso_save_min_samples(struct cso_context
*ctx
)
834 ctx
->min_samples_saved
= ctx
->min_samples
;
838 cso_restore_min_samples(struct cso_context
*ctx
)
840 cso_set_min_samples(ctx
, ctx
->min_samples_saved
);
843 void cso_set_stencil_ref(struct cso_context
*ctx
,
844 const struct pipe_stencil_ref
*sr
)
846 if (memcmp(&ctx
->stencil_ref
, sr
, sizeof(ctx
->stencil_ref
))) {
847 ctx
->stencil_ref
= *sr
;
848 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, sr
);
853 cso_save_stencil_ref(struct cso_context
*ctx
)
855 ctx
->stencil_ref_saved
= ctx
->stencil_ref
;
860 cso_restore_stencil_ref(struct cso_context
*ctx
)
862 if (memcmp(&ctx
->stencil_ref
, &ctx
->stencil_ref_saved
,
863 sizeof(ctx
->stencil_ref
))) {
864 ctx
->stencil_ref
= ctx
->stencil_ref_saved
;
865 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, &ctx
->stencil_ref
);
869 void cso_set_render_condition(struct cso_context
*ctx
,
870 struct pipe_query
*query
,
872 enum pipe_render_cond_flag mode
)
874 struct pipe_context
*pipe
= ctx
->pipe
;
876 if (ctx
->render_condition
!= query
||
877 ctx
->render_condition_mode
!= mode
||
878 ctx
->render_condition_cond
!= condition
) {
879 pipe
->render_condition(pipe
, query
, condition
, mode
);
880 ctx
->render_condition
= query
;
881 ctx
->render_condition_cond
= condition
;
882 ctx
->render_condition_mode
= mode
;
887 cso_save_render_condition(struct cso_context
*ctx
)
889 ctx
->render_condition_saved
= ctx
->render_condition
;
890 ctx
->render_condition_cond_saved
= ctx
->render_condition_cond
;
891 ctx
->render_condition_mode_saved
= ctx
->render_condition_mode
;
895 cso_restore_render_condition(struct cso_context
*ctx
)
897 cso_set_render_condition(ctx
, ctx
->render_condition_saved
,
898 ctx
->render_condition_cond_saved
,
899 ctx
->render_condition_mode_saved
);
902 void cso_set_geometry_shader_handle(struct cso_context
*ctx
, void *handle
)
904 assert(ctx
->has_geometry_shader
|| !handle
);
906 if (ctx
->has_geometry_shader
&& ctx
->geometry_shader
!= handle
) {
907 ctx
->geometry_shader
= handle
;
908 ctx
->pipe
->bind_gs_state(ctx
->pipe
, handle
);
912 void cso_delete_geometry_shader(struct cso_context
*ctx
, void *handle
)
914 if (handle
== ctx
->geometry_shader
) {
915 /* unbind before deleting */
916 ctx
->pipe
->bind_gs_state(ctx
->pipe
, NULL
);
917 ctx
->geometry_shader
= NULL
;
919 ctx
->pipe
->delete_gs_state(ctx
->pipe
, handle
);
923 cso_save_geometry_shader(struct cso_context
*ctx
)
925 if (!ctx
->has_geometry_shader
) {
929 assert(!ctx
->geometry_shader_saved
);
930 ctx
->geometry_shader_saved
= ctx
->geometry_shader
;
934 cso_restore_geometry_shader(struct cso_context
*ctx
)
936 if (!ctx
->has_geometry_shader
) {
940 if (ctx
->geometry_shader_saved
!= ctx
->geometry_shader
) {
941 ctx
->pipe
->bind_gs_state(ctx
->pipe
, ctx
->geometry_shader_saved
);
942 ctx
->geometry_shader
= ctx
->geometry_shader_saved
;
944 ctx
->geometry_shader_saved
= NULL
;
947 void cso_set_tessctrl_shader_handle(struct cso_context
*ctx
, void *handle
)
949 assert(ctx
->has_tessellation
|| !handle
);
951 if (ctx
->has_tessellation
&& ctx
->tessctrl_shader
!= handle
) {
952 ctx
->tessctrl_shader
= handle
;
953 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, handle
);
957 void cso_delete_tessctrl_shader(struct cso_context
*ctx
, void *handle
)
959 if (handle
== ctx
->tessctrl_shader
) {
960 /* unbind before deleting */
961 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, NULL
);
962 ctx
->tessctrl_shader
= NULL
;
964 ctx
->pipe
->delete_tcs_state(ctx
->pipe
, handle
);
968 cso_save_tessctrl_shader(struct cso_context
*ctx
)
970 if (!ctx
->has_tessellation
) {
974 assert(!ctx
->tessctrl_shader_saved
);
975 ctx
->tessctrl_shader_saved
= ctx
->tessctrl_shader
;
979 cso_restore_tessctrl_shader(struct cso_context
*ctx
)
981 if (!ctx
->has_tessellation
) {
985 if (ctx
->tessctrl_shader_saved
!= ctx
->tessctrl_shader
) {
986 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, ctx
->tessctrl_shader_saved
);
987 ctx
->tessctrl_shader
= ctx
->tessctrl_shader_saved
;
989 ctx
->tessctrl_shader_saved
= NULL
;
992 void cso_set_tesseval_shader_handle(struct cso_context
*ctx
, void *handle
)
994 assert(ctx
->has_tessellation
|| !handle
);
996 if (ctx
->has_tessellation
&& ctx
->tesseval_shader
!= handle
) {
997 ctx
->tesseval_shader
= handle
;
998 ctx
->pipe
->bind_tes_state(ctx
->pipe
, handle
);
1002 void cso_delete_tesseval_shader(struct cso_context
*ctx
, void *handle
)
1004 if (handle
== ctx
->tesseval_shader
) {
1005 /* unbind before deleting */
1006 ctx
->pipe
->bind_tes_state(ctx
->pipe
, NULL
);
1007 ctx
->tesseval_shader
= NULL
;
1009 ctx
->pipe
->delete_tes_state(ctx
->pipe
, handle
);
1013 cso_save_tesseval_shader(struct cso_context
*ctx
)
1015 if (!ctx
->has_tessellation
) {
1019 assert(!ctx
->tesseval_shader_saved
);
1020 ctx
->tesseval_shader_saved
= ctx
->tesseval_shader
;
1024 cso_restore_tesseval_shader(struct cso_context
*ctx
)
1026 if (!ctx
->has_tessellation
) {
1030 if (ctx
->tesseval_shader_saved
!= ctx
->tesseval_shader
) {
1031 ctx
->pipe
->bind_tes_state(ctx
->pipe
, ctx
->tesseval_shader_saved
);
1032 ctx
->tesseval_shader
= ctx
->tesseval_shader_saved
;
1034 ctx
->tesseval_shader_saved
= NULL
;
1037 void cso_set_compute_shader_handle(struct cso_context
*ctx
, void *handle
)
1039 assert(ctx
->has_compute_shader
|| !handle
);
1041 if (ctx
->has_compute_shader
&& ctx
->compute_shader
!= handle
) {
1042 ctx
->compute_shader
= handle
;
1043 ctx
->pipe
->bind_compute_state(ctx
->pipe
, handle
);
1047 void cso_delete_compute_shader(struct cso_context
*ctx
, void *handle
)
1049 if (handle
== ctx
->compute_shader
) {
1050 /* unbind before deleting */
1051 ctx
->pipe
->bind_compute_state(ctx
->pipe
, NULL
);
1052 ctx
->compute_shader
= NULL
;
1054 ctx
->pipe
->delete_compute_state(ctx
->pipe
, handle
);
1058 cso_set_vertex_elements_direct(struct cso_context
*ctx
,
1060 const struct pipe_vertex_element
*states
)
1062 unsigned key_size
, hash_key
;
1063 struct cso_hash_iter iter
;
1065 struct cso_velems_state velems_state
;
1067 /* Need to include the count into the stored state data too.
1068 * Otherwise first few count pipe_vertex_elements could be identical
1069 * even if count is different, and there's no guarantee the hash would
1070 * be different in that case neither.
1072 key_size
= sizeof(struct pipe_vertex_element
) * count
+ sizeof(unsigned);
1073 velems_state
.count
= count
;
1074 memcpy(velems_state
.velems
, states
,
1075 sizeof(struct pipe_vertex_element
) * count
);
1076 hash_key
= cso_construct_key((void*)&velems_state
, key_size
);
1077 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_VELEMENTS
,
1078 (void*)&velems_state
, key_size
);
1080 if (cso_hash_iter_is_null(iter
)) {
1081 struct cso_velements
*cso
= MALLOC(sizeof(struct cso_velements
));
1085 memcpy(&cso
->state
, &velems_state
, key_size
);
1086 cso
->data
= ctx
->pipe
->create_vertex_elements_state(ctx
->pipe
, count
,
1087 &cso
->state
.velems
[0]);
1089 (cso_state_callback
) ctx
->pipe
->delete_vertex_elements_state
;
1090 cso
->context
= ctx
->pipe
;
1092 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_VELEMENTS
, cso
);
1093 if (cso_hash_iter_is_null(iter
)) {
1101 handle
= ((struct cso_velements
*)cso_hash_iter_data(iter
))->data
;
1104 if (ctx
->velements
!= handle
) {
1105 ctx
->velements
= handle
;
1106 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, handle
);
1111 cso_set_vertex_elements(struct cso_context
*ctx
,
1113 const struct pipe_vertex_element
*states
)
1115 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1118 u_vbuf_set_vertex_elements(vbuf
, count
, states
);
1122 cso_set_vertex_elements_direct(ctx
, count
, states
);
1127 cso_save_vertex_elements(struct cso_context
*ctx
)
1129 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1132 u_vbuf_save_vertex_elements(vbuf
);
1136 assert(!ctx
->velements_saved
);
1137 ctx
->velements_saved
= ctx
->velements
;
1141 cso_restore_vertex_elements(struct cso_context
*ctx
)
1143 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1146 u_vbuf_restore_vertex_elements(vbuf
);
1150 if (ctx
->velements
!= ctx
->velements_saved
) {
1151 ctx
->velements
= ctx
->velements_saved
;
1152 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, ctx
->velements_saved
);
1154 ctx
->velements_saved
= NULL
;
1157 /* vertex buffers */
1160 cso_set_vertex_buffers_direct(struct cso_context
*ctx
,
1161 unsigned start_slot
, unsigned count
,
1162 const struct pipe_vertex_buffer
*buffers
)
1164 /* Save what's in the auxiliary slot, so that we can save and restore it
1167 if (start_slot
== 0) {
1169 pipe_vertex_buffer_reference(&ctx
->vertex_buffer0_current
,
1172 pipe_vertex_buffer_unreference(&ctx
->vertex_buffer0_current
);
1176 ctx
->pipe
->set_vertex_buffers(ctx
->pipe
, start_slot
, count
, buffers
);
1180 void cso_set_vertex_buffers(struct cso_context
*ctx
,
1181 unsigned start_slot
, unsigned count
,
1182 const struct pipe_vertex_buffer
*buffers
)
1184 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1190 u_vbuf_set_vertex_buffers(vbuf
, start_slot
, count
, buffers
);
1194 cso_set_vertex_buffers_direct(ctx
, start_slot
, count
, buffers
);
1198 cso_save_vertex_buffer0(struct cso_context
*ctx
)
1200 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1203 u_vbuf_save_vertex_buffer0(vbuf
);
1207 pipe_vertex_buffer_reference(&ctx
->vertex_buffer0_saved
,
1208 &ctx
->vertex_buffer0_current
);
1212 cso_restore_vertex_buffer0(struct cso_context
*ctx
)
1214 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1217 u_vbuf_restore_vertex_buffer0(vbuf
);
1221 cso_set_vertex_buffers(ctx
, 0, 1, &ctx
->vertex_buffer0_saved
);
1222 pipe_vertex_buffer_unreference(&ctx
->vertex_buffer0_saved
);
1227 cso_single_sampler(struct cso_context
*ctx
, enum pipe_shader_type shader_stage
,
1228 unsigned idx
, const struct pipe_sampler_state
*templ
)
1231 unsigned key_size
= sizeof(struct pipe_sampler_state
);
1232 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
1233 struct cso_sampler
*cso
;
1234 struct cso_hash_iter iter
=
1235 cso_find_state_template(ctx
->cache
,
1236 hash_key
, CSO_SAMPLER
,
1237 (void *) templ
, key_size
);
1239 if (cso_hash_iter_is_null(iter
)) {
1240 cso
= MALLOC(sizeof(struct cso_sampler
));
1244 memcpy(&cso
->state
, templ
, sizeof(*templ
));
1245 cso
->data
= ctx
->pipe
->create_sampler_state(ctx
->pipe
, &cso
->state
);
1247 (cso_state_callback
) ctx
->pipe
->delete_sampler_state
;
1248 cso
->context
= ctx
->pipe
;
1249 cso
->hash_key
= hash_key
;
1251 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_SAMPLER
, cso
);
1252 if (cso_hash_iter_is_null(iter
)) {
1258 cso
= cso_hash_iter_data(iter
);
1261 ctx
->samplers
[shader_stage
].cso_samplers
[idx
] = cso
;
1262 ctx
->samplers
[shader_stage
].samplers
[idx
] = cso
->data
;
1263 ctx
->max_sampler_seen
= MAX2(ctx
->max_sampler_seen
, (int)idx
);
1269 * Send staged sampler state to the driver.
1272 cso_single_sampler_done(struct cso_context
*ctx
,
1273 enum pipe_shader_type shader_stage
)
1275 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1277 if (ctx
->max_sampler_seen
== -1)
1280 ctx
->pipe
->bind_sampler_states(ctx
->pipe
, shader_stage
, 0,
1281 ctx
->max_sampler_seen
+ 1,
1283 ctx
->max_sampler_seen
= -1;
1288 * If the function encouters any errors it will return the
1289 * last one. Done to always try to set as many samplers
1293 cso_set_samplers(struct cso_context
*ctx
,
1294 enum pipe_shader_type shader_stage
,
1296 const struct pipe_sampler_state
**templates
)
1298 for (unsigned i
= 0; i
< nr
; i
++)
1299 cso_single_sampler(ctx
, shader_stage
, i
, templates
[i
]);
1301 cso_single_sampler_done(ctx
, shader_stage
);
1305 cso_save_fragment_samplers(struct cso_context
*ctx
)
1307 struct sampler_info
*info
= &ctx
->samplers
[PIPE_SHADER_FRAGMENT
];
1308 struct sampler_info
*saved
= &ctx
->fragment_samplers_saved
;
1310 memcpy(saved
->cso_samplers
, info
->cso_samplers
,
1311 sizeof(info
->cso_samplers
));
1312 memcpy(saved
->samplers
, info
->samplers
, sizeof(info
->samplers
));
1317 cso_restore_fragment_samplers(struct cso_context
*ctx
)
1319 struct sampler_info
*info
= &ctx
->samplers
[PIPE_SHADER_FRAGMENT
];
1320 struct sampler_info
*saved
= &ctx
->fragment_samplers_saved
;
1322 memcpy(info
->cso_samplers
, saved
->cso_samplers
,
1323 sizeof(info
->cso_samplers
));
1324 memcpy(info
->samplers
, saved
->samplers
, sizeof(info
->samplers
));
1326 for (int i
= PIPE_MAX_SAMPLERS
- 1; i
>= 0; i
--) {
1327 if (info
->samplers
[i
]) {
1328 ctx
->max_sampler_seen
= i
;
1333 cso_single_sampler_done(ctx
, PIPE_SHADER_FRAGMENT
);
1338 cso_set_sampler_views(struct cso_context
*ctx
,
1339 enum pipe_shader_type shader_stage
,
1341 struct pipe_sampler_view
**views
)
1343 if (shader_stage
== PIPE_SHADER_FRAGMENT
) {
1345 boolean any_change
= FALSE
;
1347 /* reference new views */
1348 for (i
= 0; i
< count
; i
++) {
1349 any_change
|= ctx
->fragment_views
[i
] != views
[i
];
1350 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], views
[i
]);
1352 /* unref extra old views, if any */
1353 for (; i
< ctx
->nr_fragment_views
; i
++) {
1354 any_change
|= ctx
->fragment_views
[i
] != NULL
;
1355 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1358 /* bind the new sampler views */
1360 ctx
->pipe
->set_sampler_views(ctx
->pipe
, shader_stage
, 0,
1361 MAX2(ctx
->nr_fragment_views
, count
),
1362 ctx
->fragment_views
);
1365 ctx
->nr_fragment_views
= count
;
1368 ctx
->pipe
->set_sampler_views(ctx
->pipe
, shader_stage
, 0, count
, views
);
1373 cso_save_fragment_sampler_views(struct cso_context
*ctx
)
1377 ctx
->nr_fragment_views_saved
= ctx
->nr_fragment_views
;
1379 for (i
= 0; i
< ctx
->nr_fragment_views
; i
++) {
1380 assert(!ctx
->fragment_views_saved
[i
]);
1381 pipe_sampler_view_reference(&ctx
->fragment_views_saved
[i
],
1382 ctx
->fragment_views
[i
]);
1388 cso_restore_fragment_sampler_views(struct cso_context
*ctx
)
1390 unsigned i
, nr_saved
= ctx
->nr_fragment_views_saved
;
1393 for (i
= 0; i
< nr_saved
; i
++) {
1394 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1395 /* move the reference from one pointer to another */
1396 ctx
->fragment_views
[i
] = ctx
->fragment_views_saved
[i
];
1397 ctx
->fragment_views_saved
[i
] = NULL
;
1399 for (; i
< ctx
->nr_fragment_views
; i
++) {
1400 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1403 num
= MAX2(ctx
->nr_fragment_views
, nr_saved
);
1405 /* bind the old/saved sampler views */
1406 ctx
->pipe
->set_sampler_views(ctx
->pipe
, PIPE_SHADER_FRAGMENT
, 0, num
,
1407 ctx
->fragment_views
);
1409 ctx
->nr_fragment_views
= nr_saved
;
1410 ctx
->nr_fragment_views_saved
= 0;
1415 cso_set_shader_images(struct cso_context
*ctx
,
1416 enum pipe_shader_type shader_stage
,
1417 unsigned start
, unsigned count
,
1418 struct pipe_image_view
*images
)
1420 if (shader_stage
== PIPE_SHADER_FRAGMENT
&& start
== 0 && count
>= 1) {
1421 util_copy_image_view(&ctx
->fragment_image0_current
, &images
[0]);
1424 ctx
->pipe
->set_shader_images(ctx
->pipe
, shader_stage
, start
, count
, images
);
1429 cso_save_fragment_image0(struct cso_context
*ctx
)
1431 util_copy_image_view(&ctx
->fragment_image0_saved
,
1432 &ctx
->fragment_image0_current
);
1437 cso_restore_fragment_image0(struct cso_context
*ctx
)
1439 cso_set_shader_images(ctx
, PIPE_SHADER_FRAGMENT
, 0, 1,
1440 &ctx
->fragment_image0_saved
);
1445 cso_set_stream_outputs(struct cso_context
*ctx
,
1446 unsigned num_targets
,
1447 struct pipe_stream_output_target
**targets
,
1448 const unsigned *offsets
)
1450 struct pipe_context
*pipe
= ctx
->pipe
;
1453 if (!ctx
->has_streamout
) {
1454 assert(num_targets
== 0);
1458 if (ctx
->nr_so_targets
== 0 && num_targets
== 0) {
1459 /* Nothing to do. */
1463 /* reference new targets */
1464 for (i
= 0; i
< num_targets
; i
++) {
1465 pipe_so_target_reference(&ctx
->so_targets
[i
], targets
[i
]);
1467 /* unref extra old targets, if any */
1468 for (; i
< ctx
->nr_so_targets
; i
++) {
1469 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1472 pipe
->set_stream_output_targets(pipe
, num_targets
, targets
,
1474 ctx
->nr_so_targets
= num_targets
;
1478 cso_save_stream_outputs(struct cso_context
*ctx
)
1482 if (!ctx
->has_streamout
) {
1486 ctx
->nr_so_targets_saved
= ctx
->nr_so_targets
;
1488 for (i
= 0; i
< ctx
->nr_so_targets
; i
++) {
1489 assert(!ctx
->so_targets_saved
[i
]);
1490 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], ctx
->so_targets
[i
]);
1495 cso_restore_stream_outputs(struct cso_context
*ctx
)
1497 struct pipe_context
*pipe
= ctx
->pipe
;
1499 unsigned offset
[PIPE_MAX_SO_BUFFERS
];
1501 if (!ctx
->has_streamout
) {
1505 if (ctx
->nr_so_targets
== 0 && ctx
->nr_so_targets_saved
== 0) {
1506 /* Nothing to do. */
1510 assert(ctx
->nr_so_targets_saved
<= PIPE_MAX_SO_BUFFERS
);
1511 for (i
= 0; i
< ctx
->nr_so_targets_saved
; i
++) {
1512 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1513 /* move the reference from one pointer to another */
1514 ctx
->so_targets
[i
] = ctx
->so_targets_saved
[i
];
1515 ctx
->so_targets_saved
[i
] = NULL
;
1516 /* -1 means append */
1517 offset
[i
] = (unsigned)-1;
1519 for (; i
< ctx
->nr_so_targets
; i
++) {
1520 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1523 pipe
->set_stream_output_targets(pipe
, ctx
->nr_so_targets_saved
,
1524 ctx
->so_targets
, offset
);
1526 ctx
->nr_so_targets
= ctx
->nr_so_targets_saved
;
1527 ctx
->nr_so_targets_saved
= 0;
1530 /* constant buffers */
1533 cso_set_constant_buffer(struct cso_context
*cso
,
1534 enum pipe_shader_type shader_stage
,
1535 unsigned index
, struct pipe_constant_buffer
*cb
)
1537 struct pipe_context
*pipe
= cso
->pipe
;
1539 pipe
->set_constant_buffer(pipe
, shader_stage
, index
, cb
);
1542 util_copy_constant_buffer(&cso
->aux_constbuf_current
[shader_stage
], cb
);
1547 cso_set_constant_buffer_resource(struct cso_context
*cso
,
1548 enum pipe_shader_type shader_stage
,
1550 struct pipe_resource
*buffer
)
1553 struct pipe_constant_buffer cb
;
1555 cb
.buffer_offset
= 0;
1556 cb
.buffer_size
= buffer
->width0
;
1557 cb
.user_buffer
= NULL
;
1558 cso_set_constant_buffer(cso
, shader_stage
, index
, &cb
);
1560 cso_set_constant_buffer(cso
, shader_stage
, index
, NULL
);
1565 cso_set_constant_user_buffer(struct cso_context
*cso
,
1566 enum pipe_shader_type shader_stage
,
1567 unsigned index
, void *ptr
, unsigned size
)
1570 struct pipe_constant_buffer cb
;
1572 cb
.buffer_offset
= 0;
1573 cb
.buffer_size
= size
;
1574 cb
.user_buffer
= ptr
;
1575 cso_set_constant_buffer(cso
, shader_stage
, index
, &cb
);
1577 cso_set_constant_buffer(cso
, shader_stage
, index
, NULL
);
1582 cso_save_constant_buffer_slot0(struct cso_context
*cso
,
1583 enum pipe_shader_type shader_stage
)
1585 util_copy_constant_buffer(&cso
->aux_constbuf_saved
[shader_stage
],
1586 &cso
->aux_constbuf_current
[shader_stage
]);
1590 cso_restore_constant_buffer_slot0(struct cso_context
*cso
,
1591 enum pipe_shader_type shader_stage
)
1593 cso_set_constant_buffer(cso
, shader_stage
, 0,
1594 &cso
->aux_constbuf_saved
[shader_stage
]);
1595 pipe_resource_reference(&cso
->aux_constbuf_saved
[shader_stage
].buffer
,
1601 * Save all the CSO state items specified by the state_mask bitmask
1602 * of CSO_BIT_x flags.
1605 cso_save_state(struct cso_context
*cso
, unsigned state_mask
)
1607 assert(cso
->saved_state
== 0);
1609 cso
->saved_state
= state_mask
;
1611 if (state_mask
& CSO_BIT_AUX_VERTEX_BUFFER_SLOT
)
1612 cso_save_vertex_buffer0(cso
);
1613 if (state_mask
& CSO_BIT_BLEND
)
1614 cso_save_blend(cso
);
1615 if (state_mask
& CSO_BIT_DEPTH_STENCIL_ALPHA
)
1616 cso_save_depth_stencil_alpha(cso
);
1617 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLERS
)
1618 cso_save_fragment_samplers(cso
);
1619 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLER_VIEWS
)
1620 cso_save_fragment_sampler_views(cso
);
1621 if (state_mask
& CSO_BIT_FRAGMENT_SHADER
)
1622 cso_save_fragment_shader(cso
);
1623 if (state_mask
& CSO_BIT_FRAMEBUFFER
)
1624 cso_save_framebuffer(cso
);
1625 if (state_mask
& CSO_BIT_GEOMETRY_SHADER
)
1626 cso_save_geometry_shader(cso
);
1627 if (state_mask
& CSO_BIT_MIN_SAMPLES
)
1628 cso_save_min_samples(cso
);
1629 if (state_mask
& CSO_BIT_RASTERIZER
)
1630 cso_save_rasterizer(cso
);
1631 if (state_mask
& CSO_BIT_RENDER_CONDITION
)
1632 cso_save_render_condition(cso
);
1633 if (state_mask
& CSO_BIT_SAMPLE_MASK
)
1634 cso_save_sample_mask(cso
);
1635 if (state_mask
& CSO_BIT_STENCIL_REF
)
1636 cso_save_stencil_ref(cso
);
1637 if (state_mask
& CSO_BIT_STREAM_OUTPUTS
)
1638 cso_save_stream_outputs(cso
);
1639 if (state_mask
& CSO_BIT_TESSCTRL_SHADER
)
1640 cso_save_tessctrl_shader(cso
);
1641 if (state_mask
& CSO_BIT_TESSEVAL_SHADER
)
1642 cso_save_tesseval_shader(cso
);
1643 if (state_mask
& CSO_BIT_VERTEX_ELEMENTS
)
1644 cso_save_vertex_elements(cso
);
1645 if (state_mask
& CSO_BIT_VERTEX_SHADER
)
1646 cso_save_vertex_shader(cso
);
1647 if (state_mask
& CSO_BIT_VIEWPORT
)
1648 cso_save_viewport(cso
);
1649 if (state_mask
& CSO_BIT_PAUSE_QUERIES
)
1650 cso
->pipe
->set_active_query_state(cso
->pipe
, false);
1651 if (state_mask
& CSO_BIT_FRAGMENT_IMAGE0
)
1652 cso_save_fragment_image0(cso
);
1657 * Restore the state which was saved by cso_save_state().
1660 cso_restore_state(struct cso_context
*cso
)
1662 unsigned state_mask
= cso
->saved_state
;
1666 if (state_mask
& CSO_BIT_AUX_VERTEX_BUFFER_SLOT
)
1667 cso_restore_vertex_buffer0(cso
);
1668 if (state_mask
& CSO_BIT_BLEND
)
1669 cso_restore_blend(cso
);
1670 if (state_mask
& CSO_BIT_DEPTH_STENCIL_ALPHA
)
1671 cso_restore_depth_stencil_alpha(cso
);
1672 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLERS
)
1673 cso_restore_fragment_samplers(cso
);
1674 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLER_VIEWS
)
1675 cso_restore_fragment_sampler_views(cso
);
1676 if (state_mask
& CSO_BIT_FRAGMENT_SHADER
)
1677 cso_restore_fragment_shader(cso
);
1678 if (state_mask
& CSO_BIT_FRAMEBUFFER
)
1679 cso_restore_framebuffer(cso
);
1680 if (state_mask
& CSO_BIT_GEOMETRY_SHADER
)
1681 cso_restore_geometry_shader(cso
);
1682 if (state_mask
& CSO_BIT_MIN_SAMPLES
)
1683 cso_restore_min_samples(cso
);
1684 if (state_mask
& CSO_BIT_RASTERIZER
)
1685 cso_restore_rasterizer(cso
);
1686 if (state_mask
& CSO_BIT_RENDER_CONDITION
)
1687 cso_restore_render_condition(cso
);
1688 if (state_mask
& CSO_BIT_SAMPLE_MASK
)
1689 cso_restore_sample_mask(cso
);
1690 if (state_mask
& CSO_BIT_STENCIL_REF
)
1691 cso_restore_stencil_ref(cso
);
1692 if (state_mask
& CSO_BIT_STREAM_OUTPUTS
)
1693 cso_restore_stream_outputs(cso
);
1694 if (state_mask
& CSO_BIT_TESSCTRL_SHADER
)
1695 cso_restore_tessctrl_shader(cso
);
1696 if (state_mask
& CSO_BIT_TESSEVAL_SHADER
)
1697 cso_restore_tesseval_shader(cso
);
1698 if (state_mask
& CSO_BIT_VERTEX_ELEMENTS
)
1699 cso_restore_vertex_elements(cso
);
1700 if (state_mask
& CSO_BIT_VERTEX_SHADER
)
1701 cso_restore_vertex_shader(cso
);
1702 if (state_mask
& CSO_BIT_VIEWPORT
)
1703 cso_restore_viewport(cso
);
1704 if (state_mask
& CSO_BIT_PAUSE_QUERIES
)
1705 cso
->pipe
->set_active_query_state(cso
->pipe
, true);
1706 if (state_mask
& CSO_BIT_FRAGMENT_IMAGE0
)
1707 cso_restore_fragment_image0(cso
);
1709 cso
->saved_state
= 0;
1717 cso_draw_vbo(struct cso_context
*cso
,
1718 const struct pipe_draw_info
*info
)
1720 struct u_vbuf
*vbuf
= cso
->vbuf
;
1722 /* We can't have both indirect drawing and SO-vertex-count drawing */
1723 assert(info
->indirect
== NULL
|| info
->count_from_stream_output
== NULL
);
1725 /* We can't have SO-vertex-count drawing with an index buffer */
1726 assert(info
->count_from_stream_output
== NULL
|| info
->index_size
== 0);
1729 u_vbuf_draw_vbo(vbuf
, info
);
1731 struct pipe_context
*pipe
= cso
->pipe
;
1732 pipe
->draw_vbo(pipe
, info
);
1737 cso_draw_arrays(struct cso_context
*cso
, uint mode
, uint start
, uint count
)
1739 struct pipe_draw_info info
;
1741 util_draw_init_info(&info
);
1746 info
.min_index
= start
;
1747 info
.max_index
= start
+ count
- 1;
1749 cso_draw_vbo(cso
, &info
);
1753 cso_draw_arrays_instanced(struct cso_context
*cso
, uint mode
,
1754 uint start
, uint count
,
1755 uint start_instance
, uint instance_count
)
1757 struct pipe_draw_info info
;
1759 util_draw_init_info(&info
);
1764 info
.min_index
= start
;
1765 info
.max_index
= start
+ count
- 1;
1766 info
.start_instance
= start_instance
;
1767 info
.instance_count
= instance_count
;
1769 cso_draw_vbo(cso
, &info
);