1 /**************************************************************************
3 * Copyright 2007 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
/**
 * @file
 * Wrap the cso cache & hash mechanisms in a simplified
 * pipe-driver-specific interface.
 *
 * @author Zack Rusin <zackr@vmware.com>
 * @author Keith Whitwell <keithw@vmware.com>
 */
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
54 * Per-shader sampler information.
58 struct cso_sampler
*cso_samplers
[PIPE_MAX_SAMPLERS
];
59 void *samplers
[PIPE_MAX_SAMPLERS
];
66 struct pipe_context
*pipe
;
67 struct cso_cache
*cache
;
70 boolean has_geometry_shader
;
71 boolean has_tessellation
;
72 boolean has_compute_shader
;
73 boolean has_streamout
;
75 unsigned saved_state
; /**< bitmask of CSO_BIT_x flags */
77 struct pipe_sampler_view
*fragment_views
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
78 unsigned nr_fragment_views
;
80 struct pipe_sampler_view
*fragment_views_saved
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
81 unsigned nr_fragment_views_saved
;
83 struct sampler_info fragment_samplers_saved
;
84 struct sampler_info samplers
[PIPE_SHADER_TYPES
];
86 struct pipe_vertex_buffer aux_vertex_buffer_current
;
87 struct pipe_vertex_buffer aux_vertex_buffer_saved
;
88 unsigned aux_vertex_buffer_index
;
90 struct pipe_constant_buffer aux_constbuf_current
[PIPE_SHADER_TYPES
];
91 struct pipe_constant_buffer aux_constbuf_saved
[PIPE_SHADER_TYPES
];
93 struct pipe_image_view fragment_image0_current
;
94 struct pipe_image_view fragment_image0_saved
;
96 unsigned nr_so_targets
;
97 struct pipe_stream_output_target
*so_targets
[PIPE_MAX_SO_BUFFERS
];
99 unsigned nr_so_targets_saved
;
100 struct pipe_stream_output_target
*so_targets_saved
[PIPE_MAX_SO_BUFFERS
];
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
105 void *blend
, *blend_saved
;
106 void *depth_stencil
, *depth_stencil_saved
;
107 void *rasterizer
, *rasterizer_saved
;
108 void *fragment_shader
, *fragment_shader_saved
;
109 void *vertex_shader
, *vertex_shader_saved
;
110 void *geometry_shader
, *geometry_shader_saved
;
111 void *tessctrl_shader
, *tessctrl_shader_saved
;
112 void *tesseval_shader
, *tesseval_shader_saved
;
113 void *compute_shader
;
114 void *velements
, *velements_saved
;
115 struct pipe_query
*render_condition
, *render_condition_saved
;
116 uint render_condition_mode
, render_condition_mode_saved
;
117 boolean render_condition_cond
, render_condition_cond_saved
;
119 struct pipe_framebuffer_state fb
, fb_saved
;
120 struct pipe_viewport_state vp
, vp_saved
;
121 struct pipe_blend_color blend_color
;
122 unsigned sample_mask
, sample_mask_saved
;
123 unsigned min_samples
, min_samples_saved
;
124 struct pipe_stencil_ref stencil_ref
, stencil_ref_saved
;
128 static boolean
delete_blend_state(struct cso_context
*ctx
, void *state
)
130 struct cso_blend
*cso
= (struct cso_blend
*)state
;
132 if (ctx
->blend
== cso
->data
)
135 if (cso
->delete_state
)
136 cso
->delete_state(cso
->context
, cso
->data
);
141 static boolean
delete_depth_stencil_state(struct cso_context
*ctx
, void *state
)
143 struct cso_depth_stencil_alpha
*cso
=
144 (struct cso_depth_stencil_alpha
*)state
;
146 if (ctx
->depth_stencil
== cso
->data
)
149 if (cso
->delete_state
)
150 cso
->delete_state(cso
->context
, cso
->data
);
156 static boolean
delete_sampler_state(struct cso_context
*ctx
, void *state
)
158 struct cso_sampler
*cso
= (struct cso_sampler
*)state
;
159 if (cso
->delete_state
)
160 cso
->delete_state(cso
->context
, cso
->data
);
165 static boolean
delete_rasterizer_state(struct cso_context
*ctx
, void *state
)
167 struct cso_rasterizer
*cso
= (struct cso_rasterizer
*)state
;
169 if (ctx
->rasterizer
== cso
->data
)
171 if (cso
->delete_state
)
172 cso
->delete_state(cso
->context
, cso
->data
);
177 static boolean
delete_vertex_elements(struct cso_context
*ctx
,
180 struct cso_velements
*cso
= (struct cso_velements
*)state
;
182 if (ctx
->velements
== cso
->data
)
185 if (cso
->delete_state
)
186 cso
->delete_state(cso
->context
, cso
->data
);
192 static inline boolean
delete_cso(struct cso_context
*ctx
,
193 void *state
, enum cso_cache_type type
)
197 return delete_blend_state(ctx
, state
);
199 return delete_sampler_state(ctx
, state
);
200 case CSO_DEPTH_STENCIL_ALPHA
:
201 return delete_depth_stencil_state(ctx
, state
);
203 return delete_rasterizer_state(ctx
, state
);
205 return delete_vertex_elements(ctx
, state
);
214 sanitize_hash(struct cso_hash
*hash
, enum cso_cache_type type
,
215 int max_size
, void *user_data
)
217 struct cso_context
*ctx
= (struct cso_context
*)user_data
;
218 /* if we're approach the maximum size, remove fourth of the entries
219 * otherwise every subsequent call will go through the same */
220 int hash_size
= cso_hash_size(hash
);
221 int max_entries
= (max_size
> hash_size
) ? max_size
: hash_size
;
222 int to_remove
= (max_size
< max_entries
) * max_entries
/4;
223 struct cso_hash_iter iter
;
224 struct cso_sampler
**samplers_to_restore
= NULL
;
225 unsigned to_restore
= 0;
227 if (hash_size
> max_size
)
228 to_remove
+= hash_size
- max_size
;
233 if (type
== CSO_SAMPLER
) {
236 samplers_to_restore
= MALLOC(PIPE_SHADER_TYPES
* PIPE_MAX_SAMPLERS
*
237 sizeof(*samplers_to_restore
));
239 /* Temporarily remove currently bound sampler states from the hash
240 * table, to prevent them from being deleted
242 for (i
= 0; i
< PIPE_SHADER_TYPES
; i
++) {
243 for (j
= 0; j
< ctx
->samplers
[i
].nr_samplers
; j
++) {
244 struct cso_sampler
*sampler
= ctx
->samplers
[i
].cso_samplers
[j
];
246 if (sampler
&& cso_hash_take(hash
, sampler
->hash_key
))
247 samplers_to_restore
[to_restore
++] = sampler
;
252 iter
= cso_hash_first_node(hash
);
254 /*remove elements until we're good */
255 /*fixme: currently we pick the nodes to remove at random*/
256 void *cso
= cso_hash_iter_data(iter
);
261 if (delete_cso(ctx
, cso
, type
)) {
262 iter
= cso_hash_erase(hash
, iter
);
265 iter
= cso_hash_iter_next(iter
);
268 if (type
== CSO_SAMPLER
) {
269 /* Put currently bound sampler states back into the hash table */
270 while (to_restore
--) {
271 struct cso_sampler
*sampler
= samplers_to_restore
[to_restore
];
273 cso_hash_insert(hash
, sampler
->hash_key
, sampler
);
276 FREE(samplers_to_restore
);
280 static void cso_init_vbuf(struct cso_context
*cso
)
282 struct u_vbuf_caps caps
;
284 /* Install u_vbuf if there is anything unsupported. */
285 if (u_vbuf_get_caps(cso
->pipe
->screen
, &caps
)) {
286 cso
->vbuf
= u_vbuf_create(cso
->pipe
, &caps
,
287 cso
->aux_vertex_buffer_index
);
291 struct cso_context
*cso_create_context( struct pipe_context
*pipe
)
293 struct cso_context
*ctx
= CALLOC_STRUCT(cso_context
);
297 ctx
->cache
= cso_cache_create();
298 if (ctx
->cache
== NULL
)
300 cso_cache_set_sanitize_callback(ctx
->cache
,
305 ctx
->sample_mask
= ~0;
307 ctx
->aux_vertex_buffer_index
= 0; /* 0 for now */
311 /* Enable for testing: */
312 if (0) cso_set_maximum_cache_size( ctx
->cache
, 4 );
314 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_GEOMETRY
,
315 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
316 ctx
->has_geometry_shader
= TRUE
;
318 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_TESS_CTRL
,
319 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
320 ctx
->has_tessellation
= TRUE
;
322 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_COMPUTE
,
323 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
325 pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_COMPUTE
,
326 PIPE_SHADER_CAP_SUPPORTED_IRS
);
327 if (supported_irs
& (1 << PIPE_SHADER_IR_TGSI
)) {
328 ctx
->has_compute_shader
= TRUE
;
331 if (pipe
->screen
->get_param(pipe
->screen
,
332 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS
) != 0) {
333 ctx
->has_streamout
= TRUE
;
339 cso_destroy_context( ctx
);
344 * Free the CSO context.
346 void cso_destroy_context( struct cso_context
*ctx
)
351 ctx
->pipe
->set_index_buffer(ctx
->pipe
, NULL
);
353 ctx
->pipe
->bind_blend_state( ctx
->pipe
, NULL
);
354 ctx
->pipe
->bind_rasterizer_state( ctx
->pipe
, NULL
);
357 static struct pipe_sampler_view
*views
[PIPE_MAX_SHADER_SAMPLER_VIEWS
] = { NULL
};
358 static void *zeros
[PIPE_MAX_SAMPLERS
] = { NULL
};
359 struct pipe_screen
*scr
= ctx
->pipe
->screen
;
360 enum pipe_shader_type sh
;
361 for (sh
= 0; sh
< PIPE_SHADER_TYPES
; sh
++) {
362 int maxsam
= scr
->get_shader_param(scr
, sh
,
363 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS
);
364 int maxview
= scr
->get_shader_param(scr
, sh
,
365 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS
);
366 assert(maxsam
<= PIPE_MAX_SAMPLERS
);
367 assert(maxview
<= PIPE_MAX_SHADER_SAMPLER_VIEWS
);
369 ctx
->pipe
->bind_sampler_states(ctx
->pipe
, sh
, 0, maxsam
, zeros
);
372 ctx
->pipe
->set_sampler_views(ctx
->pipe
, sh
, 0, maxview
, views
);
377 ctx
->pipe
->bind_depth_stencil_alpha_state( ctx
->pipe
, NULL
);
378 ctx
->pipe
->bind_fs_state( ctx
->pipe
, NULL
);
379 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_FRAGMENT
, 0, NULL
);
380 ctx
->pipe
->bind_vs_state( ctx
->pipe
, NULL
);
381 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_VERTEX
, 0, NULL
);
382 if (ctx
->has_geometry_shader
) {
383 ctx
->pipe
->bind_gs_state(ctx
->pipe
, NULL
);
384 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_GEOMETRY
, 0, NULL
);
386 if (ctx
->has_tessellation
) {
387 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, NULL
);
388 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_TESS_CTRL
, 0, NULL
);
389 ctx
->pipe
->bind_tes_state(ctx
->pipe
, NULL
);
390 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_TESS_EVAL
, 0, NULL
);
392 if (ctx
->has_compute_shader
) {
393 ctx
->pipe
->bind_compute_state(ctx
->pipe
, NULL
);
394 ctx
->pipe
->set_constant_buffer(ctx
->pipe
, PIPE_SHADER_COMPUTE
, 0, NULL
);
396 ctx
->pipe
->bind_vertex_elements_state( ctx
->pipe
, NULL
);
398 if (ctx
->has_streamout
)
399 ctx
->pipe
->set_stream_output_targets(ctx
->pipe
, 0, NULL
, NULL
);
402 for (i
= 0; i
< PIPE_MAX_SHADER_SAMPLER_VIEWS
; i
++) {
403 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
404 pipe_sampler_view_reference(&ctx
->fragment_views_saved
[i
], NULL
);
407 util_unreference_framebuffer_state(&ctx
->fb
);
408 util_unreference_framebuffer_state(&ctx
->fb_saved
);
410 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
, NULL
);
411 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
, NULL
);
413 for (i
= 0; i
< PIPE_SHADER_TYPES
; i
++) {
414 pipe_resource_reference(&ctx
->aux_constbuf_current
[i
].buffer
, NULL
);
415 pipe_resource_reference(&ctx
->aux_constbuf_saved
[i
].buffer
, NULL
);
418 pipe_resource_reference(&ctx
->fragment_image0_current
.resource
, NULL
);
419 pipe_resource_reference(&ctx
->fragment_image0_saved
.resource
, NULL
);
421 for (i
= 0; i
< PIPE_MAX_SO_BUFFERS
; i
++) {
422 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
423 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], NULL
);
427 cso_cache_delete( ctx
->cache
);
432 u_vbuf_destroy(ctx
->vbuf
);
437 /* Those function will either find the state of the given template
438 * in the cache or they will create a new state from the given
439 * template, insert it in the cache and return it.
443 * If the driver returns 0 from the create method then they will assign
444 * the data member of the cso to be the template itself.
447 enum pipe_error
cso_set_blend(struct cso_context
*ctx
,
448 const struct pipe_blend_state
*templ
)
450 unsigned key_size
, hash_key
;
451 struct cso_hash_iter iter
;
454 key_size
= templ
->independent_blend_enable
?
455 sizeof(struct pipe_blend_state
) :
456 (char *)&(templ
->rt
[1]) - (char *)templ
;
457 hash_key
= cso_construct_key((void*)templ
, key_size
);
458 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_BLEND
,
459 (void*)templ
, key_size
);
461 if (cso_hash_iter_is_null(iter
)) {
462 struct cso_blend
*cso
= MALLOC(sizeof(struct cso_blend
));
464 return PIPE_ERROR_OUT_OF_MEMORY
;
466 memset(&cso
->state
, 0, sizeof cso
->state
);
467 memcpy(&cso
->state
, templ
, key_size
);
468 cso
->data
= ctx
->pipe
->create_blend_state(ctx
->pipe
, &cso
->state
);
469 cso
->delete_state
= (cso_state_callback
)ctx
->pipe
->delete_blend_state
;
470 cso
->context
= ctx
->pipe
;
472 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_BLEND
, cso
);
473 if (cso_hash_iter_is_null(iter
)) {
475 return PIPE_ERROR_OUT_OF_MEMORY
;
481 handle
= ((struct cso_blend
*)cso_hash_iter_data(iter
))->data
;
484 if (ctx
->blend
!= handle
) {
486 ctx
->pipe
->bind_blend_state(ctx
->pipe
, handle
);
492 cso_save_blend(struct cso_context
*ctx
)
494 assert(!ctx
->blend_saved
);
495 ctx
->blend_saved
= ctx
->blend
;
499 cso_restore_blend(struct cso_context
*ctx
)
501 if (ctx
->blend
!= ctx
->blend_saved
) {
502 ctx
->blend
= ctx
->blend_saved
;
503 ctx
->pipe
->bind_blend_state(ctx
->pipe
, ctx
->blend_saved
);
505 ctx
->blend_saved
= NULL
;
511 cso_set_depth_stencil_alpha(struct cso_context
*ctx
,
512 const struct pipe_depth_stencil_alpha_state
*templ
)
514 unsigned key_size
= sizeof(struct pipe_depth_stencil_alpha_state
);
515 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
516 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
518 CSO_DEPTH_STENCIL_ALPHA
,
519 (void*)templ
, key_size
);
522 if (cso_hash_iter_is_null(iter
)) {
523 struct cso_depth_stencil_alpha
*cso
=
524 MALLOC(sizeof(struct cso_depth_stencil_alpha
));
526 return PIPE_ERROR_OUT_OF_MEMORY
;
528 memcpy(&cso
->state
, templ
, sizeof(*templ
));
529 cso
->data
= ctx
->pipe
->create_depth_stencil_alpha_state(ctx
->pipe
,
532 (cso_state_callback
)ctx
->pipe
->delete_depth_stencil_alpha_state
;
533 cso
->context
= ctx
->pipe
;
535 iter
= cso_insert_state(ctx
->cache
, hash_key
,
536 CSO_DEPTH_STENCIL_ALPHA
, cso
);
537 if (cso_hash_iter_is_null(iter
)) {
539 return PIPE_ERROR_OUT_OF_MEMORY
;
545 handle
= ((struct cso_depth_stencil_alpha
*)
546 cso_hash_iter_data(iter
))->data
;
549 if (ctx
->depth_stencil
!= handle
) {
550 ctx
->depth_stencil
= handle
;
551 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
, handle
);
557 cso_save_depth_stencil_alpha(struct cso_context
*ctx
)
559 assert(!ctx
->depth_stencil_saved
);
560 ctx
->depth_stencil_saved
= ctx
->depth_stencil
;
564 cso_restore_depth_stencil_alpha(struct cso_context
*ctx
)
566 if (ctx
->depth_stencil
!= ctx
->depth_stencil_saved
) {
567 ctx
->depth_stencil
= ctx
->depth_stencil_saved
;
568 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
,
569 ctx
->depth_stencil_saved
);
571 ctx
->depth_stencil_saved
= NULL
;
576 enum pipe_error
cso_set_rasterizer(struct cso_context
*ctx
,
577 const struct pipe_rasterizer_state
*templ
)
579 unsigned key_size
= sizeof(struct pipe_rasterizer_state
);
580 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
581 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
584 (void*)templ
, key_size
);
587 if (cso_hash_iter_is_null(iter
)) {
588 struct cso_rasterizer
*cso
= MALLOC(sizeof(struct cso_rasterizer
));
590 return PIPE_ERROR_OUT_OF_MEMORY
;
592 memcpy(&cso
->state
, templ
, sizeof(*templ
));
593 cso
->data
= ctx
->pipe
->create_rasterizer_state(ctx
->pipe
, &cso
->state
);
595 (cso_state_callback
)ctx
->pipe
->delete_rasterizer_state
;
596 cso
->context
= ctx
->pipe
;
598 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_RASTERIZER
, cso
);
599 if (cso_hash_iter_is_null(iter
)) {
601 return PIPE_ERROR_OUT_OF_MEMORY
;
607 handle
= ((struct cso_rasterizer
*)cso_hash_iter_data(iter
))->data
;
610 if (ctx
->rasterizer
!= handle
) {
611 ctx
->rasterizer
= handle
;
612 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, handle
);
618 cso_save_rasterizer(struct cso_context
*ctx
)
620 assert(!ctx
->rasterizer_saved
);
621 ctx
->rasterizer_saved
= ctx
->rasterizer
;
625 cso_restore_rasterizer(struct cso_context
*ctx
)
627 if (ctx
->rasterizer
!= ctx
->rasterizer_saved
) {
628 ctx
->rasterizer
= ctx
->rasterizer_saved
;
629 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, ctx
->rasterizer_saved
);
631 ctx
->rasterizer_saved
= NULL
;
635 void cso_set_fragment_shader_handle(struct cso_context
*ctx
, void *handle
)
637 if (ctx
->fragment_shader
!= handle
) {
638 ctx
->fragment_shader
= handle
;
639 ctx
->pipe
->bind_fs_state(ctx
->pipe
, handle
);
643 void cso_delete_fragment_shader(struct cso_context
*ctx
, void *handle
)
645 if (handle
== ctx
->fragment_shader
) {
646 /* unbind before deleting */
647 ctx
->pipe
->bind_fs_state(ctx
->pipe
, NULL
);
648 ctx
->fragment_shader
= NULL
;
650 ctx
->pipe
->delete_fs_state(ctx
->pipe
, handle
);
654 cso_save_fragment_shader(struct cso_context
*ctx
)
656 assert(!ctx
->fragment_shader_saved
);
657 ctx
->fragment_shader_saved
= ctx
->fragment_shader
;
661 cso_restore_fragment_shader(struct cso_context
*ctx
)
663 if (ctx
->fragment_shader_saved
!= ctx
->fragment_shader
) {
664 ctx
->pipe
->bind_fs_state(ctx
->pipe
, ctx
->fragment_shader_saved
);
665 ctx
->fragment_shader
= ctx
->fragment_shader_saved
;
667 ctx
->fragment_shader_saved
= NULL
;
671 void cso_set_vertex_shader_handle(struct cso_context
*ctx
, void *handle
)
673 if (ctx
->vertex_shader
!= handle
) {
674 ctx
->vertex_shader
= handle
;
675 ctx
->pipe
->bind_vs_state(ctx
->pipe
, handle
);
679 void cso_delete_vertex_shader(struct cso_context
*ctx
, void *handle
)
681 if (handle
== ctx
->vertex_shader
) {
682 /* unbind before deleting */
683 ctx
->pipe
->bind_vs_state(ctx
->pipe
, NULL
);
684 ctx
->vertex_shader
= NULL
;
686 ctx
->pipe
->delete_vs_state(ctx
->pipe
, handle
);
690 cso_save_vertex_shader(struct cso_context
*ctx
)
692 assert(!ctx
->vertex_shader_saved
);
693 ctx
->vertex_shader_saved
= ctx
->vertex_shader
;
697 cso_restore_vertex_shader(struct cso_context
*ctx
)
699 if (ctx
->vertex_shader_saved
!= ctx
->vertex_shader
) {
700 ctx
->pipe
->bind_vs_state(ctx
->pipe
, ctx
->vertex_shader_saved
);
701 ctx
->vertex_shader
= ctx
->vertex_shader_saved
;
703 ctx
->vertex_shader_saved
= NULL
;
707 void cso_set_framebuffer(struct cso_context
*ctx
,
708 const struct pipe_framebuffer_state
*fb
)
710 if (memcmp(&ctx
->fb
, fb
, sizeof(*fb
)) != 0) {
711 util_copy_framebuffer_state(&ctx
->fb
, fb
);
712 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, fb
);
717 cso_save_framebuffer(struct cso_context
*ctx
)
719 util_copy_framebuffer_state(&ctx
->fb_saved
, &ctx
->fb
);
723 cso_restore_framebuffer(struct cso_context
*ctx
)
725 if (memcmp(&ctx
->fb
, &ctx
->fb_saved
, sizeof(ctx
->fb
))) {
726 util_copy_framebuffer_state(&ctx
->fb
, &ctx
->fb_saved
);
727 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, &ctx
->fb
);
728 util_unreference_framebuffer_state(&ctx
->fb_saved
);
733 void cso_set_viewport(struct cso_context
*ctx
,
734 const struct pipe_viewport_state
*vp
)
736 if (memcmp(&ctx
->vp
, vp
, sizeof(*vp
))) {
738 ctx
->pipe
->set_viewport_states(ctx
->pipe
, 0, 1, vp
);
743 * Setup viewport state for given width and height (position is always (0,0)).
744 * Invert the Y axis if 'invert' is true.
747 cso_set_viewport_dims(struct cso_context
*ctx
,
748 float width
, float height
, boolean invert
)
750 struct pipe_viewport_state vp
;
751 vp
.scale
[0] = width
* 0.5f
;
752 vp
.scale
[1] = height
* (invert
? -0.5f
: 0.5f
);
754 vp
.translate
[0] = 0.5f
* width
;
755 vp
.translate
[1] = 0.5f
* height
;
756 vp
.translate
[2] = 0.5f
;
757 cso_set_viewport(ctx
, &vp
);
761 cso_save_viewport(struct cso_context
*ctx
)
763 ctx
->vp_saved
= ctx
->vp
;
768 cso_restore_viewport(struct cso_context
*ctx
)
770 if (memcmp(&ctx
->vp
, &ctx
->vp_saved
, sizeof(ctx
->vp
))) {
771 ctx
->vp
= ctx
->vp_saved
;
772 ctx
->pipe
->set_viewport_states(ctx
->pipe
, 0, 1, &ctx
->vp
);
777 void cso_set_blend_color(struct cso_context
*ctx
,
778 const struct pipe_blend_color
*bc
)
780 if (memcmp(&ctx
->blend_color
, bc
, sizeof(ctx
->blend_color
))) {
781 ctx
->blend_color
= *bc
;
782 ctx
->pipe
->set_blend_color(ctx
->pipe
, bc
);
786 void cso_set_sample_mask(struct cso_context
*ctx
, unsigned sample_mask
)
788 if (ctx
->sample_mask
!= sample_mask
) {
789 ctx
->sample_mask
= sample_mask
;
790 ctx
->pipe
->set_sample_mask(ctx
->pipe
, sample_mask
);
795 cso_save_sample_mask(struct cso_context
*ctx
)
797 ctx
->sample_mask_saved
= ctx
->sample_mask
;
801 cso_restore_sample_mask(struct cso_context
*ctx
)
803 cso_set_sample_mask(ctx
, ctx
->sample_mask_saved
);
806 void cso_set_min_samples(struct cso_context
*ctx
, unsigned min_samples
)
808 if (ctx
->min_samples
!= min_samples
&& ctx
->pipe
->set_min_samples
) {
809 ctx
->min_samples
= min_samples
;
810 ctx
->pipe
->set_min_samples(ctx
->pipe
, min_samples
);
815 cso_save_min_samples(struct cso_context
*ctx
)
817 ctx
->min_samples_saved
= ctx
->min_samples
;
821 cso_restore_min_samples(struct cso_context
*ctx
)
823 cso_set_min_samples(ctx
, ctx
->min_samples_saved
);
826 void cso_set_stencil_ref(struct cso_context
*ctx
,
827 const struct pipe_stencil_ref
*sr
)
829 if (memcmp(&ctx
->stencil_ref
, sr
, sizeof(ctx
->stencil_ref
))) {
830 ctx
->stencil_ref
= *sr
;
831 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, sr
);
836 cso_save_stencil_ref(struct cso_context
*ctx
)
838 ctx
->stencil_ref_saved
= ctx
->stencil_ref
;
843 cso_restore_stencil_ref(struct cso_context
*ctx
)
845 if (memcmp(&ctx
->stencil_ref
, &ctx
->stencil_ref_saved
,
846 sizeof(ctx
->stencil_ref
))) {
847 ctx
->stencil_ref
= ctx
->stencil_ref_saved
;
848 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, &ctx
->stencil_ref
);
852 void cso_set_render_condition(struct cso_context
*ctx
,
853 struct pipe_query
*query
,
854 boolean condition
, uint mode
)
856 struct pipe_context
*pipe
= ctx
->pipe
;
858 if (ctx
->render_condition
!= query
||
859 ctx
->render_condition_mode
!= mode
||
860 ctx
->render_condition_cond
!= condition
) {
861 pipe
->render_condition(pipe
, query
, condition
, mode
);
862 ctx
->render_condition
= query
;
863 ctx
->render_condition_cond
= condition
;
864 ctx
->render_condition_mode
= mode
;
869 cso_save_render_condition(struct cso_context
*ctx
)
871 ctx
->render_condition_saved
= ctx
->render_condition
;
872 ctx
->render_condition_cond_saved
= ctx
->render_condition_cond
;
873 ctx
->render_condition_mode_saved
= ctx
->render_condition_mode
;
877 cso_restore_render_condition(struct cso_context
*ctx
)
879 cso_set_render_condition(ctx
, ctx
->render_condition_saved
,
880 ctx
->render_condition_cond_saved
,
881 ctx
->render_condition_mode_saved
);
884 void cso_set_geometry_shader_handle(struct cso_context
*ctx
, void *handle
)
886 assert(ctx
->has_geometry_shader
|| !handle
);
888 if (ctx
->has_geometry_shader
&& ctx
->geometry_shader
!= handle
) {
889 ctx
->geometry_shader
= handle
;
890 ctx
->pipe
->bind_gs_state(ctx
->pipe
, handle
);
894 void cso_delete_geometry_shader(struct cso_context
*ctx
, void *handle
)
896 if (handle
== ctx
->geometry_shader
) {
897 /* unbind before deleting */
898 ctx
->pipe
->bind_gs_state(ctx
->pipe
, NULL
);
899 ctx
->geometry_shader
= NULL
;
901 ctx
->pipe
->delete_gs_state(ctx
->pipe
, handle
);
905 cso_save_geometry_shader(struct cso_context
*ctx
)
907 if (!ctx
->has_geometry_shader
) {
911 assert(!ctx
->geometry_shader_saved
);
912 ctx
->geometry_shader_saved
= ctx
->geometry_shader
;
916 cso_restore_geometry_shader(struct cso_context
*ctx
)
918 if (!ctx
->has_geometry_shader
) {
922 if (ctx
->geometry_shader_saved
!= ctx
->geometry_shader
) {
923 ctx
->pipe
->bind_gs_state(ctx
->pipe
, ctx
->geometry_shader_saved
);
924 ctx
->geometry_shader
= ctx
->geometry_shader_saved
;
926 ctx
->geometry_shader_saved
= NULL
;
929 void cso_set_tessctrl_shader_handle(struct cso_context
*ctx
, void *handle
)
931 assert(ctx
->has_tessellation
|| !handle
);
933 if (ctx
->has_tessellation
&& ctx
->tessctrl_shader
!= handle
) {
934 ctx
->tessctrl_shader
= handle
;
935 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, handle
);
939 void cso_delete_tessctrl_shader(struct cso_context
*ctx
, void *handle
)
941 if (handle
== ctx
->tessctrl_shader
) {
942 /* unbind before deleting */
943 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, NULL
);
944 ctx
->tessctrl_shader
= NULL
;
946 ctx
->pipe
->delete_tcs_state(ctx
->pipe
, handle
);
950 cso_save_tessctrl_shader(struct cso_context
*ctx
)
952 if (!ctx
->has_tessellation
) {
956 assert(!ctx
->tessctrl_shader_saved
);
957 ctx
->tessctrl_shader_saved
= ctx
->tessctrl_shader
;
961 cso_restore_tessctrl_shader(struct cso_context
*ctx
)
963 if (!ctx
->has_tessellation
) {
967 if (ctx
->tessctrl_shader_saved
!= ctx
->tessctrl_shader
) {
968 ctx
->pipe
->bind_tcs_state(ctx
->pipe
, ctx
->tessctrl_shader_saved
);
969 ctx
->tessctrl_shader
= ctx
->tessctrl_shader_saved
;
971 ctx
->tessctrl_shader_saved
= NULL
;
974 void cso_set_tesseval_shader_handle(struct cso_context
*ctx
, void *handle
)
976 assert(ctx
->has_tessellation
|| !handle
);
978 if (ctx
->has_tessellation
&& ctx
->tesseval_shader
!= handle
) {
979 ctx
->tesseval_shader
= handle
;
980 ctx
->pipe
->bind_tes_state(ctx
->pipe
, handle
);
984 void cso_delete_tesseval_shader(struct cso_context
*ctx
, void *handle
)
986 if (handle
== ctx
->tesseval_shader
) {
987 /* unbind before deleting */
988 ctx
->pipe
->bind_tes_state(ctx
->pipe
, NULL
);
989 ctx
->tesseval_shader
= NULL
;
991 ctx
->pipe
->delete_tes_state(ctx
->pipe
, handle
);
995 cso_save_tesseval_shader(struct cso_context
*ctx
)
997 if (!ctx
->has_tessellation
) {
1001 assert(!ctx
->tesseval_shader_saved
);
1002 ctx
->tesseval_shader_saved
= ctx
->tesseval_shader
;
1006 cso_restore_tesseval_shader(struct cso_context
*ctx
)
1008 if (!ctx
->has_tessellation
) {
1012 if (ctx
->tesseval_shader_saved
!= ctx
->tesseval_shader
) {
1013 ctx
->pipe
->bind_tes_state(ctx
->pipe
, ctx
->tesseval_shader_saved
);
1014 ctx
->tesseval_shader
= ctx
->tesseval_shader_saved
;
1016 ctx
->tesseval_shader_saved
= NULL
;
1019 void cso_set_compute_shader_handle(struct cso_context
*ctx
, void *handle
)
1021 assert(ctx
->has_compute_shader
|| !handle
);
1023 if (ctx
->has_compute_shader
&& ctx
->compute_shader
!= handle
) {
1024 ctx
->compute_shader
= handle
;
1025 ctx
->pipe
->bind_compute_state(ctx
->pipe
, handle
);
1029 void cso_delete_compute_shader(struct cso_context
*ctx
, void *handle
)
1031 if (handle
== ctx
->compute_shader
) {
1032 /* unbind before deleting */
1033 ctx
->pipe
->bind_compute_state(ctx
->pipe
, NULL
);
1034 ctx
->compute_shader
= NULL
;
1036 ctx
->pipe
->delete_compute_state(ctx
->pipe
, handle
);
1040 cso_set_vertex_elements(struct cso_context
*ctx
,
1042 const struct pipe_vertex_element
*states
)
1044 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1045 unsigned key_size
, hash_key
;
1046 struct cso_hash_iter iter
;
1048 struct cso_velems_state velems_state
;
1051 u_vbuf_set_vertex_elements(vbuf
, count
, states
);
1055 /* Need to include the count into the stored state data too.
1056 * Otherwise first few count pipe_vertex_elements could be identical
1057 * even if count is different, and there's no guarantee the hash would
1058 * be different in that case neither.
1060 key_size
= sizeof(struct pipe_vertex_element
) * count
+ sizeof(unsigned);
1061 velems_state
.count
= count
;
1062 memcpy(velems_state
.velems
, states
,
1063 sizeof(struct pipe_vertex_element
) * count
);
1064 hash_key
= cso_construct_key((void*)&velems_state
, key_size
);
1065 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_VELEMENTS
,
1066 (void*)&velems_state
, key_size
);
1068 if (cso_hash_iter_is_null(iter
)) {
1069 struct cso_velements
*cso
= MALLOC(sizeof(struct cso_velements
));
1071 return PIPE_ERROR_OUT_OF_MEMORY
;
1073 memcpy(&cso
->state
, &velems_state
, key_size
);
1074 cso
->data
= ctx
->pipe
->create_vertex_elements_state(ctx
->pipe
, count
,
1075 &cso
->state
.velems
[0]);
1077 (cso_state_callback
) ctx
->pipe
->delete_vertex_elements_state
;
1078 cso
->context
= ctx
->pipe
;
1080 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_VELEMENTS
, cso
);
1081 if (cso_hash_iter_is_null(iter
)) {
1083 return PIPE_ERROR_OUT_OF_MEMORY
;
1089 handle
= ((struct cso_velements
*)cso_hash_iter_data(iter
))->data
;
1092 if (ctx
->velements
!= handle
) {
1093 ctx
->velements
= handle
;
1094 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, handle
);
1100 cso_save_vertex_elements(struct cso_context
*ctx
)
1102 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1105 u_vbuf_save_vertex_elements(vbuf
);
1109 assert(!ctx
->velements_saved
);
1110 ctx
->velements_saved
= ctx
->velements
;
1114 cso_restore_vertex_elements(struct cso_context
*ctx
)
1116 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1119 u_vbuf_restore_vertex_elements(vbuf
);
1123 if (ctx
->velements
!= ctx
->velements_saved
) {
1124 ctx
->velements
= ctx
->velements_saved
;
1125 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, ctx
->velements_saved
);
1127 ctx
->velements_saved
= NULL
;
1130 /* vertex buffers */
1132 void cso_set_vertex_buffers(struct cso_context
*ctx
,
1133 unsigned start_slot
, unsigned count
,
1134 const struct pipe_vertex_buffer
*buffers
)
1136 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1139 u_vbuf_set_vertex_buffers(vbuf
, start_slot
, count
, buffers
);
1143 /* Save what's in the auxiliary slot, so that we can save and restore it
1145 if (start_slot
<= ctx
->aux_vertex_buffer_index
&&
1146 start_slot
+count
> ctx
->aux_vertex_buffer_index
) {
1148 const struct pipe_vertex_buffer
*vb
=
1149 buffers
+ (ctx
->aux_vertex_buffer_index
- start_slot
);
1151 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
,
1153 memcpy(&ctx
->aux_vertex_buffer_current
, vb
,
1154 sizeof(struct pipe_vertex_buffer
));
1157 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
,
1159 ctx
->aux_vertex_buffer_current
.user_buffer
= NULL
;
1163 ctx
->pipe
->set_vertex_buffers(ctx
->pipe
, start_slot
, count
, buffers
);
1167 cso_save_aux_vertex_buffer_slot(struct cso_context
*ctx
)
1169 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1172 u_vbuf_save_aux_vertex_buffer_slot(vbuf
);
1176 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
,
1177 ctx
->aux_vertex_buffer_current
.buffer
);
1178 memcpy(&ctx
->aux_vertex_buffer_saved
, &ctx
->aux_vertex_buffer_current
,
1179 sizeof(struct pipe_vertex_buffer
));
1183 cso_restore_aux_vertex_buffer_slot(struct cso_context
*ctx
)
1185 struct u_vbuf
*vbuf
= ctx
->vbuf
;
1188 u_vbuf_restore_aux_vertex_buffer_slot(vbuf
);
1192 cso_set_vertex_buffers(ctx
, ctx
->aux_vertex_buffer_index
, 1,
1193 &ctx
->aux_vertex_buffer_saved
);
1194 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
, NULL
);
1197 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context
*ctx
)
1199 return ctx
->aux_vertex_buffer_index
;
1205 cso_single_sampler(struct cso_context
*ctx
, unsigned shader_stage
,
1206 unsigned idx
, const struct pipe_sampler_state
*templ
)
1209 unsigned key_size
= sizeof(struct pipe_sampler_state
);
1210 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
1211 struct cso_sampler
*cso
;
1212 struct cso_hash_iter iter
=
1213 cso_find_state_template(ctx
->cache
,
1214 hash_key
, CSO_SAMPLER
,
1215 (void *) templ
, key_size
);
1217 if (cso_hash_iter_is_null(iter
)) {
1218 cso
= MALLOC(sizeof(struct cso_sampler
));
1220 return PIPE_ERROR_OUT_OF_MEMORY
;
1222 memcpy(&cso
->state
, templ
, sizeof(*templ
));
1223 cso
->data
= ctx
->pipe
->create_sampler_state(ctx
->pipe
, &cso
->state
);
1225 (cso_state_callback
) ctx
->pipe
->delete_sampler_state
;
1226 cso
->context
= ctx
->pipe
;
1227 cso
->hash_key
= hash_key
;
1229 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_SAMPLER
, cso
);
1230 if (cso_hash_iter_is_null(iter
)) {
1232 return PIPE_ERROR_OUT_OF_MEMORY
;
1236 cso
= cso_hash_iter_data(iter
);
1239 ctx
->samplers
[shader_stage
].cso_samplers
[idx
] = cso
;
1240 ctx
->samplers
[shader_stage
].samplers
[idx
] = cso
->data
;
1242 ctx
->samplers
[shader_stage
].cso_samplers
[idx
] = NULL
;
1243 ctx
->samplers
[shader_stage
].samplers
[idx
] = NULL
;
1251 * Send staged sampler state to the driver.
1254 cso_single_sampler_done(struct cso_context
*ctx
,
1255 enum pipe_shader_type shader_stage
)
1257 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1258 const unsigned old_nr_samplers
= info
->nr_samplers
;
1261 /* find highest non-null sampler */
1262 for (i
= PIPE_MAX_SAMPLERS
; i
> 0; i
--) {
1263 if (info
->samplers
[i
- 1] != NULL
)
1267 info
->nr_samplers
= i
;
1268 ctx
->pipe
->bind_sampler_states(ctx
->pipe
, shader_stage
, 0,
1269 MAX2(old_nr_samplers
, info
->nr_samplers
),
1275 * If the function encouters any errors it will return the
1276 * last one. Done to always try to set as many samplers
1280 cso_set_samplers(struct cso_context
*ctx
,
1281 enum pipe_shader_type shader_stage
,
1283 const struct pipe_sampler_state
**templates
)
1285 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1287 enum pipe_error temp
, error
= PIPE_OK
;
1289 for (i
= 0; i
< nr
; i
++) {
1290 temp
= cso_single_sampler(ctx
, shader_stage
, i
, templates
[i
]);
1291 if (temp
!= PIPE_OK
)
1295 for ( ; i
< info
->nr_samplers
; i
++) {
1296 temp
= cso_single_sampler(ctx
, shader_stage
, i
, NULL
);
1297 if (temp
!= PIPE_OK
)
1301 cso_single_sampler_done(ctx
, shader_stage
);
1307 cso_save_fragment_samplers(struct cso_context
*ctx
)
1309 struct sampler_info
*info
= &ctx
->samplers
[PIPE_SHADER_FRAGMENT
];
1310 struct sampler_info
*saved
= &ctx
->fragment_samplers_saved
;
1312 saved
->nr_samplers
= info
->nr_samplers
;
1313 memcpy(saved
->cso_samplers
, info
->cso_samplers
, info
->nr_samplers
*
1314 sizeof(*info
->cso_samplers
));
1315 memcpy(saved
->samplers
, info
->samplers
, info
->nr_samplers
*
1316 sizeof(*info
->samplers
));
1321 cso_restore_fragment_samplers(struct cso_context
*ctx
)
1323 struct sampler_info
*info
= &ctx
->samplers
[PIPE_SHADER_FRAGMENT
];
1324 struct sampler_info
*saved
= &ctx
->fragment_samplers_saved
;
1325 int delta
= (int)info
->nr_samplers
- saved
->nr_samplers
;
1327 memcpy(info
->cso_samplers
, saved
->cso_samplers
,
1328 saved
->nr_samplers
* sizeof(*info
->cso_samplers
));
1329 memcpy(info
->samplers
, saved
->samplers
,
1330 saved
->nr_samplers
* sizeof(*info
->samplers
));
1333 memset(&info
->cso_samplers
[saved
->nr_samplers
], 0,
1334 delta
* sizeof(*info
->cso_samplers
));
1335 memset(&info
->samplers
[saved
->nr_samplers
], 0,
1336 delta
* sizeof(*info
->samplers
));
1339 cso_single_sampler_done(ctx
, PIPE_SHADER_FRAGMENT
);
1344 cso_set_sampler_views(struct cso_context
*ctx
,
1345 enum pipe_shader_type shader_stage
,
1347 struct pipe_sampler_view
**views
)
1349 if (shader_stage
== PIPE_SHADER_FRAGMENT
) {
1351 boolean any_change
= FALSE
;
1353 /* reference new views */
1354 for (i
= 0; i
< count
; i
++) {
1355 any_change
|= ctx
->fragment_views
[i
] != views
[i
];
1356 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], views
[i
]);
1358 /* unref extra old views, if any */
1359 for (; i
< ctx
->nr_fragment_views
; i
++) {
1360 any_change
|= ctx
->fragment_views
[i
] != NULL
;
1361 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1364 /* bind the new sampler views */
1366 ctx
->pipe
->set_sampler_views(ctx
->pipe
, shader_stage
, 0,
1367 MAX2(ctx
->nr_fragment_views
, count
),
1368 ctx
->fragment_views
);
1371 ctx
->nr_fragment_views
= count
;
1374 ctx
->pipe
->set_sampler_views(ctx
->pipe
, shader_stage
, 0, count
, views
);
1379 cso_save_fragment_sampler_views(struct cso_context
*ctx
)
1383 ctx
->nr_fragment_views_saved
= ctx
->nr_fragment_views
;
1385 for (i
= 0; i
< ctx
->nr_fragment_views
; i
++) {
1386 assert(!ctx
->fragment_views_saved
[i
]);
1387 pipe_sampler_view_reference(&ctx
->fragment_views_saved
[i
],
1388 ctx
->fragment_views
[i
]);
1394 cso_restore_fragment_sampler_views(struct cso_context
*ctx
)
1396 unsigned i
, nr_saved
= ctx
->nr_fragment_views_saved
;
1399 for (i
= 0; i
< nr_saved
; i
++) {
1400 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1401 /* move the reference from one pointer to another */
1402 ctx
->fragment_views
[i
] = ctx
->fragment_views_saved
[i
];
1403 ctx
->fragment_views_saved
[i
] = NULL
;
1405 for (; i
< ctx
->nr_fragment_views
; i
++) {
1406 pipe_sampler_view_reference(&ctx
->fragment_views
[i
], NULL
);
1409 num
= MAX2(ctx
->nr_fragment_views
, nr_saved
);
1411 /* bind the old/saved sampler views */
1412 ctx
->pipe
->set_sampler_views(ctx
->pipe
, PIPE_SHADER_FRAGMENT
, 0, num
,
1413 ctx
->fragment_views
);
1415 ctx
->nr_fragment_views
= nr_saved
;
1416 ctx
->nr_fragment_views_saved
= 0;
1421 cso_set_shader_images(struct cso_context
*ctx
,
1422 enum pipe_shader_type shader_stage
,
1423 unsigned start
, unsigned count
,
1424 struct pipe_image_view
*images
)
1426 if (shader_stage
== PIPE_SHADER_FRAGMENT
&& start
== 0 && count
>= 1) {
1427 util_copy_image_view(&ctx
->fragment_image0_current
, &images
[0]);
1430 ctx
->pipe
->set_shader_images(ctx
->pipe
, shader_stage
, start
, count
, images
);
1435 cso_save_fragment_image0(struct cso_context
*ctx
)
1437 util_copy_image_view(&ctx
->fragment_image0_saved
,
1438 &ctx
->fragment_image0_current
);
1443 cso_restore_fragment_image0(struct cso_context
*ctx
)
1445 cso_set_shader_images(ctx
, PIPE_SHADER_FRAGMENT
, 0, 1,
1446 &ctx
->fragment_image0_saved
);
1451 cso_set_stream_outputs(struct cso_context
*ctx
,
1452 unsigned num_targets
,
1453 struct pipe_stream_output_target
**targets
,
1454 const unsigned *offsets
)
1456 struct pipe_context
*pipe
= ctx
->pipe
;
1459 if (!ctx
->has_streamout
) {
1460 assert(num_targets
== 0);
1464 if (ctx
->nr_so_targets
== 0 && num_targets
== 0) {
1465 /* Nothing to do. */
1469 /* reference new targets */
1470 for (i
= 0; i
< num_targets
; i
++) {
1471 pipe_so_target_reference(&ctx
->so_targets
[i
], targets
[i
]);
1473 /* unref extra old targets, if any */
1474 for (; i
< ctx
->nr_so_targets
; i
++) {
1475 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1478 pipe
->set_stream_output_targets(pipe
, num_targets
, targets
,
1480 ctx
->nr_so_targets
= num_targets
;
1484 cso_save_stream_outputs(struct cso_context
*ctx
)
1488 if (!ctx
->has_streamout
) {
1492 ctx
->nr_so_targets_saved
= ctx
->nr_so_targets
;
1494 for (i
= 0; i
< ctx
->nr_so_targets
; i
++) {
1495 assert(!ctx
->so_targets_saved
[i
]);
1496 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], ctx
->so_targets
[i
]);
1501 cso_restore_stream_outputs(struct cso_context
*ctx
)
1503 struct pipe_context
*pipe
= ctx
->pipe
;
1505 unsigned offset
[PIPE_MAX_SO_BUFFERS
];
1507 if (!ctx
->has_streamout
) {
1511 if (ctx
->nr_so_targets
== 0 && ctx
->nr_so_targets_saved
== 0) {
1512 /* Nothing to do. */
1516 assert(ctx
->nr_so_targets_saved
<= PIPE_MAX_SO_BUFFERS
);
1517 for (i
= 0; i
< ctx
->nr_so_targets_saved
; i
++) {
1518 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1519 /* move the reference from one pointer to another */
1520 ctx
->so_targets
[i
] = ctx
->so_targets_saved
[i
];
1521 ctx
->so_targets_saved
[i
] = NULL
;
1522 /* -1 means append */
1523 offset
[i
] = (unsigned)-1;
1525 for (; i
< ctx
->nr_so_targets
; i
++) {
1526 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1529 pipe
->set_stream_output_targets(pipe
, ctx
->nr_so_targets_saved
,
1530 ctx
->so_targets
, offset
);
1532 ctx
->nr_so_targets
= ctx
->nr_so_targets_saved
;
1533 ctx
->nr_so_targets_saved
= 0;
1536 /* constant buffers */
1539 cso_set_constant_buffer(struct cso_context
*cso
, unsigned shader_stage
,
1540 unsigned index
, struct pipe_constant_buffer
*cb
)
1542 struct pipe_context
*pipe
= cso
->pipe
;
1544 pipe
->set_constant_buffer(pipe
, shader_stage
, index
, cb
);
1547 util_copy_constant_buffer(&cso
->aux_constbuf_current
[shader_stage
], cb
);
1552 cso_set_constant_buffer_resource(struct cso_context
*cso
,
1553 unsigned shader_stage
,
1555 struct pipe_resource
*buffer
)
1558 struct pipe_constant_buffer cb
;
1560 cb
.buffer_offset
= 0;
1561 cb
.buffer_size
= buffer
->width0
;
1562 cb
.user_buffer
= NULL
;
1563 cso_set_constant_buffer(cso
, shader_stage
, index
, &cb
);
1565 cso_set_constant_buffer(cso
, shader_stage
, index
, NULL
);
1570 cso_save_constant_buffer_slot0(struct cso_context
*cso
,
1571 unsigned shader_stage
)
1573 util_copy_constant_buffer(&cso
->aux_constbuf_saved
[shader_stage
],
1574 &cso
->aux_constbuf_current
[shader_stage
]);
1578 cso_restore_constant_buffer_slot0(struct cso_context
*cso
,
1579 unsigned shader_stage
)
1581 cso_set_constant_buffer(cso
, shader_stage
, 0,
1582 &cso
->aux_constbuf_saved
[shader_stage
]);
1583 pipe_resource_reference(&cso
->aux_constbuf_saved
[shader_stage
].buffer
,
1589 * Save all the CSO state items specified by the state_mask bitmask
1590 * of CSO_BIT_x flags.
1593 cso_save_state(struct cso_context
*cso
, unsigned state_mask
)
1595 assert(cso
->saved_state
== 0);
1597 cso
->saved_state
= state_mask
;
1599 if (state_mask
& CSO_BIT_AUX_VERTEX_BUFFER_SLOT
)
1600 cso_save_aux_vertex_buffer_slot(cso
);
1601 if (state_mask
& CSO_BIT_BLEND
)
1602 cso_save_blend(cso
);
1603 if (state_mask
& CSO_BIT_DEPTH_STENCIL_ALPHA
)
1604 cso_save_depth_stencil_alpha(cso
);
1605 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLERS
)
1606 cso_save_fragment_samplers(cso
);
1607 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLER_VIEWS
)
1608 cso_save_fragment_sampler_views(cso
);
1609 if (state_mask
& CSO_BIT_FRAGMENT_SHADER
)
1610 cso_save_fragment_shader(cso
);
1611 if (state_mask
& CSO_BIT_FRAMEBUFFER
)
1612 cso_save_framebuffer(cso
);
1613 if (state_mask
& CSO_BIT_GEOMETRY_SHADER
)
1614 cso_save_geometry_shader(cso
);
1615 if (state_mask
& CSO_BIT_MIN_SAMPLES
)
1616 cso_save_min_samples(cso
);
1617 if (state_mask
& CSO_BIT_RASTERIZER
)
1618 cso_save_rasterizer(cso
);
1619 if (state_mask
& CSO_BIT_RENDER_CONDITION
)
1620 cso_save_render_condition(cso
);
1621 if (state_mask
& CSO_BIT_SAMPLE_MASK
)
1622 cso_save_sample_mask(cso
);
1623 if (state_mask
& CSO_BIT_STENCIL_REF
)
1624 cso_save_stencil_ref(cso
);
1625 if (state_mask
& CSO_BIT_STREAM_OUTPUTS
)
1626 cso_save_stream_outputs(cso
);
1627 if (state_mask
& CSO_BIT_TESSCTRL_SHADER
)
1628 cso_save_tessctrl_shader(cso
);
1629 if (state_mask
& CSO_BIT_TESSEVAL_SHADER
)
1630 cso_save_tesseval_shader(cso
);
1631 if (state_mask
& CSO_BIT_VERTEX_ELEMENTS
)
1632 cso_save_vertex_elements(cso
);
1633 if (state_mask
& CSO_BIT_VERTEX_SHADER
)
1634 cso_save_vertex_shader(cso
);
1635 if (state_mask
& CSO_BIT_VIEWPORT
)
1636 cso_save_viewport(cso
);
1637 if (state_mask
& CSO_BIT_PAUSE_QUERIES
)
1638 cso
->pipe
->set_active_query_state(cso
->pipe
, false);
1639 if (state_mask
& CSO_BIT_FRAGMENT_IMAGE0
)
1640 cso_save_fragment_image0(cso
);
1645 * Restore the state which was saved by cso_save_state().
1648 cso_restore_state(struct cso_context
*cso
)
1650 unsigned state_mask
= cso
->saved_state
;
1654 if (state_mask
& CSO_BIT_AUX_VERTEX_BUFFER_SLOT
)
1655 cso_restore_aux_vertex_buffer_slot(cso
);
1656 if (state_mask
& CSO_BIT_BLEND
)
1657 cso_restore_blend(cso
);
1658 if (state_mask
& CSO_BIT_DEPTH_STENCIL_ALPHA
)
1659 cso_restore_depth_stencil_alpha(cso
);
1660 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLERS
)
1661 cso_restore_fragment_samplers(cso
);
1662 if (state_mask
& CSO_BIT_FRAGMENT_SAMPLER_VIEWS
)
1663 cso_restore_fragment_sampler_views(cso
);
1664 if (state_mask
& CSO_BIT_FRAGMENT_SHADER
)
1665 cso_restore_fragment_shader(cso
);
1666 if (state_mask
& CSO_BIT_FRAMEBUFFER
)
1667 cso_restore_framebuffer(cso
);
1668 if (state_mask
& CSO_BIT_GEOMETRY_SHADER
)
1669 cso_restore_geometry_shader(cso
);
1670 if (state_mask
& CSO_BIT_MIN_SAMPLES
)
1671 cso_restore_min_samples(cso
);
1672 if (state_mask
& CSO_BIT_RASTERIZER
)
1673 cso_restore_rasterizer(cso
);
1674 if (state_mask
& CSO_BIT_RENDER_CONDITION
)
1675 cso_restore_render_condition(cso
);
1676 if (state_mask
& CSO_BIT_SAMPLE_MASK
)
1677 cso_restore_sample_mask(cso
);
1678 if (state_mask
& CSO_BIT_STENCIL_REF
)
1679 cso_restore_stencil_ref(cso
);
1680 if (state_mask
& CSO_BIT_STREAM_OUTPUTS
)
1681 cso_restore_stream_outputs(cso
);
1682 if (state_mask
& CSO_BIT_TESSCTRL_SHADER
)
1683 cso_restore_tessctrl_shader(cso
);
1684 if (state_mask
& CSO_BIT_TESSEVAL_SHADER
)
1685 cso_restore_tesseval_shader(cso
);
1686 if (state_mask
& CSO_BIT_VERTEX_ELEMENTS
)
1687 cso_restore_vertex_elements(cso
);
1688 if (state_mask
& CSO_BIT_VERTEX_SHADER
)
1689 cso_restore_vertex_shader(cso
);
1690 if (state_mask
& CSO_BIT_VIEWPORT
)
1691 cso_restore_viewport(cso
);
1692 if (state_mask
& CSO_BIT_PAUSE_QUERIES
)
1693 cso
->pipe
->set_active_query_state(cso
->pipe
, true);
1694 if (state_mask
& CSO_BIT_FRAGMENT_IMAGE0
)
1695 cso_restore_fragment_image0(cso
);
1697 cso
->saved_state
= 0;
1705 cso_set_index_buffer(struct cso_context
*cso
,
1706 const struct pipe_index_buffer
*ib
)
1708 struct u_vbuf
*vbuf
= cso
->vbuf
;
1711 u_vbuf_set_index_buffer(vbuf
, ib
);
1713 struct pipe_context
*pipe
= cso
->pipe
;
1714 pipe
->set_index_buffer(pipe
, ib
);
1719 cso_draw_vbo(struct cso_context
*cso
,
1720 const struct pipe_draw_info
*info
)
1722 struct u_vbuf
*vbuf
= cso
->vbuf
;
1725 u_vbuf_draw_vbo(vbuf
, info
);
1727 struct pipe_context
*pipe
= cso
->pipe
;
1728 pipe
->draw_vbo(pipe
, info
);
1733 cso_draw_arrays(struct cso_context
*cso
, uint mode
, uint start
, uint count
)
1735 struct pipe_draw_info info
;
1737 util_draw_init_info(&info
);
1742 info
.min_index
= start
;
1743 info
.max_index
= start
+ count
- 1;
1745 cso_draw_vbo(cso
, &info
);
1749 cso_draw_arrays_instanced(struct cso_context
*cso
, uint mode
,
1750 uint start
, uint count
,
1751 uint start_instance
, uint instance_count
)
1753 struct pipe_draw_info info
;
1755 util_draw_init_info(&info
);
1760 info
.min_index
= start
;
1761 info
.max_index
= start
+ count
- 1;
1762 info
.start_instance
= start_instance
;
1763 info
.instance_count
= instance_count
;
1765 cso_draw_vbo(cso
, &info
);