1 /**************************************************************************
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
54 * Info related to samplers and sampler views.
55 * We have one of these for fragment samplers and another for vertex samplers.
60 void *samplers
[PIPE_MAX_SAMPLERS
];
64 void *samplers
[PIPE_MAX_SAMPLERS
];
67 void *samplers_saved
[PIPE_MAX_SAMPLERS
];
68 unsigned nr_samplers_saved
;
70 struct pipe_sampler_view
*views
[PIPE_MAX_SAMPLERS
];
73 struct pipe_sampler_view
*views_saved
[PIPE_MAX_SAMPLERS
];
74 unsigned nr_views_saved
;
80 struct pipe_context
*pipe
;
81 struct cso_cache
*cache
;
84 boolean has_geometry_shader
;
85 boolean has_streamout
;
87 struct sampler_info samplers
[PIPE_SHADER_TYPES
];
89 struct pipe_vertex_buffer aux_vertex_buffer_current
;
90 struct pipe_vertex_buffer aux_vertex_buffer_saved
;
91 unsigned aux_vertex_buffer_index
;
93 unsigned nr_so_targets
;
94 struct pipe_stream_output_target
*so_targets
[PIPE_MAX_SO_BUFFERS
];
96 unsigned nr_so_targets_saved
;
97 struct pipe_stream_output_target
*so_targets_saved
[PIPE_MAX_SO_BUFFERS
];
99 /** Current and saved state.
100 * The saved state is used as a 1-deep stack.
102 void *blend
, *blend_saved
;
103 void *depth_stencil
, *depth_stencil_saved
;
104 void *rasterizer
, *rasterizer_saved
;
105 void *fragment_shader
, *fragment_shader_saved
;
106 void *vertex_shader
, *vertex_shader_saved
;
107 void *geometry_shader
, *geometry_shader_saved
;
108 void *velements
, *velements_saved
;
110 struct pipe_clip_state clip
;
111 struct pipe_clip_state clip_saved
;
113 struct pipe_framebuffer_state fb
, fb_saved
;
114 struct pipe_viewport_state vp
, vp_saved
;
115 struct pipe_blend_color blend_color
;
116 unsigned sample_mask
, sample_mask_saved
;
117 struct pipe_stencil_ref stencil_ref
, stencil_ref_saved
;
121 static boolean
delete_blend_state(struct cso_context
*ctx
, void *state
)
123 struct cso_blend
*cso
= (struct cso_blend
*)state
;
125 if (ctx
->blend
== cso
->data
)
128 if (cso
->delete_state
)
129 cso
->delete_state(cso
->context
, cso
->data
);
134 static boolean
delete_depth_stencil_state(struct cso_context
*ctx
, void *state
)
136 struct cso_depth_stencil_alpha
*cso
=
137 (struct cso_depth_stencil_alpha
*)state
;
139 if (ctx
->depth_stencil
== cso
->data
)
142 if (cso
->delete_state
)
143 cso
->delete_state(cso
->context
, cso
->data
);
149 static boolean
delete_sampler_state(struct cso_context
*ctx
, void *state
)
151 struct cso_sampler
*cso
= (struct cso_sampler
*)state
;
152 if (cso
->delete_state
)
153 cso
->delete_state(cso
->context
, cso
->data
);
158 static boolean
delete_rasterizer_state(struct cso_context
*ctx
, void *state
)
160 struct cso_rasterizer
*cso
= (struct cso_rasterizer
*)state
;
162 if (ctx
->rasterizer
== cso
->data
)
164 if (cso
->delete_state
)
165 cso
->delete_state(cso
->context
, cso
->data
);
170 static boolean
delete_vertex_elements(struct cso_context
*ctx
,
173 struct cso_velements
*cso
= (struct cso_velements
*)state
;
175 if (ctx
->velements
== cso
->data
)
178 if (cso
->delete_state
)
179 cso
->delete_state(cso
->context
, cso
->data
);
185 static INLINE boolean
delete_cso(struct cso_context
*ctx
,
186 void *state
, enum cso_cache_type type
)
190 return delete_blend_state(ctx
, state
);
192 return delete_sampler_state(ctx
, state
);
193 case CSO_DEPTH_STENCIL_ALPHA
:
194 return delete_depth_stencil_state(ctx
, state
);
196 return delete_rasterizer_state(ctx
, state
);
198 return delete_vertex_elements(ctx
, state
);
207 sanitize_hash(struct cso_hash
*hash
, enum cso_cache_type type
,
208 int max_size
, void *user_data
)
210 struct cso_context
*ctx
= (struct cso_context
*)user_data
;
211 /* if we're approach the maximum size, remove fourth of the entries
212 * otherwise every subsequent call will go through the same */
213 int hash_size
= cso_hash_size(hash
);
214 int max_entries
= (max_size
> hash_size
) ? max_size
: hash_size
;
215 int to_remove
= (max_size
< max_entries
) * max_entries
/4;
216 struct cso_hash_iter iter
= cso_hash_first_node(hash
);
217 if (hash_size
> max_size
)
218 to_remove
+= hash_size
- max_size
;
220 /*remove elements until we're good */
221 /*fixme: currently we pick the nodes to remove at random*/
222 void *cso
= cso_hash_iter_data(iter
);
223 if (delete_cso(ctx
, cso
, type
)) {
224 iter
= cso_hash_erase(hash
, iter
);
227 iter
= cso_hash_iter_next(iter
);
231 static void cso_init_vbuf(struct cso_context
*cso
)
233 struct u_vbuf_caps caps
;
235 u_vbuf_get_caps(cso
->pipe
->screen
, &caps
);
237 /* Install u_vbuf if there is anything unsupported. */
238 if (!caps
.buffer_offset_unaligned
||
239 !caps
.buffer_stride_unaligned
||
240 !caps
.velem_src_offset_unaligned
||
241 !caps
.format_fixed32
||
242 !caps
.format_float16
||
243 !caps
.format_float64
||
244 !caps
.format_norm32
||
245 !caps
.format_scaled32
||
246 !caps
.user_vertex_buffers
) {
247 cso
->vbuf
= u_vbuf_create(cso
->pipe
, &caps
,
248 cso
->aux_vertex_buffer_index
);
252 struct cso_context
*cso_create_context( struct pipe_context
*pipe
)
254 struct cso_context
*ctx
= CALLOC_STRUCT(cso_context
);
258 ctx
->cache
= cso_cache_create();
259 if (ctx
->cache
== NULL
)
261 cso_cache_set_sanitize_callback(ctx
->cache
,
266 ctx
->sample_mask_saved
= ~0;
268 ctx
->aux_vertex_buffer_index
= 0; /* 0 for now */
272 /* Enable for testing: */
273 if (0) cso_set_maximum_cache_size( ctx
->cache
, 4 );
275 if (pipe
->screen
->get_shader_param(pipe
->screen
, PIPE_SHADER_GEOMETRY
,
276 PIPE_SHADER_CAP_MAX_INSTRUCTIONS
) > 0) {
277 ctx
->has_geometry_shader
= TRUE
;
279 if (pipe
->screen
->get_param(pipe
->screen
,
280 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS
) != 0) {
281 ctx
->has_streamout
= TRUE
;
287 cso_destroy_context( ctx
);
292 * Prior to context destruction, this function unbinds all state objects.
294 void cso_release_all( struct cso_context
*ctx
)
299 ctx
->pipe
->bind_blend_state( ctx
->pipe
, NULL
);
300 ctx
->pipe
->bind_rasterizer_state( ctx
->pipe
, NULL
);
301 ctx
->pipe
->bind_fragment_sampler_states( ctx
->pipe
, 0, NULL
);
302 if (ctx
->pipe
->bind_vertex_sampler_states
)
303 ctx
->pipe
->bind_vertex_sampler_states(ctx
->pipe
, 0, NULL
);
304 ctx
->pipe
->bind_depth_stencil_alpha_state( ctx
->pipe
, NULL
);
305 ctx
->pipe
->bind_fs_state( ctx
->pipe
, NULL
);
306 ctx
->pipe
->bind_vs_state( ctx
->pipe
, NULL
);
307 ctx
->pipe
->bind_vertex_elements_state( ctx
->pipe
, NULL
);
308 ctx
->pipe
->set_fragment_sampler_views(ctx
->pipe
, 0, NULL
);
309 if (ctx
->pipe
->set_vertex_sampler_views
)
310 ctx
->pipe
->set_vertex_sampler_views(ctx
->pipe
, 0, NULL
);
311 if (ctx
->pipe
->set_stream_output_targets
)
312 ctx
->pipe
->set_stream_output_targets(ctx
->pipe
, 0, NULL
, 0);
315 /* free fragment samplers, views */
316 for (shader
= 0; shader
< Elements(ctx
->samplers
); shader
++) {
317 struct sampler_info
*info
= &ctx
->samplers
[shader
];
318 for (i
= 0; i
< PIPE_MAX_SAMPLERS
; i
++) {
319 pipe_sampler_view_reference(&info
->views
[i
], NULL
);
320 pipe_sampler_view_reference(&info
->views_saved
[i
], NULL
);
324 util_unreference_framebuffer_state(&ctx
->fb
);
325 util_unreference_framebuffer_state(&ctx
->fb_saved
);
327 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
, NULL
);
328 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
, NULL
);
330 for (i
= 0; i
< PIPE_MAX_SO_BUFFERS
; i
++) {
331 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
332 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], NULL
);
336 cso_cache_delete( ctx
->cache
);
343 * Free the CSO context. NOTE: the state tracker should have previously called
346 void cso_destroy_context( struct cso_context
*ctx
)
350 u_vbuf_destroy(ctx
->vbuf
);
356 /* Those function will either find the state of the given template
357 * in the cache or they will create a new state from the given
358 * template, insert it in the cache and return it.
362 * If the driver returns 0 from the create method then they will assign
363 * the data member of the cso to be the template itself.
366 enum pipe_error
cso_set_blend(struct cso_context
*ctx
,
367 const struct pipe_blend_state
*templ
)
369 unsigned key_size
, hash_key
;
370 struct cso_hash_iter iter
;
373 key_size
= templ
->independent_blend_enable
?
374 sizeof(struct pipe_blend_state
) :
375 (char *)&(templ
->rt
[1]) - (char *)templ
;
376 hash_key
= cso_construct_key((void*)templ
, key_size
);
377 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_BLEND
,
378 (void*)templ
, key_size
);
380 if (cso_hash_iter_is_null(iter
)) {
381 struct cso_blend
*cso
= MALLOC(sizeof(struct cso_blend
));
383 return PIPE_ERROR_OUT_OF_MEMORY
;
385 memset(&cso
->state
, 0, sizeof cso
->state
);
386 memcpy(&cso
->state
, templ
, key_size
);
387 cso
->data
= ctx
->pipe
->create_blend_state(ctx
->pipe
, &cso
->state
);
388 cso
->delete_state
= (cso_state_callback
)ctx
->pipe
->delete_blend_state
;
389 cso
->context
= ctx
->pipe
;
391 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_BLEND
, cso
);
392 if (cso_hash_iter_is_null(iter
)) {
394 return PIPE_ERROR_OUT_OF_MEMORY
;
400 handle
= ((struct cso_blend
*)cso_hash_iter_data(iter
))->data
;
403 if (ctx
->blend
!= handle
) {
405 ctx
->pipe
->bind_blend_state(ctx
->pipe
, handle
);
410 void cso_save_blend(struct cso_context
*ctx
)
412 assert(!ctx
->blend_saved
);
413 ctx
->blend_saved
= ctx
->blend
;
416 void cso_restore_blend(struct cso_context
*ctx
)
418 if (ctx
->blend
!= ctx
->blend_saved
) {
419 ctx
->blend
= ctx
->blend_saved
;
420 ctx
->pipe
->bind_blend_state(ctx
->pipe
, ctx
->blend_saved
);
422 ctx
->blend_saved
= NULL
;
428 cso_set_depth_stencil_alpha(struct cso_context
*ctx
,
429 const struct pipe_depth_stencil_alpha_state
*templ
)
431 unsigned key_size
= sizeof(struct pipe_depth_stencil_alpha_state
);
432 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
433 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
435 CSO_DEPTH_STENCIL_ALPHA
,
436 (void*)templ
, key_size
);
439 if (cso_hash_iter_is_null(iter
)) {
440 struct cso_depth_stencil_alpha
*cso
=
441 MALLOC(sizeof(struct cso_depth_stencil_alpha
));
443 return PIPE_ERROR_OUT_OF_MEMORY
;
445 memcpy(&cso
->state
, templ
, sizeof(*templ
));
446 cso
->data
= ctx
->pipe
->create_depth_stencil_alpha_state(ctx
->pipe
,
449 (cso_state_callback
)ctx
->pipe
->delete_depth_stencil_alpha_state
;
450 cso
->context
= ctx
->pipe
;
452 iter
= cso_insert_state(ctx
->cache
, hash_key
,
453 CSO_DEPTH_STENCIL_ALPHA
, cso
);
454 if (cso_hash_iter_is_null(iter
)) {
456 return PIPE_ERROR_OUT_OF_MEMORY
;
462 handle
= ((struct cso_depth_stencil_alpha
*)
463 cso_hash_iter_data(iter
))->data
;
466 if (ctx
->depth_stencil
!= handle
) {
467 ctx
->depth_stencil
= handle
;
468 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
, handle
);
473 void cso_save_depth_stencil_alpha(struct cso_context
*ctx
)
475 assert(!ctx
->depth_stencil_saved
);
476 ctx
->depth_stencil_saved
= ctx
->depth_stencil
;
479 void cso_restore_depth_stencil_alpha(struct cso_context
*ctx
)
481 if (ctx
->depth_stencil
!= ctx
->depth_stencil_saved
) {
482 ctx
->depth_stencil
= ctx
->depth_stencil_saved
;
483 ctx
->pipe
->bind_depth_stencil_alpha_state(ctx
->pipe
,
484 ctx
->depth_stencil_saved
);
486 ctx
->depth_stencil_saved
= NULL
;
491 enum pipe_error
cso_set_rasterizer(struct cso_context
*ctx
,
492 const struct pipe_rasterizer_state
*templ
)
494 unsigned key_size
= sizeof(struct pipe_rasterizer_state
);
495 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
496 struct cso_hash_iter iter
= cso_find_state_template(ctx
->cache
,
499 (void*)templ
, key_size
);
502 if (cso_hash_iter_is_null(iter
)) {
503 struct cso_rasterizer
*cso
= MALLOC(sizeof(struct cso_rasterizer
));
505 return PIPE_ERROR_OUT_OF_MEMORY
;
507 memcpy(&cso
->state
, templ
, sizeof(*templ
));
508 cso
->data
= ctx
->pipe
->create_rasterizer_state(ctx
->pipe
, &cso
->state
);
510 (cso_state_callback
)ctx
->pipe
->delete_rasterizer_state
;
511 cso
->context
= ctx
->pipe
;
513 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_RASTERIZER
, cso
);
514 if (cso_hash_iter_is_null(iter
)) {
516 return PIPE_ERROR_OUT_OF_MEMORY
;
522 handle
= ((struct cso_rasterizer
*)cso_hash_iter_data(iter
))->data
;
525 if (ctx
->rasterizer
!= handle
) {
526 ctx
->rasterizer
= handle
;
527 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, handle
);
532 void cso_save_rasterizer(struct cso_context
*ctx
)
534 assert(!ctx
->rasterizer_saved
);
535 ctx
->rasterizer_saved
= ctx
->rasterizer
;
538 void cso_restore_rasterizer(struct cso_context
*ctx
)
540 if (ctx
->rasterizer
!= ctx
->rasterizer_saved
) {
541 ctx
->rasterizer
= ctx
->rasterizer_saved
;
542 ctx
->pipe
->bind_rasterizer_state(ctx
->pipe
, ctx
->rasterizer_saved
);
544 ctx
->rasterizer_saved
= NULL
;
549 enum pipe_error
cso_set_fragment_shader_handle(struct cso_context
*ctx
,
552 if (ctx
->fragment_shader
!= handle
) {
553 ctx
->fragment_shader
= handle
;
554 ctx
->pipe
->bind_fs_state(ctx
->pipe
, handle
);
559 void cso_delete_fragment_shader(struct cso_context
*ctx
, void *handle
)
561 if (handle
== ctx
->fragment_shader
) {
562 /* unbind before deleting */
563 ctx
->pipe
->bind_fs_state(ctx
->pipe
, NULL
);
564 ctx
->fragment_shader
= NULL
;
566 ctx
->pipe
->delete_fs_state(ctx
->pipe
, handle
);
569 void cso_save_fragment_shader(struct cso_context
*ctx
)
571 assert(!ctx
->fragment_shader_saved
);
572 ctx
->fragment_shader_saved
= ctx
->fragment_shader
;
575 void cso_restore_fragment_shader(struct cso_context
*ctx
)
577 if (ctx
->fragment_shader_saved
!= ctx
->fragment_shader
) {
578 ctx
->pipe
->bind_fs_state(ctx
->pipe
, ctx
->fragment_shader_saved
);
579 ctx
->fragment_shader
= ctx
->fragment_shader_saved
;
581 ctx
->fragment_shader_saved
= NULL
;
585 enum pipe_error
cso_set_vertex_shader_handle(struct cso_context
*ctx
,
588 if (ctx
->vertex_shader
!= handle
) {
589 ctx
->vertex_shader
= handle
;
590 ctx
->pipe
->bind_vs_state(ctx
->pipe
, handle
);
595 void cso_delete_vertex_shader(struct cso_context
*ctx
, void *handle
)
597 if (handle
== ctx
->vertex_shader
) {
598 /* unbind before deleting */
599 ctx
->pipe
->bind_vs_state(ctx
->pipe
, NULL
);
600 ctx
->vertex_shader
= NULL
;
602 ctx
->pipe
->delete_vs_state(ctx
->pipe
, handle
);
605 void cso_save_vertex_shader(struct cso_context
*ctx
)
607 assert(!ctx
->vertex_shader_saved
);
608 ctx
->vertex_shader_saved
= ctx
->vertex_shader
;
611 void cso_restore_vertex_shader(struct cso_context
*ctx
)
613 if (ctx
->vertex_shader_saved
!= ctx
->vertex_shader
) {
614 ctx
->pipe
->bind_vs_state(ctx
->pipe
, ctx
->vertex_shader_saved
);
615 ctx
->vertex_shader
= ctx
->vertex_shader_saved
;
617 ctx
->vertex_shader_saved
= NULL
;
621 enum pipe_error
cso_set_framebuffer(struct cso_context
*ctx
,
622 const struct pipe_framebuffer_state
*fb
)
624 if (memcmp(&ctx
->fb
, fb
, sizeof(*fb
)) != 0) {
625 util_copy_framebuffer_state(&ctx
->fb
, fb
);
626 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, fb
);
631 void cso_save_framebuffer(struct cso_context
*ctx
)
633 util_copy_framebuffer_state(&ctx
->fb_saved
, &ctx
->fb
);
636 void cso_restore_framebuffer(struct cso_context
*ctx
)
638 if (memcmp(&ctx
->fb
, &ctx
->fb_saved
, sizeof(ctx
->fb
))) {
639 util_copy_framebuffer_state(&ctx
->fb
, &ctx
->fb_saved
);
640 ctx
->pipe
->set_framebuffer_state(ctx
->pipe
, &ctx
->fb
);
641 util_unreference_framebuffer_state(&ctx
->fb_saved
);
646 enum pipe_error
cso_set_viewport(struct cso_context
*ctx
,
647 const struct pipe_viewport_state
*vp
)
649 if (memcmp(&ctx
->vp
, vp
, sizeof(*vp
))) {
651 ctx
->pipe
->set_viewport_state(ctx
->pipe
, vp
);
656 void cso_save_viewport(struct cso_context
*ctx
)
658 ctx
->vp_saved
= ctx
->vp
;
662 void cso_restore_viewport(struct cso_context
*ctx
)
664 if (memcmp(&ctx
->vp
, &ctx
->vp_saved
, sizeof(ctx
->vp
))) {
665 ctx
->vp
= ctx
->vp_saved
;
666 ctx
->pipe
->set_viewport_state(ctx
->pipe
, &ctx
->vp
);
671 enum pipe_error
cso_set_blend_color(struct cso_context
*ctx
,
672 const struct pipe_blend_color
*bc
)
674 if (memcmp(&ctx
->blend_color
, bc
, sizeof(ctx
->blend_color
))) {
675 ctx
->blend_color
= *bc
;
676 ctx
->pipe
->set_blend_color(ctx
->pipe
, bc
);
681 enum pipe_error
cso_set_sample_mask(struct cso_context
*ctx
,
682 unsigned sample_mask
)
684 if (ctx
->sample_mask
!= sample_mask
) {
685 ctx
->sample_mask
= sample_mask
;
686 ctx
->pipe
->set_sample_mask(ctx
->pipe
, sample_mask
);
691 void cso_save_sample_mask(struct cso_context
*ctx
)
693 ctx
->sample_mask_saved
= ctx
->sample_mask
;
696 void cso_restore_sample_mask(struct cso_context
*ctx
)
698 cso_set_sample_mask(ctx
, ctx
->sample_mask_saved
);
701 enum pipe_error
cso_set_stencil_ref(struct cso_context
*ctx
,
702 const struct pipe_stencil_ref
*sr
)
704 if (memcmp(&ctx
->stencil_ref
, sr
, sizeof(ctx
->stencil_ref
))) {
705 ctx
->stencil_ref
= *sr
;
706 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, sr
);
711 void cso_save_stencil_ref(struct cso_context
*ctx
)
713 ctx
->stencil_ref_saved
= ctx
->stencil_ref
;
717 void cso_restore_stencil_ref(struct cso_context
*ctx
)
719 if (memcmp(&ctx
->stencil_ref
, &ctx
->stencil_ref_saved
,
720 sizeof(ctx
->stencil_ref
))) {
721 ctx
->stencil_ref
= ctx
->stencil_ref_saved
;
722 ctx
->pipe
->set_stencil_ref(ctx
->pipe
, &ctx
->stencil_ref
);
726 enum pipe_error
cso_set_geometry_shader_handle(struct cso_context
*ctx
,
729 assert(ctx
->has_geometry_shader
|| !handle
);
731 if (ctx
->has_geometry_shader
&& ctx
->geometry_shader
!= handle
) {
732 ctx
->geometry_shader
= handle
;
733 ctx
->pipe
->bind_gs_state(ctx
->pipe
, handle
);
738 void cso_delete_geometry_shader(struct cso_context
*ctx
, void *handle
)
740 if (handle
== ctx
->geometry_shader
) {
741 /* unbind before deleting */
742 ctx
->pipe
->bind_gs_state(ctx
->pipe
, NULL
);
743 ctx
->geometry_shader
= NULL
;
745 ctx
->pipe
->delete_gs_state(ctx
->pipe
, handle
);
748 void cso_save_geometry_shader(struct cso_context
*ctx
)
750 if (!ctx
->has_geometry_shader
) {
754 assert(!ctx
->geometry_shader_saved
);
755 ctx
->geometry_shader_saved
= ctx
->geometry_shader
;
758 void cso_restore_geometry_shader(struct cso_context
*ctx
)
760 if (!ctx
->has_geometry_shader
) {
764 if (ctx
->geometry_shader_saved
!= ctx
->geometry_shader
) {
765 ctx
->pipe
->bind_gs_state(ctx
->pipe
, ctx
->geometry_shader_saved
);
766 ctx
->geometry_shader
= ctx
->geometry_shader_saved
;
768 ctx
->geometry_shader_saved
= NULL
;
774 clip_state_cpy(struct pipe_clip_state
*dst
,
775 const struct pipe_clip_state
*src
)
777 memcpy(dst
->ucp
, src
->ucp
, sizeof(dst
->ucp
));
781 clip_state_cmp(const struct pipe_clip_state
*a
,
782 const struct pipe_clip_state
*b
)
784 return memcmp(a
->ucp
, b
->ucp
, sizeof(a
->ucp
));
788 cso_set_clip(struct cso_context
*ctx
,
789 const struct pipe_clip_state
*clip
)
791 if (clip_state_cmp(&ctx
->clip
, clip
)) {
792 clip_state_cpy(&ctx
->clip
, clip
);
793 ctx
->pipe
->set_clip_state(ctx
->pipe
, clip
);
798 cso_save_clip(struct cso_context
*ctx
)
800 clip_state_cpy(&ctx
->clip_saved
, &ctx
->clip
);
804 cso_restore_clip(struct cso_context
*ctx
)
806 if (clip_state_cmp(&ctx
->clip
, &ctx
->clip_saved
)) {
807 clip_state_cpy(&ctx
->clip
, &ctx
->clip_saved
);
808 ctx
->pipe
->set_clip_state(ctx
->pipe
, &ctx
->clip_saved
);
813 cso_set_vertex_elements(struct cso_context
*ctx
,
815 const struct pipe_vertex_element
*states
)
817 struct u_vbuf
*vbuf
= ctx
->vbuf
;
818 unsigned key_size
, hash_key
;
819 struct cso_hash_iter iter
;
821 struct cso_velems_state velems_state
;
824 u_vbuf_set_vertex_elements(vbuf
, count
, states
);
828 /* Need to include the count into the stored state data too.
829 * Otherwise first few count pipe_vertex_elements could be identical
830 * even if count is different, and there's no guarantee the hash would
831 * be different in that case neither.
833 key_size
= sizeof(struct pipe_vertex_element
) * count
+ sizeof(unsigned);
834 velems_state
.count
= count
;
835 memcpy(velems_state
.velems
, states
,
836 sizeof(struct pipe_vertex_element
) * count
);
837 hash_key
= cso_construct_key((void*)&velems_state
, key_size
);
838 iter
= cso_find_state_template(ctx
->cache
, hash_key
, CSO_VELEMENTS
,
839 (void*)&velems_state
, key_size
);
841 if (cso_hash_iter_is_null(iter
)) {
842 struct cso_velements
*cso
= MALLOC(sizeof(struct cso_velements
));
844 return PIPE_ERROR_OUT_OF_MEMORY
;
846 memcpy(&cso
->state
, &velems_state
, key_size
);
847 cso
->data
= ctx
->pipe
->create_vertex_elements_state(ctx
->pipe
, count
,
848 &cso
->state
.velems
[0]);
850 (cso_state_callback
) ctx
->pipe
->delete_vertex_elements_state
;
851 cso
->context
= ctx
->pipe
;
853 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_VELEMENTS
, cso
);
854 if (cso_hash_iter_is_null(iter
)) {
856 return PIPE_ERROR_OUT_OF_MEMORY
;
862 handle
= ((struct cso_velements
*)cso_hash_iter_data(iter
))->data
;
865 if (ctx
->velements
!= handle
) {
866 ctx
->velements
= handle
;
867 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, handle
);
872 void cso_save_vertex_elements(struct cso_context
*ctx
)
874 struct u_vbuf
*vbuf
= ctx
->vbuf
;
877 u_vbuf_save_vertex_elements(vbuf
);
881 assert(!ctx
->velements_saved
);
882 ctx
->velements_saved
= ctx
->velements
;
885 void cso_restore_vertex_elements(struct cso_context
*ctx
)
887 struct u_vbuf
*vbuf
= ctx
->vbuf
;
890 u_vbuf_restore_vertex_elements(vbuf
);
894 if (ctx
->velements
!= ctx
->velements_saved
) {
895 ctx
->velements
= ctx
->velements_saved
;
896 ctx
->pipe
->bind_vertex_elements_state(ctx
->pipe
, ctx
->velements_saved
);
898 ctx
->velements_saved
= NULL
;
903 void cso_set_vertex_buffers(struct cso_context
*ctx
,
904 unsigned start_slot
, unsigned count
,
905 const struct pipe_vertex_buffer
*buffers
)
907 struct u_vbuf
*vbuf
= ctx
->vbuf
;
910 u_vbuf_set_vertex_buffers(vbuf
, start_slot
, count
, buffers
);
914 /* Save what's in the auxiliary slot, so that we can save and restore it
916 if (start_slot
<= ctx
->aux_vertex_buffer_index
&&
917 start_slot
+count
> ctx
->aux_vertex_buffer_index
) {
919 const struct pipe_vertex_buffer
*vb
=
920 buffers
+ (ctx
->aux_vertex_buffer_index
- start_slot
);
922 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
,
924 memcpy(&ctx
->aux_vertex_buffer_current
, vb
,
925 sizeof(struct pipe_vertex_buffer
));
928 pipe_resource_reference(&ctx
->aux_vertex_buffer_current
.buffer
,
930 ctx
->aux_vertex_buffer_current
.user_buffer
= NULL
;
934 ctx
->pipe
->set_vertex_buffers(ctx
->pipe
, start_slot
, count
, buffers
);
937 void cso_save_aux_vertex_buffer_slot(struct cso_context
*ctx
)
939 struct u_vbuf
*vbuf
= ctx
->vbuf
;
942 u_vbuf_save_aux_vertex_buffer_slot(vbuf
);
946 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
,
947 ctx
->aux_vertex_buffer_current
.buffer
);
948 memcpy(&ctx
->aux_vertex_buffer_saved
, &ctx
->aux_vertex_buffer_current
,
949 sizeof(struct pipe_vertex_buffer
));
952 void cso_restore_aux_vertex_buffer_slot(struct cso_context
*ctx
)
954 struct u_vbuf
*vbuf
= ctx
->vbuf
;
957 u_vbuf_restore_aux_vertex_buffer_slot(vbuf
);
961 cso_set_vertex_buffers(ctx
, ctx
->aux_vertex_buffer_index
, 1,
962 &ctx
->aux_vertex_buffer_saved
);
963 pipe_resource_reference(&ctx
->aux_vertex_buffer_saved
.buffer
, NULL
);
966 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context
*ctx
)
968 return ctx
->aux_vertex_buffer_index
;
972 /**************** fragment/vertex sampler view state *************************/
974 static enum pipe_error
975 single_sampler(struct cso_context
*ctx
,
976 struct sampler_info
*info
,
978 const struct pipe_sampler_state
*templ
)
983 unsigned key_size
= sizeof(struct pipe_sampler_state
);
984 unsigned hash_key
= cso_construct_key((void*)templ
, key_size
);
985 struct cso_hash_iter iter
=
986 cso_find_state_template(ctx
->cache
,
987 hash_key
, CSO_SAMPLER
,
988 (void *) templ
, key_size
);
990 if (cso_hash_iter_is_null(iter
)) {
991 struct cso_sampler
*cso
= MALLOC(sizeof(struct cso_sampler
));
993 return PIPE_ERROR_OUT_OF_MEMORY
;
995 memcpy(&cso
->state
, templ
, sizeof(*templ
));
996 cso
->data
= ctx
->pipe
->create_sampler_state(ctx
->pipe
, &cso
->state
);
998 (cso_state_callback
) ctx
->pipe
->delete_sampler_state
;
999 cso
->context
= ctx
->pipe
;
1001 iter
= cso_insert_state(ctx
->cache
, hash_key
, CSO_SAMPLER
, cso
);
1002 if (cso_hash_iter_is_null(iter
)) {
1004 return PIPE_ERROR_OUT_OF_MEMORY
;
1010 handle
= ((struct cso_sampler
*)cso_hash_iter_data(iter
))->data
;
1014 info
->samplers
[idx
] = handle
;
1020 cso_single_sampler(struct cso_context
*ctx
,
1021 unsigned shader_stage
,
1023 const struct pipe_sampler_state
*templ
)
1025 return single_sampler(ctx
, &ctx
->samplers
[shader_stage
], idx
, templ
);
1031 single_sampler_done(struct cso_context
*ctx
, unsigned shader_stage
)
1033 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1036 /* find highest non-null sampler */
1037 for (i
= PIPE_MAX_SAMPLERS
; i
> 0; i
--) {
1038 if (info
->samplers
[i
- 1] != NULL
)
1042 info
->nr_samplers
= i
;
1044 if (info
->hw
.nr_samplers
!= info
->nr_samplers
||
1045 memcmp(info
->hw
.samplers
,
1047 info
->nr_samplers
* sizeof(void *)) != 0)
1049 memcpy(info
->hw
.samplers
,
1051 info
->nr_samplers
* sizeof(void *));
1052 info
->hw
.nr_samplers
= info
->nr_samplers
;
1054 switch (shader_stage
) {
1055 case PIPE_SHADER_FRAGMENT
:
1056 ctx
->pipe
->bind_fragment_sampler_states(ctx
->pipe
,
1060 case PIPE_SHADER_VERTEX
:
1061 ctx
->pipe
->bind_vertex_sampler_states(ctx
->pipe
,
1065 case PIPE_SHADER_GEOMETRY
:
1066 ctx
->pipe
->bind_geometry_sampler_states(ctx
->pipe
,
1071 assert(!"bad shader type in single_sampler_done()");
/* Public wrapper: flush accumulated single-sampler changes to the driver.
 * NOTE(review): `void` return type reconstructed — verify.
 */
void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}
1084 * If the function encouters any errors it will return the
1085 * last one. Done to always try to set as many samplers
1089 cso_set_samplers(struct cso_context
*ctx
,
1090 unsigned shader_stage
,
1092 const struct pipe_sampler_state
**templates
)
1094 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1096 enum pipe_error temp
, error
= PIPE_OK
;
1101 for (i
= 0; i
< nr
; i
++) {
1102 temp
= single_sampler(ctx
, info
, i
, templates
[i
]);
1103 if (temp
!= PIPE_OK
)
1107 for ( ; i
< info
->nr_samplers
; i
++) {
1108 temp
= single_sampler(ctx
, info
, i
, NULL
);
1109 if (temp
!= PIPE_OK
)
1113 single_sampler_done(ctx
, shader_stage
);
1119 cso_save_samplers(struct cso_context
*ctx
, unsigned shader_stage
)
1121 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1122 info
->nr_samplers_saved
= info
->nr_samplers
;
1123 memcpy(info
->samplers_saved
, info
->samplers
, sizeof(info
->samplers
));
1128 cso_restore_samplers(struct cso_context
*ctx
, unsigned shader_stage
)
1130 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1131 info
->nr_samplers
= info
->nr_samplers_saved
;
1132 memcpy(info
->samplers
, info
->samplers_saved
, sizeof(info
->samplers
));
1133 single_sampler_done(ctx
, shader_stage
);
1138 cso_set_sampler_views(struct cso_context
*ctx
,
1139 unsigned shader_stage
,
1141 struct pipe_sampler_view
**views
)
1143 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1146 /* reference new views */
1147 for (i
= 0; i
< count
; i
++) {
1148 pipe_sampler_view_reference(&info
->views
[i
], views
[i
]);
1150 /* unref extra old views, if any */
1151 for (; i
< info
->nr_views
; i
++) {
1152 pipe_sampler_view_reference(&info
->views
[i
], NULL
);
1155 info
->nr_views
= count
;
1157 /* bind the new sampler views */
1158 switch (shader_stage
) {
1159 case PIPE_SHADER_FRAGMENT
:
1160 ctx
->pipe
->set_fragment_sampler_views(ctx
->pipe
, count
, info
->views
);
1162 case PIPE_SHADER_VERTEX
:
1163 ctx
->pipe
->set_vertex_sampler_views(ctx
->pipe
, count
, info
->views
);
1165 case PIPE_SHADER_GEOMETRY
:
1166 ctx
->pipe
->set_geometry_sampler_views(ctx
->pipe
, count
, info
->views
);
1169 assert(!"bad shader type in cso_set_sampler_views()");
1175 cso_save_sampler_views(struct cso_context
*ctx
, unsigned shader_stage
)
1177 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1180 info
->nr_views_saved
= info
->nr_views
;
1182 for (i
= 0; i
< info
->nr_views
; i
++) {
1183 assert(!info
->views_saved
[i
]);
1184 pipe_sampler_view_reference(&info
->views_saved
[i
], info
->views
[i
]);
1190 cso_restore_sampler_views(struct cso_context
*ctx
, unsigned shader_stage
)
1192 struct sampler_info
*info
= &ctx
->samplers
[shader_stage
];
1193 unsigned i
, nr_saved
= info
->nr_views_saved
;
1195 for (i
= 0; i
< nr_saved
; i
++) {
1196 pipe_sampler_view_reference(&info
->views
[i
], NULL
);
1197 /* move the reference from one pointer to another */
1198 info
->views
[i
] = info
->views_saved
[i
];
1199 info
->views_saved
[i
] = NULL
;
1201 for (; i
< info
->nr_views
; i
++) {
1202 pipe_sampler_view_reference(&info
->views
[i
], NULL
);
1205 /* bind the old/saved sampler views */
1206 switch (shader_stage
) {
1207 case PIPE_SHADER_FRAGMENT
:
1208 ctx
->pipe
->set_fragment_sampler_views(ctx
->pipe
, nr_saved
, info
->views
);
1210 case PIPE_SHADER_VERTEX
:
1211 ctx
->pipe
->set_vertex_sampler_views(ctx
->pipe
, nr_saved
, info
->views
);
1213 case PIPE_SHADER_GEOMETRY
:
1214 ctx
->pipe
->set_geometry_sampler_views(ctx
->pipe
, nr_saved
, info
->views
);
1217 assert(!"bad shader type in cso_restore_sampler_views()");
1220 info
->nr_views
= nr_saved
;
1221 info
->nr_views_saved
= 0;
1226 cso_set_stream_outputs(struct cso_context
*ctx
,
1227 unsigned num_targets
,
1228 struct pipe_stream_output_target
**targets
,
1229 unsigned append_bitmask
)
1231 struct pipe_context
*pipe
= ctx
->pipe
;
1234 if (!ctx
->has_streamout
) {
1235 assert(num_targets
== 0);
1239 if (ctx
->nr_so_targets
== 0 && num_targets
== 0) {
1240 /* Nothing to do. */
1244 /* reference new targets */
1245 for (i
= 0; i
< num_targets
; i
++) {
1246 pipe_so_target_reference(&ctx
->so_targets
[i
], targets
[i
]);
1248 /* unref extra old targets, if any */
1249 for (; i
< ctx
->nr_so_targets
; i
++) {
1250 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1253 pipe
->set_stream_output_targets(pipe
, num_targets
, targets
,
1255 ctx
->nr_so_targets
= num_targets
;
1259 cso_save_stream_outputs(struct cso_context
*ctx
)
1263 if (!ctx
->has_streamout
) {
1267 ctx
->nr_so_targets_saved
= ctx
->nr_so_targets
;
1269 for (i
= 0; i
< ctx
->nr_so_targets
; i
++) {
1270 assert(!ctx
->so_targets_saved
[i
]);
1271 pipe_so_target_reference(&ctx
->so_targets_saved
[i
], ctx
->so_targets
[i
]);
1276 cso_restore_stream_outputs(struct cso_context
*ctx
)
1278 struct pipe_context
*pipe
= ctx
->pipe
;
1281 if (!ctx
->has_streamout
) {
1285 if (ctx
->nr_so_targets
== 0 && ctx
->nr_so_targets_saved
== 0) {
1286 /* Nothing to do. */
1290 for (i
= 0; i
< ctx
->nr_so_targets_saved
; i
++) {
1291 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1292 /* move the reference from one pointer to another */
1293 ctx
->so_targets
[i
] = ctx
->so_targets_saved
[i
];
1294 ctx
->so_targets_saved
[i
] = NULL
;
1296 for (; i
< ctx
->nr_so_targets
; i
++) {
1297 pipe_so_target_reference(&ctx
->so_targets
[i
], NULL
);
1300 /* ~0 means append */
1301 pipe
->set_stream_output_targets(pipe
, ctx
->nr_so_targets_saved
,
1302 ctx
->so_targets
, ~0);
1304 ctx
->nr_so_targets
= ctx
->nr_so_targets_saved
;
1305 ctx
->nr_so_targets_saved
= 0;
1311 cso_set_index_buffer(struct cso_context
*cso
,
1312 const struct pipe_index_buffer
*ib
)
1314 struct u_vbuf
*vbuf
= cso
->vbuf
;
1317 u_vbuf_set_index_buffer(vbuf
, ib
);
1319 struct pipe_context
*pipe
= cso
->pipe
;
1320 pipe
->set_index_buffer(pipe
, ib
);
1325 cso_draw_vbo(struct cso_context
*cso
,
1326 const struct pipe_draw_info
*info
)
1328 struct u_vbuf
*vbuf
= cso
->vbuf
;
1331 u_vbuf_draw_vbo(vbuf
, info
);
1333 struct pipe_context
*pipe
= cso
->pipe
;
1334 pipe
->draw_vbo(pipe
, info
);
1339 cso_draw_arrays(struct cso_context
*cso
, uint mode
, uint start
, uint count
)
1341 struct pipe_draw_info info
;
1343 util_draw_init_info(&info
);
1348 info
.min_index
= start
;
1349 info
.max_index
= start
+ count
- 1;
1351 cso_draw_vbo(cso
, &info
);