cso: remove unreachable break statements
[mesa.git] / src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
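/*
 * Rough usage sketch (illustrative only, not lifted from any particular
 * state tracker): state is set through the cso_* wrappers, which skip the
 * driver call when nothing actually changed, and the save/restore entry
 * points act as a 1-deep stack around meta operations.  "st", "blend_templ"
 * and "fs" below are hypothetical caller-side names.
 *
 *    cso_save_blend(st->cso);
 *    cso_save_fragment_shader(st->cso);
 *
 *    cso_set_blend(st->cso, &blend_templ);
 *    cso_set_fragment_shader_handle(st->cso, fs);
 *    cso_draw_arrays(st->cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4);
 *
 *    cso_restore_fragment_shader(st->cso);
 *    cso_restore_blend(st->cso);
 */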
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these per shader stage (fragment, vertex, geometry).
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info samplers[PIPE_SHADER_TYPES];
88
89 uint nr_vertex_buffers;
90 struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];
91
92 uint nr_vertex_buffers_saved;
93 struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];
94
95 unsigned nr_so_targets;
96 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
97
98 unsigned nr_so_targets_saved;
99 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
100
101 /** Current and saved state.
102 * The saved state is used as a 1-deep stack.
103 */
104 void *blend, *blend_saved;
105 void *depth_stencil, *depth_stencil_saved;
106 void *rasterizer, *rasterizer_saved;
107 void *fragment_shader, *fragment_shader_saved, *geometry_shader;
108 void *vertex_shader, *vertex_shader_saved, *geometry_shader_saved;
109 void *velements, *velements_saved;
110
111 struct pipe_clip_state clip;
112 struct pipe_clip_state clip_saved;
113
114 struct pipe_framebuffer_state fb, fb_saved;
115 struct pipe_viewport_state vp, vp_saved;
116 struct pipe_blend_color blend_color;
117 unsigned sample_mask;
118 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
119 };
120
121
122 static boolean delete_blend_state(struct cso_context *ctx, void *state)
123 {
124 struct cso_blend *cso = (struct cso_blend *)state;
125
126 if (ctx->blend == cso->data)
127 return FALSE;
128
129 if (cso->delete_state)
130 cso->delete_state(cso->context, cso->data);
131 FREE(state);
132 return TRUE;
133 }
134
135 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
136 {
137 struct cso_depth_stencil_alpha *cso =
138 (struct cso_depth_stencil_alpha *)state;
139
140 if (ctx->depth_stencil == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146
147 return TRUE;
148 }
149
150 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
151 {
152 struct cso_sampler *cso = (struct cso_sampler *)state;
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156 return TRUE;
157 }
158
159 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
160 {
161 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
162
163 if (ctx->rasterizer == cso->data)
164 return FALSE;
165 if (cso->delete_state)
166 cso->delete_state(cso->context, cso->data);
167 FREE(state);
168 return TRUE;
169 }
170
171 static boolean delete_vertex_elements(struct cso_context *ctx,
172 void *state)
173 {
174 struct cso_velements *cso = (struct cso_velements *)state;
175
176 if (ctx->velements == cso->data)
177 return FALSE;
178
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185
186 static INLINE boolean delete_cso(struct cso_context *ctx,
187 void *state, enum cso_cache_type type)
188 {
189 switch (type) {
190 case CSO_BLEND:
191 return delete_blend_state(ctx, state);
192 case CSO_SAMPLER:
193 return delete_sampler_state(ctx, state);
194 case CSO_DEPTH_STENCIL_ALPHA:
195 return delete_depth_stencil_state(ctx, state);
196 case CSO_RASTERIZER:
197 return delete_rasterizer_state(ctx, state);
198 case CSO_VELEMENTS:
199 return delete_vertex_elements(ctx, state);
200 default:
201 assert(0);
202 FREE(state);
203 }
204 return FALSE;
205 }
206
207 static INLINE void
208 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
209 int max_size, void *user_data)
210 {
211 struct cso_context *ctx = (struct cso_context *)user_data;
212 /* if we're approaching the maximum size, remove a fourth of the entries;
213 * otherwise every subsequent call will go through the same eviction (see the example below) */
214 int hash_size = cso_hash_size(hash);
215 int max_entries = (max_size > hash_size) ? max_size : hash_size;
216 int to_remove = (max_size < max_entries) * max_entries/4;
217 struct cso_hash_iter iter = cso_hash_first_node(hash);
218 if (hash_size > max_size)
219 to_remove += hash_size - max_size;
220 while (to_remove) {
221 /* remove elements until we're good */
222 /* fixme: currently we pick the nodes to remove at random */
223 void *cso = cso_hash_iter_data(iter);
224 if (delete_cso(ctx, cso, type)) {
225 iter = cso_hash_erase(hash, iter);
226 --to_remove;
227 } else
228 iter = cso_hash_iter_next(iter);
229 }
230 }
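/*
 * Worked example of the eviction math above (the numbers are illustrative,
 * not a limit used by the cache): with max_size = 100 and hash_size = 102,
 * max_entries = 102, so to_remove starts at 102 / 4 = 25 and the overflow
 * adds 102 - 100 = 2, evicting 27 entries in total.  When
 * hash_size <= max_size, to_remove stays 0 and the hash is left alone.
 */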
231
232 static void cso_init_vbuf(struct cso_context *cso)
233 {
234 struct u_vbuf_caps caps;
235
236 u_vbuf_get_caps(cso->pipe->screen, &caps);
237
238 /* Install u_vbuf if there is anything unsupported. */
239 if (!caps.buffer_offset_unaligned ||
240 !caps.buffer_stride_unaligned ||
241 !caps.velem_src_offset_unaligned ||
242 !caps.format_fixed32 ||
243 !caps.format_float16 ||
244 !caps.format_float64 ||
245 !caps.format_norm32 ||
246 !caps.format_scaled32 ||
247 !caps.user_vertex_buffers) {
248 cso->vbuf = u_vbuf_create(cso->pipe, &caps);
249 }
250 }
251
252 struct cso_context *cso_create_context( struct pipe_context *pipe )
253 {
254 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
255 if (ctx == NULL)
256 goto out;
257
258 assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);
259
260 ctx->cache = cso_cache_create();
261 if (ctx->cache == NULL)
262 goto out;
263 cso_cache_set_sanitize_callback(ctx->cache,
264 sanitize_hash,
265 ctx);
266
267 ctx->pipe = pipe;
268
269 cso_init_vbuf(ctx);
270
271 /* Enable for testing: */
272 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
273
274 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
275 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
276 ctx->has_geometry_shader = TRUE;
277 }
278 if (pipe->screen->get_param(pipe->screen,
279 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
280 ctx->has_streamout = TRUE;
281 }
282
283 return ctx;
284
285 out:
286 cso_destroy_context( ctx );
287 return NULL;
288 }
289
290 /**
291 * Prior to context destruction, this function unbinds all state objects.
292 */
293 void cso_release_all( struct cso_context *ctx )
294 {
295 unsigned i, shader;
296
297 if (ctx->pipe) {
298 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
299 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
300 ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
301 if (ctx->pipe->bind_vertex_sampler_states)
302 ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
303 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
304 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
305 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
306 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
307 ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
308 if (ctx->pipe->set_vertex_sampler_views)
309 ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
310 if (ctx->pipe->set_stream_output_targets)
311 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
312 }
313
314 /* free sampler views (current and saved) for each shader stage */
315 for (shader = 0; shader < Elements(ctx->samplers); shader++) {
316 struct sampler_info *info = &ctx->samplers[shader];
317 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
318 pipe_sampler_view_reference(&info->views[i], NULL);
319 pipe_sampler_view_reference(&info->views_saved[i], NULL);
320 }
321 }
322
323 util_unreference_framebuffer_state(&ctx->fb);
324 util_unreference_framebuffer_state(&ctx->fb_saved);
325
326 util_copy_vertex_buffers(ctx->vertex_buffers,
327 &ctx->nr_vertex_buffers,
328 NULL, 0);
329 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
330 &ctx->nr_vertex_buffers_saved,
331 NULL, 0);
332
333 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
334 pipe_so_target_reference(&ctx->so_targets[i], NULL);
335 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
336 }
337
338 if (ctx->cache) {
339 cso_cache_delete( ctx->cache );
340 ctx->cache = NULL;
341 }
342 }
343
344
345 /**
346 * Free the CSO context. NOTE: the state tracker should have previously called
347 * cso_release_all().
348 */
349 void cso_destroy_context( struct cso_context *ctx )
350 {
351 if (ctx) {
352 if (ctx->vbuf)
353 u_vbuf_destroy(ctx->vbuf);
354 FREE( ctx );
355 }
356 }
357
358
359 /* These functions will either find the state of the given template
360 * in the cache or they will create a new state from the given
361 * template, insert it in the cache and return it.
362 */
363
364 /*
365 * If the driver returns 0 from the create method, the data member of
366 * the cso is set to the template itself.
367 */
368
369 enum pipe_error cso_set_blend(struct cso_context *ctx,
370 const struct pipe_blend_state *templ)
371 {
372 unsigned key_size, hash_key;
373 struct cso_hash_iter iter;
374 void *handle;
375
376 key_size = templ->independent_blend_enable ?
377 sizeof(struct pipe_blend_state) :
378 (char *)&(templ->rt[1]) - (char *)templ;
379 hash_key = cso_construct_key((void*)templ, key_size);
380 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
381 (void*)templ, key_size);
382
383 if (cso_hash_iter_is_null(iter)) {
384 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
385 if (!cso)
386 return PIPE_ERROR_OUT_OF_MEMORY;
387
388 memset(&cso->state, 0, sizeof cso->state);
389 memcpy(&cso->state, templ, key_size);
390 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
391 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
392 cso->context = ctx->pipe;
393
394 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
395 if (cso_hash_iter_is_null(iter)) {
396 FREE(cso);
397 return PIPE_ERROR_OUT_OF_MEMORY;
398 }
399
400 handle = cso->data;
401 }
402 else {
403 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
404 }
405
406 if (ctx->blend != handle) {
407 ctx->blend = handle;
408 ctx->pipe->bind_blend_state(ctx->pipe, handle);
409 }
410 return PIPE_OK;
411 }
412
413 void cso_save_blend(struct cso_context *ctx)
414 {
415 assert(!ctx->blend_saved);
416 ctx->blend_saved = ctx->blend;
417 }
418
419 void cso_restore_blend(struct cso_context *ctx)
420 {
421 if (ctx->blend != ctx->blend_saved) {
422 ctx->blend = ctx->blend_saved;
423 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
424 }
425 ctx->blend_saved = NULL;
426 }
427
428
429
430 enum pipe_error
431 cso_set_depth_stencil_alpha(struct cso_context *ctx,
432 const struct pipe_depth_stencil_alpha_state *templ)
433 {
434 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
435 unsigned hash_key = cso_construct_key((void*)templ, key_size);
436 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
437 hash_key,
438 CSO_DEPTH_STENCIL_ALPHA,
439 (void*)templ, key_size);
440 void *handle;
441
442 if (cso_hash_iter_is_null(iter)) {
443 struct cso_depth_stencil_alpha *cso =
444 MALLOC(sizeof(struct cso_depth_stencil_alpha));
445 if (!cso)
446 return PIPE_ERROR_OUT_OF_MEMORY;
447
448 memcpy(&cso->state, templ, sizeof(*templ));
449 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
450 &cso->state);
451 cso->delete_state =
452 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
453 cso->context = ctx->pipe;
454
455 iter = cso_insert_state(ctx->cache, hash_key,
456 CSO_DEPTH_STENCIL_ALPHA, cso);
457 if (cso_hash_iter_is_null(iter)) {
458 FREE(cso);
459 return PIPE_ERROR_OUT_OF_MEMORY;
460 }
461
462 handle = cso->data;
463 }
464 else {
465 handle = ((struct cso_depth_stencil_alpha *)
466 cso_hash_iter_data(iter))->data;
467 }
468
469 if (ctx->depth_stencil != handle) {
470 ctx->depth_stencil = handle;
471 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
472 }
473 return PIPE_OK;
474 }
475
476 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
477 {
478 assert(!ctx->depth_stencil_saved);
479 ctx->depth_stencil_saved = ctx->depth_stencil;
480 }
481
482 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
483 {
484 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
485 ctx->depth_stencil = ctx->depth_stencil_saved;
486 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
487 ctx->depth_stencil_saved);
488 }
489 ctx->depth_stencil_saved = NULL;
490 }
491
492
493
494 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
495 const struct pipe_rasterizer_state *templ)
496 {
497 unsigned key_size = sizeof(struct pipe_rasterizer_state);
498 unsigned hash_key = cso_construct_key((void*)templ, key_size);
499 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
500 hash_key,
501 CSO_RASTERIZER,
502 (void*)templ, key_size);
503 void *handle = NULL;
504
505 if (cso_hash_iter_is_null(iter)) {
506 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
507 if (!cso)
508 return PIPE_ERROR_OUT_OF_MEMORY;
509
510 memcpy(&cso->state, templ, sizeof(*templ));
511 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
512 cso->delete_state =
513 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
514 cso->context = ctx->pipe;
515
516 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
517 if (cso_hash_iter_is_null(iter)) {
518 FREE(cso);
519 return PIPE_ERROR_OUT_OF_MEMORY;
520 }
521
522 handle = cso->data;
523 }
524 else {
525 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
526 }
527
528 if (ctx->rasterizer != handle) {
529 ctx->rasterizer = handle;
530 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
531 }
532 return PIPE_OK;
533 }
534
535 void cso_save_rasterizer(struct cso_context *ctx)
536 {
537 assert(!ctx->rasterizer_saved);
538 ctx->rasterizer_saved = ctx->rasterizer;
539 }
540
541 void cso_restore_rasterizer(struct cso_context *ctx)
542 {
543 if (ctx->rasterizer != ctx->rasterizer_saved) {
544 ctx->rasterizer = ctx->rasterizer_saved;
545 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
546 }
547 ctx->rasterizer_saved = NULL;
548 }
549
550
551
552 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
553 void *handle )
554 {
555 if (ctx->fragment_shader != handle) {
556 ctx->fragment_shader = handle;
557 ctx->pipe->bind_fs_state(ctx->pipe, handle);
558 }
559 return PIPE_OK;
560 }
561
562 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
563 {
564 if (handle == ctx->fragment_shader) {
565 /* unbind before deleting */
566 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
567 ctx->fragment_shader = NULL;
568 }
569 ctx->pipe->delete_fs_state(ctx->pipe, handle);
570 }
571
572 void cso_save_fragment_shader(struct cso_context *ctx)
573 {
574 assert(!ctx->fragment_shader_saved);
575 ctx->fragment_shader_saved = ctx->fragment_shader;
576 }
577
578 void cso_restore_fragment_shader(struct cso_context *ctx)
579 {
580 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
581 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
582 ctx->fragment_shader = ctx->fragment_shader_saved;
583 }
584 ctx->fragment_shader_saved = NULL;
585 }
586
587
588 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
589 void *handle )
590 {
591 if (ctx->vertex_shader != handle) {
592 ctx->vertex_shader = handle;
593 ctx->pipe->bind_vs_state(ctx->pipe, handle);
594 }
595 return PIPE_OK;
596 }
597
598 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
599 {
600 if (handle == ctx->vertex_shader) {
601 /* unbind before deleting */
602 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
603 ctx->vertex_shader = NULL;
604 }
605 ctx->pipe->delete_vs_state(ctx->pipe, handle);
606 }
607
608 void cso_save_vertex_shader(struct cso_context *ctx)
609 {
610 assert(!ctx->vertex_shader_saved);
611 ctx->vertex_shader_saved = ctx->vertex_shader;
612 }
613
614 void cso_restore_vertex_shader(struct cso_context *ctx)
615 {
616 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
617 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
618 ctx->vertex_shader = ctx->vertex_shader_saved;
619 }
620 ctx->vertex_shader_saved = NULL;
621 }
622
623
624 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
625 const struct pipe_framebuffer_state *fb)
626 {
627 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
628 util_copy_framebuffer_state(&ctx->fb, fb);
629 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
630 }
631 return PIPE_OK;
632 }
633
634 void cso_save_framebuffer(struct cso_context *ctx)
635 {
636 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
637 }
638
639 void cso_restore_framebuffer(struct cso_context *ctx)
640 {
641 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
642 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
643 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
644 util_unreference_framebuffer_state(&ctx->fb_saved);
645 }
646 }
647
648
649 enum pipe_error cso_set_viewport(struct cso_context *ctx,
650 const struct pipe_viewport_state *vp)
651 {
652 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
653 ctx->vp = *vp;
654 ctx->pipe->set_viewport_state(ctx->pipe, vp);
655 }
656 return PIPE_OK;
657 }
658
659 void cso_save_viewport(struct cso_context *ctx)
660 {
661 ctx->vp_saved = ctx->vp;
662 }
663
664
665 void cso_restore_viewport(struct cso_context *ctx)
666 {
667 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
668 ctx->vp = ctx->vp_saved;
669 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
670 }
671 }
672
673
674 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
675 const struct pipe_blend_color *bc)
676 {
677 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
678 ctx->blend_color = *bc;
679 ctx->pipe->set_blend_color(ctx->pipe, bc);
680 }
681 return PIPE_OK;
682 }
683
684 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
685 unsigned sample_mask)
686 {
687 if (ctx->sample_mask != sample_mask) {
688 ctx->sample_mask = sample_mask;
689 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
690 }
691 return PIPE_OK;
692 }
693
694 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
695 const struct pipe_stencil_ref *sr)
696 {
697 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
698 ctx->stencil_ref = *sr;
699 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
700 }
701 return PIPE_OK;
702 }
703
704 void cso_save_stencil_ref(struct cso_context *ctx)
705 {
706 ctx->stencil_ref_saved = ctx->stencil_ref;
707 }
708
709
710 void cso_restore_stencil_ref(struct cso_context *ctx)
711 {
712 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
713 sizeof(ctx->stencil_ref))) {
714 ctx->stencil_ref = ctx->stencil_ref_saved;
715 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
716 }
717 }
718
719 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
720 void *handle)
721 {
722 assert(ctx->has_geometry_shader || !handle);
723
724 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
725 ctx->geometry_shader = handle;
726 ctx->pipe->bind_gs_state(ctx->pipe, handle);
727 }
728 return PIPE_OK;
729 }
730
731 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
732 {
733 if (handle == ctx->geometry_shader) {
734 /* unbind before deleting */
735 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
736 ctx->geometry_shader = NULL;
737 }
738 ctx->pipe->delete_gs_state(ctx->pipe, handle);
739 }
740
741 void cso_save_geometry_shader(struct cso_context *ctx)
742 {
743 if (!ctx->has_geometry_shader) {
744 return;
745 }
746
747 assert(!ctx->geometry_shader_saved);
748 ctx->geometry_shader_saved = ctx->geometry_shader;
749 }
750
751 void cso_restore_geometry_shader(struct cso_context *ctx)
752 {
753 if (!ctx->has_geometry_shader) {
754 return;
755 }
756
757 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
758 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
759 ctx->geometry_shader = ctx->geometry_shader_saved;
760 }
761 ctx->geometry_shader_saved = NULL;
762 }
763
764 /* clip state */
765
766 static INLINE void
767 clip_state_cpy(struct pipe_clip_state *dst,
768 const struct pipe_clip_state *src)
769 {
770 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
771 }
772
773 static INLINE int
774 clip_state_cmp(const struct pipe_clip_state *a,
775 const struct pipe_clip_state *b)
776 {
777 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
778 }
779
780 void
781 cso_set_clip(struct cso_context *ctx,
782 const struct pipe_clip_state *clip)
783 {
784 if (clip_state_cmp(&ctx->clip, clip)) {
785 clip_state_cpy(&ctx->clip, clip);
786 ctx->pipe->set_clip_state(ctx->pipe, clip);
787 }
788 }
789
790 void
791 cso_save_clip(struct cso_context *ctx)
792 {
793 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
794 }
795
796 void
797 cso_restore_clip(struct cso_context *ctx)
798 {
799 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
800 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
801 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
802 }
803 }
804
805 enum pipe_error
806 cso_set_vertex_elements(struct cso_context *ctx,
807 unsigned count,
808 const struct pipe_vertex_element *states)
809 {
810 struct u_vbuf *vbuf = ctx->vbuf;
811 unsigned key_size, hash_key;
812 struct cso_hash_iter iter;
813 void *handle;
814 struct cso_velems_state velems_state;
815
816 if (vbuf) {
817 u_vbuf_set_vertex_elements(vbuf, count, states);
818 return PIPE_OK;
819 }
820
821 /* Need to include the count into the stored state data too.
822 * Otherwise the first 'count' pipe_vertex_elements could be identical
823 * even if count differs, and there's no guarantee the hash would
824 * be different in that case either.
825 */
826 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
827 velems_state.count = count;
828 memcpy(velems_state.velems, states,
829 sizeof(struct pipe_vertex_element) * count);
830 hash_key = cso_construct_key((void*)&velems_state, key_size);
831 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
832 (void*)&velems_state, key_size);
833
834 if (cso_hash_iter_is_null(iter)) {
835 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
836 if (!cso)
837 return PIPE_ERROR_OUT_OF_MEMORY;
838
839 memcpy(&cso->state, &velems_state, key_size);
840 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
841 &cso->state.velems[0]);
842 cso->delete_state =
843 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
844 cso->context = ctx->pipe;
845
846 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
847 if (cso_hash_iter_is_null(iter)) {
848 FREE(cso);
849 return PIPE_ERROR_OUT_OF_MEMORY;
850 }
851
852 handle = cso->data;
853 }
854 else {
855 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
856 }
857
858 if (ctx->velements != handle) {
859 ctx->velements = handle;
860 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
861 }
862 return PIPE_OK;
863 }
864
865 void cso_save_vertex_elements(struct cso_context *ctx)
866 {
867 struct u_vbuf *vbuf = ctx->vbuf;
868
869 if (vbuf) {
870 u_vbuf_save_vertex_elements(vbuf);
871 return;
872 }
873
874 assert(!ctx->velements_saved);
875 ctx->velements_saved = ctx->velements;
876 }
877
878 void cso_restore_vertex_elements(struct cso_context *ctx)
879 {
880 struct u_vbuf *vbuf = ctx->vbuf;
881
882 if (vbuf) {
883 u_vbuf_restore_vertex_elements(vbuf);
884 return;
885 }
886
887 if (ctx->velements != ctx->velements_saved) {
888 ctx->velements = ctx->velements_saved;
889 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
890 }
891 ctx->velements_saved = NULL;
892 }
893
894 /* vertex buffers */
895
896 void cso_set_vertex_buffers(struct cso_context *ctx,
897 unsigned count,
898 const struct pipe_vertex_buffer *buffers)
899 {
900 struct u_vbuf *vbuf = ctx->vbuf;
901
902 if (vbuf) {
903 u_vbuf_set_vertex_buffers(vbuf, count, buffers);
904 return;
905 }
906
907 if (count != ctx->nr_vertex_buffers ||
908 memcmp(buffers, ctx->vertex_buffers,
909 sizeof(struct pipe_vertex_buffer) * count) != 0) {
910 util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
911 buffers, count);
912 ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
913 }
914 }
915
916 void cso_save_vertex_buffers(struct cso_context *ctx)
917 {
918 struct u_vbuf *vbuf = ctx->vbuf;
919
920 if (vbuf) {
921 u_vbuf_save_vertex_buffers(vbuf);
922 return;
923 }
924
925 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
926 &ctx->nr_vertex_buffers_saved,
927 ctx->vertex_buffers,
928 ctx->nr_vertex_buffers);
929 }
930
931 void cso_restore_vertex_buffers(struct cso_context *ctx)
932 {
933 unsigned i;
934 struct u_vbuf *vbuf = ctx->vbuf;
935
936 if (vbuf) {
937 u_vbuf_restore_vertex_buffers(vbuf);
938 return;
939 }
940
941 util_copy_vertex_buffers(ctx->vertex_buffers,
942 &ctx->nr_vertex_buffers,
943 ctx->vertex_buffers_saved,
944 ctx->nr_vertex_buffers_saved);
945
946 for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
947 pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
948 }
949 ctx->nr_vertex_buffers_saved = 0;
950
951 ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
952 ctx->vertex_buffers);
953 }
954
955
956 /**************** fragment/vertex sampler view state *************************/
957
958 static enum pipe_error
959 single_sampler(struct cso_context *ctx,
960 struct sampler_info *info,
961 unsigned idx,
962 const struct pipe_sampler_state *templ)
963 {
964 void *handle = NULL;
965
966 if (templ != NULL) {
967 unsigned key_size = sizeof(struct pipe_sampler_state);
968 unsigned hash_key = cso_construct_key((void*)templ, key_size);
969 struct cso_hash_iter iter =
970 cso_find_state_template(ctx->cache,
971 hash_key, CSO_SAMPLER,
972 (void *) templ, key_size);
973
974 if (cso_hash_iter_is_null(iter)) {
975 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
976 if (!cso)
977 return PIPE_ERROR_OUT_OF_MEMORY;
978
979 memcpy(&cso->state, templ, sizeof(*templ));
980 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
981 cso->delete_state =
982 (cso_state_callback) ctx->pipe->delete_sampler_state;
983 cso->context = ctx->pipe;
984
985 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
986 if (cso_hash_iter_is_null(iter)) {
987 FREE(cso);
988 return PIPE_ERROR_OUT_OF_MEMORY;
989 }
990
991 handle = cso->data;
992 }
993 else {
994 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
995 }
996 }
997
998 info->samplers[idx] = handle;
999
1000 return PIPE_OK;
1001 }
1002
1003 enum pipe_error
1004 cso_single_sampler(struct cso_context *ctx,
1005 unsigned shader_stage,
1006 unsigned idx,
1007 const struct pipe_sampler_state *templ)
1008 {
1009 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1010 }
1011
1012
1013
1014 static void
1015 single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1016 {
1017 struct sampler_info *info = &ctx->samplers[shader_stage];
1018 unsigned i;
1019
1020 /* find highest non-null sampler */
1021 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1022 if (info->samplers[i - 1] != NULL)
1023 break;
1024 }
1025
1026 info->nr_samplers = i;
1027
1028 if (info->hw.nr_samplers != info->nr_samplers ||
1029 memcmp(info->hw.samplers,
1030 info->samplers,
1031 info->nr_samplers * sizeof(void *)) != 0)
1032 {
1033 memcpy(info->hw.samplers,
1034 info->samplers,
1035 info->nr_samplers * sizeof(void *));
1036 info->hw.nr_samplers = info->nr_samplers;
1037
1038 switch (shader_stage) {
1039 case PIPE_SHADER_FRAGMENT:
1040 ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
1041 info->nr_samplers,
1042 info->samplers);
1043 break;
1044 case PIPE_SHADER_VERTEX:
1045 ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
1046 info->nr_samplers,
1047 info->samplers);
1048 break;
1049 case PIPE_SHADER_GEOMETRY:
1050 ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
1051 info->nr_samplers,
1052 info->samplers);
1053 break;
1054 default:
1055 assert(!"bad shader type in single_sampler_done()");
1056 }
1057 }
1058 }
1059
1060 void
1061 cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
1062 {
1063 single_sampler_done(ctx, shader_stage);
1064 }
1065
1066
1067 /*
1068 * If the function encounters any errors it will return the
1069 * last one. Done to always try to set as many samplers
1070 * as possible.
1071 */
1072 enum pipe_error
1073 cso_set_samplers(struct cso_context *ctx,
1074 unsigned shader_stage,
1075 unsigned nr,
1076 const struct pipe_sampler_state **templates)
1077 {
1078 struct sampler_info *info = &ctx->samplers[shader_stage];
1079 unsigned i;
1080 enum pipe_error temp, error = PIPE_OK;
1081
1082 /* TODO: fastpath
1083 */
1084
1085 for (i = 0; i < nr; i++) {
1086 temp = single_sampler(ctx, info, i, templates[i]);
1087 if (temp != PIPE_OK)
1088 error = temp;
1089 }
1090
1091 for ( ; i < info->nr_samplers; i++) {
1092 temp = single_sampler(ctx, info, i, NULL);
1093 if (temp != PIPE_OK)
1094 error = temp;
1095 }
1096
1097 single_sampler_done(ctx, shader_stage);
1098
1099 return error;
1100 }
1101
1102 void
1103 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1104 {
1105 struct sampler_info *info = &ctx->samplers[shader_stage];
1106 info->nr_samplers_saved = info->nr_samplers;
1107 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1108 }
1109
1110
1111 void
1112 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1113 {
1114 struct sampler_info *info = &ctx->samplers[shader_stage];
1115 info->nr_samplers = info->nr_samplers_saved;
1116 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1117 single_sampler_done(ctx, shader_stage);
1118 }
1119
1120
1121 void
1122 cso_set_sampler_views(struct cso_context *ctx,
1123 unsigned shader_stage,
1124 unsigned count,
1125 struct pipe_sampler_view **views)
1126 {
1127 struct sampler_info *info = &ctx->samplers[shader_stage];
1128 unsigned i;
1129
1130 /* reference new views */
1131 for (i = 0; i < count; i++) {
1132 pipe_sampler_view_reference(&info->views[i], views[i]);
1133 }
1134 /* unref extra old views, if any */
1135 for (; i < info->nr_views; i++) {
1136 pipe_sampler_view_reference(&info->views[i], NULL);
1137 }
1138
1139 info->nr_views = count;
1140
1141 /* bind the new sampler views */
1142 switch (shader_stage) {
1143 case PIPE_SHADER_FRAGMENT:
1144 ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
1145 break;
1146 case PIPE_SHADER_VERTEX:
1147 ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
1148 break;
1149 case PIPE_SHADER_GEOMETRY:
1150 ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
1151 break;
1152 default:
1153 assert(!"bad shader type in cso_set_sampler_views()");
1154 }
1155 }
1156
1157
1158 void
1159 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1160 {
1161 struct sampler_info *info = &ctx->samplers[shader_stage];
1162 unsigned i;
1163
1164 info->nr_views_saved = info->nr_views;
1165
1166 for (i = 0; i < info->nr_views; i++) {
1167 assert(!info->views_saved[i]);
1168 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1169 }
1170 }
1171
1172
1173 void
1174 cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1175 {
1176 struct sampler_info *info = &ctx->samplers[shader_stage];
1177 unsigned i, nr_saved = info->nr_views_saved;
1178
1179 for (i = 0; i < nr_saved; i++) {
1180 pipe_sampler_view_reference(&info->views[i], NULL);
1181 /* move the reference from one pointer to another */
1182 info->views[i] = info->views_saved[i];
1183 info->views_saved[i] = NULL;
1184 }
1185 for (; i < info->nr_views; i++) {
1186 pipe_sampler_view_reference(&info->views[i], NULL);
1187 }
1188
1189 /* bind the old/saved sampler views */
1190 switch (shader_stage) {
1191 case PIPE_SHADER_FRAGMENT:
1192 ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
1193 break;
1194 case PIPE_SHADER_VERTEX:
1195 ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
1196 break;
1197 case PIPE_SHADER_GEOMETRY:
1198 ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
1199 break;
1200 default:
1201 assert(!"bad shader type in cso_restore_sampler_views()");
1202 }
1203
1204 info->nr_views = nr_saved;
1205 info->nr_views_saved = 0;
1206 }
1207
1208
1209 void
1210 cso_set_stream_outputs(struct cso_context *ctx,
1211 unsigned num_targets,
1212 struct pipe_stream_output_target **targets,
1213 unsigned append_bitmask)
1214 {
1215 struct pipe_context *pipe = ctx->pipe;
1216 uint i;
1217
1218 if (!ctx->has_streamout) {
1219 assert(num_targets == 0);
1220 return;
1221 }
1222
1223 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1224 /* Nothing to do. */
1225 return;
1226 }
1227
1228 /* reference new targets */
1229 for (i = 0; i < num_targets; i++) {
1230 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1231 }
1232 /* unref extra old targets, if any */
1233 for (; i < ctx->nr_so_targets; i++) {
1234 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1235 }
1236
1237 pipe->set_stream_output_targets(pipe, num_targets, targets,
1238 append_bitmask);
1239 ctx->nr_so_targets = num_targets;
1240 }
1241
1242 void
1243 cso_save_stream_outputs(struct cso_context *ctx)
1244 {
1245 uint i;
1246
1247 if (!ctx->has_streamout) {
1248 return;
1249 }
1250
1251 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1252
1253 for (i = 0; i < ctx->nr_so_targets; i++) {
1254 assert(!ctx->so_targets_saved[i]);
1255 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1256 }
1257 }
1258
1259 void
1260 cso_restore_stream_outputs(struct cso_context *ctx)
1261 {
1262 struct pipe_context *pipe = ctx->pipe;
1263 uint i;
1264
1265 if (!ctx->has_streamout) {
1266 return;
1267 }
1268
1269 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1270 /* Nothing to do. */
1271 return;
1272 }
1273
1274 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1275 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1276 /* move the reference from one pointer to another */
1277 ctx->so_targets[i] = ctx->so_targets_saved[i];
1278 ctx->so_targets_saved[i] = NULL;
1279 }
1280 for (; i < ctx->nr_so_targets; i++) {
1281 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1282 }
1283
1284 /* ~0 means append */
1285 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1286 ctx->so_targets, ~0);
1287
1288 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1289 ctx->nr_so_targets_saved = 0;
1290 }
1291
1292 /* drawing */
1293
1294 void
1295 cso_set_index_buffer(struct cso_context *cso,
1296 const struct pipe_index_buffer *ib)
1297 {
1298 struct u_vbuf *vbuf = cso->vbuf;
1299
1300 if (vbuf) {
1301 u_vbuf_set_index_buffer(vbuf, ib);
1302 } else {
1303 struct pipe_context *pipe = cso->pipe;
1304 pipe->set_index_buffer(pipe, ib);
1305 }
1306 }
1307
1308 void
1309 cso_draw_vbo(struct cso_context *cso,
1310 const struct pipe_draw_info *info)
1311 {
1312 struct u_vbuf *vbuf = cso->vbuf;
1313
1314 if (vbuf) {
1315 u_vbuf_draw_vbo(vbuf, info);
1316 } else {
1317 struct pipe_context *pipe = cso->pipe;
1318 pipe->draw_vbo(pipe, info);
1319 }
1320 }
1321
1322 void
1323 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1324 {
1325 struct pipe_draw_info info;
1326
1327 util_draw_init_info(&info);
1328
1329 info.mode = mode;
1330 info.start = start;
1331 info.count = count;
1332 info.min_index = start;
1333 info.max_index = start + count - 1;
1334
1335 cso_draw_vbo(cso, &info);
1336 }