cso: 80-column wrapping, remove trailing whitespace, etc
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these for fragment samplers and another for vertex samplers.
56 */
/**
 * Info related to samplers and sampler views.
 * There is one instance of this per shader stage (indexed by
 * PIPE_SHADER_x in cso_context::samplers).
 */
struct sampler_info
{
   /* Mirror of the sampler state most recently bound on the driver;
    * used to skip redundant bind_*_sampler_states() calls. */
   struct {
      void *samplers[PIPE_MAX_SAMPLERS];
      unsigned nr_samplers;
   } hw;

   /* Current CSO sampler handles (not necessarily bound yet). */
   void *samplers[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers;

   /* 1-deep save slot for cso_save/restore_samplers(). */
   void *samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers_saved;

   /* Current sampler views (reference-counted). */
   struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
   unsigned nr_views;

   /* 1-deep save slot for cso_save/restore_sampler_views(). */
   struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_views_saved;
};
76
77
78
/**
 * The CSO context: wraps a pipe_context with a cache of created state
 * objects plus shadow copies of the currently-bound state, so redundant
 * state changes can be filtered out before reaching the driver.
 */
struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;   /* hash cache of driver state objects */
   struct u_vbuf *vbuf;       /* optional vertex-buffer fallback module */

   boolean has_geometry_shader;
   boolean has_streamout;

   /* Per-shader-stage sampler/sampler-view state. */
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   uint nr_vertex_buffers;
   struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];

   uint nr_vertex_buffers_saved;
   struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved, *geometry_shader;
   void *vertex_shader, *vertex_shader_saved, *geometry_shader_saved;
   void *velements, *velements_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
120
121
122 static boolean delete_blend_state(struct cso_context *ctx, void *state)
123 {
124 struct cso_blend *cso = (struct cso_blend *)state;
125
126 if (ctx->blend == cso->data)
127 return FALSE;
128
129 if (cso->delete_state)
130 cso->delete_state(cso->context, cso->data);
131 FREE(state);
132 return TRUE;
133 }
134
135 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
136 {
137 struct cso_depth_stencil_alpha *cso =
138 (struct cso_depth_stencil_alpha *)state;
139
140 if (ctx->depth_stencil == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146
147 return TRUE;
148 }
149
150 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
151 {
152 struct cso_sampler *cso = (struct cso_sampler *)state;
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156 return TRUE;
157 }
158
159 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
160 {
161 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
162
163 if (ctx->rasterizer == cso->data)
164 return FALSE;
165 if (cso->delete_state)
166 cso->delete_state(cso->context, cso->data);
167 FREE(state);
168 return TRUE;
169 }
170
171 static boolean delete_vertex_elements(struct cso_context *ctx,
172 void *state)
173 {
174 struct cso_velements *cso = (struct cso_velements *)state;
175
176 if (ctx->velements == cso->data)
177 return FALSE;
178
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185
186 static INLINE boolean delete_cso(struct cso_context *ctx,
187 void *state, enum cso_cache_type type)
188 {
189 switch (type) {
190 case CSO_BLEND:
191 return delete_blend_state(ctx, state);
192 break;
193 case CSO_SAMPLER:
194 return delete_sampler_state(ctx, state);
195 break;
196 case CSO_DEPTH_STENCIL_ALPHA:
197 return delete_depth_stencil_state(ctx, state);
198 break;
199 case CSO_RASTERIZER:
200 return delete_rasterizer_state(ctx, state);
201 break;
202 case CSO_VELEMENTS:
203 return delete_vertex_elements(ctx, state);
204 break;
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
/**
 * Callback invoked by the CSO cache when a hash table grows too large:
 * evict entries until the table is back under its size budget.
 */
static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a quarter of the
    * entries; otherwise every subsequent call would repeat the same
    * work. */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   /* The (max_size < max_entries) factor is 0 or 1: only evict the
    * extra quarter when the table actually exceeds max_size. */
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* FIXME: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
      /* NOTE(review): if every remaining entry is currently bound,
       * delete_cso() keeps returning FALSE and this loop walks past
       * the nodes without decrementing to_remove — relies on enough
       * entries being deletable.  Verify against cso_hash iteration
       * semantics. */
   }
}
236
237 static void cso_init_vbuf(struct cso_context *cso)
238 {
239 struct u_vbuf_caps caps;
240
241 u_vbuf_get_caps(cso->pipe->screen, &caps);
242
243 /* Install u_vbuf if there is anything unsupported. */
244 if (!caps.buffer_offset_unaligned ||
245 !caps.buffer_stride_unaligned ||
246 !caps.velem_src_offset_unaligned ||
247 !caps.format_fixed32 ||
248 !caps.format_float16 ||
249 !caps.format_float64 ||
250 !caps.format_norm32 ||
251 !caps.format_scaled32 ||
252 !caps.user_vertex_buffers) {
253 cso->vbuf = u_vbuf_create(cso->pipe, &caps);
254 }
255 }
256
257 struct cso_context *cso_create_context( struct pipe_context *pipe )
258 {
259 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
260 if (ctx == NULL)
261 goto out;
262
263 assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);
264
265 ctx->cache = cso_cache_create();
266 if (ctx->cache == NULL)
267 goto out;
268 cso_cache_set_sanitize_callback(ctx->cache,
269 sanitize_hash,
270 ctx);
271
272 ctx->pipe = pipe;
273
274 cso_init_vbuf(ctx);
275
276 /* Enable for testing: */
277 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
278
279 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
280 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
281 ctx->has_geometry_shader = TRUE;
282 }
283 if (pipe->screen->get_param(pipe->screen,
284 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
285 ctx->has_streamout = TRUE;
286 }
287
288 return ctx;
289
290 out:
291 cso_destroy_context( ctx );
292 return NULL;
293 }
294
295 /**
296 * Prior to context destruction, this function unbinds all state objects.
297 */
/**
 * Prior to context destruction, this function unbinds all state objects
 * from the pipe context, releases all held references (sampler views,
 * framebuffer surfaces, vertex buffers, stream-output targets) and
 * deletes the CSO cache.
 */
void cso_release_all( struct cso_context *ctx )
{
   unsigned i, shader;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
      ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
      /* some driver hooks are optional — guard before calling */
      if (ctx->pipe->bind_vertex_sampler_states)
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_vertex_sampler_views)
         ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_stream_output_targets)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
   }

   /* release sampler views (current and saved) for every shader stage */
   for (shader = 0; shader < Elements(ctx->samplers); shader++) {
      struct sampler_info *info = &ctx->samplers[shader];
      for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
         pipe_sampler_view_reference(&info->views[i], NULL);
         pipe_sampler_view_reference(&info->views_saved[i], NULL);
      }
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   /* copying an empty list in presumably drops the held buffer
    * references — relies on util_copy_vertex_buffers() semantics */
   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            NULL, 0);
   util_copy_vertex_buffers(ctx->vertex_buffers_saved,
                            &ctx->nr_vertex_buffers_saved,
                            NULL, 0);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }
}
348
349
350 /**
351 * Free the CSO context. NOTE: the state tracker should have previously called
352 * cso_release_all().
353 */
354 void cso_destroy_context( struct cso_context *ctx )
355 {
356 if (ctx) {
357 if (ctx->vbuf)
358 u_vbuf_destroy(ctx->vbuf);
359 FREE( ctx );
360 }
361 }
362
363
/* These functions will either find the state matching the given template
 * in the cache, or they will create a new state from the template,
 * insert it in the cache and return it.
 */
368
/*
 * If the driver returns 0 from the create method, the data member of
 * the cso is set to the template itself.
 */
373
/**
 * Bind a blend state via the CSO cache: hash the template, create and
 * cache the driver object on a miss, then bind only when the resulting
 * handle differs from the currently-bound one.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* When independent blending is off only rt[0] matters, so hash just
    * the prefix up to rt[1] — avoids cache misses from garbage in the
    * unused rt[] tail. */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* cache miss: create a new driver object and insert it */
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* zero first so the partial key_size copy leaves the tail defined */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   /* skip the driver call if this state is already bound */
   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
417
418 void cso_save_blend(struct cso_context *ctx)
419 {
420 assert(!ctx->blend_saved);
421 ctx->blend_saved = ctx->blend;
422 }
423
424 void cso_restore_blend(struct cso_context *ctx)
425 {
426 if (ctx->blend != ctx->blend_saved) {
427 ctx->blend = ctx->blend_saved;
428 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
429 }
430 ctx->blend_saved = NULL;
431 }
432
433
434
/**
 * Bind a depth/stencil/alpha state via the CSO cache: look up the
 * template, create and cache the driver object on a miss, and bind
 * only when the handle differs from the currently-bound one.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* cache miss: create a new driver object and insert it */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   /* skip the driver call if this state is already bound */
   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
480
481 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
482 {
483 assert(!ctx->depth_stencil_saved);
484 ctx->depth_stencil_saved = ctx->depth_stencil;
485 }
486
487 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
488 {
489 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
490 ctx->depth_stencil = ctx->depth_stencil_saved;
491 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
492 ctx->depth_stencil_saved);
493 }
494 ctx->depth_stencil_saved = NULL;
495 }
496
497
498
/**
 * Bind a rasterizer state via the CSO cache: look up the template,
 * create and cache the driver object on a miss, and bind only when the
 * handle differs from the currently-bound one.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* cache miss: create a new driver object and insert it */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   /* skip the driver call if this state is already bound */
   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
539
540 void cso_save_rasterizer(struct cso_context *ctx)
541 {
542 assert(!ctx->rasterizer_saved);
543 ctx->rasterizer_saved = ctx->rasterizer;
544 }
545
546 void cso_restore_rasterizer(struct cso_context *ctx)
547 {
548 if (ctx->rasterizer != ctx->rasterizer_saved) {
549 ctx->rasterizer = ctx->rasterizer_saved;
550 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
551 }
552 ctx->rasterizer_saved = NULL;
553 }
554
555
556
557 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
558 void *handle )
559 {
560 if (ctx->fragment_shader != handle) {
561 ctx->fragment_shader = handle;
562 ctx->pipe->bind_fs_state(ctx->pipe, handle);
563 }
564 return PIPE_OK;
565 }
566
567 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
568 {
569 if (handle == ctx->fragment_shader) {
570 /* unbind before deleting */
571 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
572 ctx->fragment_shader = NULL;
573 }
574 ctx->pipe->delete_fs_state(ctx->pipe, handle);
575 }
576
577 void cso_save_fragment_shader(struct cso_context *ctx)
578 {
579 assert(!ctx->fragment_shader_saved);
580 ctx->fragment_shader_saved = ctx->fragment_shader;
581 }
582
583 void cso_restore_fragment_shader(struct cso_context *ctx)
584 {
585 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
586 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
587 ctx->fragment_shader = ctx->fragment_shader_saved;
588 }
589 ctx->fragment_shader_saved = NULL;
590 }
591
592
593 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
594 void *handle )
595 {
596 if (ctx->vertex_shader != handle) {
597 ctx->vertex_shader = handle;
598 ctx->pipe->bind_vs_state(ctx->pipe, handle);
599 }
600 return PIPE_OK;
601 }
602
603 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
604 {
605 if (handle == ctx->vertex_shader) {
606 /* unbind before deleting */
607 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
608 ctx->vertex_shader = NULL;
609 }
610 ctx->pipe->delete_vs_state(ctx->pipe, handle);
611 }
612
613 void cso_save_vertex_shader(struct cso_context *ctx)
614 {
615 assert(!ctx->vertex_shader_saved);
616 ctx->vertex_shader_saved = ctx->vertex_shader;
617 }
618
619 void cso_restore_vertex_shader(struct cso_context *ctx)
620 {
621 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
622 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
623 ctx->vertex_shader = ctx->vertex_shader_saved;
624 }
625 ctx->vertex_shader_saved = NULL;
626 }
627
628
629 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
630 const struct pipe_framebuffer_state *fb)
631 {
632 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
633 util_copy_framebuffer_state(&ctx->fb, fb);
634 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
635 }
636 return PIPE_OK;
637 }
638
639 void cso_save_framebuffer(struct cso_context *ctx)
640 {
641 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
642 }
643
644 void cso_restore_framebuffer(struct cso_context *ctx)
645 {
646 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
647 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
648 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
649 util_unreference_framebuffer_state(&ctx->fb_saved);
650 }
651 }
652
653
654 enum pipe_error cso_set_viewport(struct cso_context *ctx,
655 const struct pipe_viewport_state *vp)
656 {
657 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
658 ctx->vp = *vp;
659 ctx->pipe->set_viewport_state(ctx->pipe, vp);
660 }
661 return PIPE_OK;
662 }
663
664 void cso_save_viewport(struct cso_context *ctx)
665 {
666 ctx->vp_saved = ctx->vp;
667 }
668
669
670 void cso_restore_viewport(struct cso_context *ctx)
671 {
672 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
673 ctx->vp = ctx->vp_saved;
674 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
675 }
676 }
677
678
679 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
680 const struct pipe_blend_color *bc)
681 {
682 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
683 ctx->blend_color = *bc;
684 ctx->pipe->set_blend_color(ctx->pipe, bc);
685 }
686 return PIPE_OK;
687 }
688
689 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
690 unsigned sample_mask)
691 {
692 if (ctx->sample_mask != sample_mask) {
693 ctx->sample_mask = sample_mask;
694 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
695 }
696 return PIPE_OK;
697 }
698
699 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
700 const struct pipe_stencil_ref *sr)
701 {
702 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
703 ctx->stencil_ref = *sr;
704 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
705 }
706 return PIPE_OK;
707 }
708
709 void cso_save_stencil_ref(struct cso_context *ctx)
710 {
711 ctx->stencil_ref_saved = ctx->stencil_ref;
712 }
713
714
715 void cso_restore_stencil_ref(struct cso_context *ctx)
716 {
717 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
718 sizeof(ctx->stencil_ref))) {
719 ctx->stencil_ref = ctx->stencil_ref_saved;
720 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
721 }
722 }
723
724 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
725 void *handle)
726 {
727 assert(ctx->has_geometry_shader || !handle);
728
729 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
730 ctx->geometry_shader = handle;
731 ctx->pipe->bind_gs_state(ctx->pipe, handle);
732 }
733 return PIPE_OK;
734 }
735
736 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
737 {
738 if (handle == ctx->geometry_shader) {
739 /* unbind before deleting */
740 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
741 ctx->geometry_shader = NULL;
742 }
743 ctx->pipe->delete_gs_state(ctx->pipe, handle);
744 }
745
746 void cso_save_geometry_shader(struct cso_context *ctx)
747 {
748 if (!ctx->has_geometry_shader) {
749 return;
750 }
751
752 assert(!ctx->geometry_shader_saved);
753 ctx->geometry_shader_saved = ctx->geometry_shader;
754 }
755
756 void cso_restore_geometry_shader(struct cso_context *ctx)
757 {
758 if (!ctx->has_geometry_shader) {
759 return;
760 }
761
762 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
763 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
764 ctx->geometry_shader = ctx->geometry_shader_saved;
765 }
766 ctx->geometry_shader_saved = NULL;
767 }
768
769 /* clip state */
770
/* Copy just the user clip planes from src to dst; no other members of
 * pipe_clip_state are tracked by this module. */
static INLINE void
clip_state_cpy(struct pipe_clip_state *dst,
               const struct pipe_clip_state *src)
{
   memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
}
777
/* memcmp-style compare of the user clip planes only; returns 0 when
 * equal, non-zero otherwise. */
static INLINE int
clip_state_cmp(const struct pipe_clip_state *a,
               const struct pipe_clip_state *b)
{
   return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
}
784
785 void
786 cso_set_clip(struct cso_context *ctx,
787 const struct pipe_clip_state *clip)
788 {
789 if (clip_state_cmp(&ctx->clip, clip)) {
790 clip_state_cpy(&ctx->clip, clip);
791 ctx->pipe->set_clip_state(ctx->pipe, clip);
792 }
793 }
794
795 void
796 cso_save_clip(struct cso_context *ctx)
797 {
798 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
799 }
800
801 void
802 cso_restore_clip(struct cso_context *ctx)
803 {
804 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
805 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
806 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
807 }
808 }
809
/**
 * Bind a vertex-elements state via the CSO cache (or forward to u_vbuf
 * when the fallback module is active).
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   /* u_vbuf owns vertex-element state when installed */
   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count in the stored state data too.
    * Otherwise the first few pipe_vertex_elements could be identical
    * even if count differs, and there's no guarantee the hash would
    * differ in that case either.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* cache miss: create a new driver object and insert it */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* only key_size bytes are meaningful; the tail of cso->state
       * stays uninitialized but is never read for this entry */
      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* skip the driver call if this state is already bound */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
869
870 void cso_save_vertex_elements(struct cso_context *ctx)
871 {
872 struct u_vbuf *vbuf = ctx->vbuf;
873
874 if (vbuf) {
875 u_vbuf_save_vertex_elements(vbuf);
876 return;
877 }
878
879 assert(!ctx->velements_saved);
880 ctx->velements_saved = ctx->velements;
881 }
882
883 void cso_restore_vertex_elements(struct cso_context *ctx)
884 {
885 struct u_vbuf *vbuf = ctx->vbuf;
886
887 if (vbuf) {
888 u_vbuf_restore_vertex_elements(vbuf);
889 return;
890 }
891
892 if (ctx->velements != ctx->velements_saved) {
893 ctx->velements = ctx->velements_saved;
894 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
895 }
896 ctx->velements_saved = NULL;
897 }
898
899 /* vertex buffers */
900
901 void cso_set_vertex_buffers(struct cso_context *ctx,
902 unsigned count,
903 const struct pipe_vertex_buffer *buffers)
904 {
905 struct u_vbuf *vbuf = ctx->vbuf;
906
907 if (vbuf) {
908 u_vbuf_set_vertex_buffers(vbuf, count, buffers);
909 return;
910 }
911
912 if (count != ctx->nr_vertex_buffers ||
913 memcmp(buffers, ctx->vertex_buffers,
914 sizeof(struct pipe_vertex_buffer) * count) != 0) {
915 util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
916 buffers, count);
917 ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
918 }
919 }
920
921 void cso_save_vertex_buffers(struct cso_context *ctx)
922 {
923 struct u_vbuf *vbuf = ctx->vbuf;
924
925 if (vbuf) {
926 u_vbuf_save_vertex_buffers(vbuf);
927 return;
928 }
929
930 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
931 &ctx->nr_vertex_buffers_saved,
932 ctx->vertex_buffers,
933 ctx->nr_vertex_buffers);
934 }
935
/**
 * Restore vertex buffers from the 1-deep save slot and rebind them,
 * or delegate to u_vbuf when the fallback module is active.
 */
void cso_restore_vertex_buffers(struct cso_context *ctx)
{
   unsigned i;
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_buffers(vbuf);
      return;
   }

   /* copy saved -> current first (so the buffers stay referenced) */
   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            ctx->vertex_buffers_saved,
                            ctx->nr_vertex_buffers_saved);

   /* then drop the references held by the save slot */
   for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
      pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
   }
   ctx->nr_vertex_buffers_saved = 0;

   /* rebind unconditionally — no redundancy check here */
   ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
                                 ctx->vertex_buffers);
}
959
960
961 /**************** fragment/vertex sampler view state *************************/
962
/**
 * Look up (or create and cache) the sampler CSO for the given template
 * and store its handle in info->samplers[idx].  A NULL template clears
 * the slot.  The driver bind is deferred to single_sampler_done().
 */
static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* cache miss: create a new driver object and insert it */
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}
1007
1008 enum pipe_error
1009 cso_single_sampler(struct cso_context *ctx,
1010 unsigned shader_stage,
1011 unsigned idx,
1012 const struct pipe_sampler_state *templ)
1013 {
1014 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1015 }
1016
1017
1018
/**
 * Recompute the sampler count for the stage and bind the sampler array
 * to the driver, skipping the bind when nothing changed since the last
 * call (tracked via the "hw" mirror).
 */
static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   /* only call into the driver when count or contents changed */
   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));
      info->hw.nr_samplers = info->nr_samplers;

      switch (shader_stage) {
      case PIPE_SHADER_FRAGMENT:
         ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      case PIPE_SHADER_VERTEX:
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
                                               info->nr_samplers,
                                               info->samplers);
         break;
      case PIPE_SHADER_GEOMETRY:
         ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      default:
         assert(!"bad shader type in single_sampler_done()");
      }
   }
}
1064
/* Public wrapper: flush pending per-slot sampler changes to the driver. */
void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}
1070
1071
/*
 * If the function encounters any errors it will return the
 * last one.  Done to always try to set as many samplers
 * as possible.
 */
/*
 * Set all samplers for a shader stage in one go, then bind them.
 * If the function encounters any errors it will return the last one.
 * Done to always try to set as many samplers as possible.
 */
enum pipe_error
cso_set_samplers(struct cso_context *ctx,
                 unsigned shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;
   enum pipe_error temp, error = PIPE_OK;

   /* TODO: fastpath
    */

   /* set the first nr slots from the templates */
   for (i = 0; i < nr; i++) {
      temp = single_sampler(ctx, info, i, templates[i]);
      if (temp != PIPE_OK)
         error = temp;
   }

   /* clear any previously-set slots above nr */
   for ( ; i < info->nr_samplers; i++) {
      temp = single_sampler(ctx, info, i, NULL);
      if (temp != PIPE_OK)
         error = temp;
   }

   /* bind the result (skips redundant driver calls internally) */
   single_sampler_done(ctx, shader_stage);

   return error;
}
1106
1107 void
1108 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1109 {
1110 struct sampler_info *info = &ctx->samplers[shader_stage];
1111 info->nr_samplers_saved = info->nr_samplers;
1112 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1113 }
1114
1115
1116 void
1117 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1118 {
1119 struct sampler_info *info = &ctx->samplers[shader_stage];
1120 info->nr_samplers = info->nr_samplers_saved;
1121 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1122 single_sampler_done(ctx, shader_stage);
1123 }
1124
1125
1126 void
1127 cso_set_sampler_views(struct cso_context *ctx,
1128 unsigned shader_stage,
1129 unsigned count,
1130 struct pipe_sampler_view **views)
1131 {
1132 struct sampler_info *info = &ctx->samplers[shader_stage];
1133 unsigned i;
1134
1135 /* reference new views */
1136 for (i = 0; i < count; i++) {
1137 pipe_sampler_view_reference(&info->views[i], views[i]);
1138 }
1139 /* unref extra old views, if any */
1140 for (; i < info->nr_views; i++) {
1141 pipe_sampler_view_reference(&info->views[i], NULL);
1142 }
1143
1144 info->nr_views = count;
1145
1146 /* bind the new sampler views */
1147 switch (shader_stage) {
1148 case PIPE_SHADER_FRAGMENT:
1149 ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
1150 break;
1151 case PIPE_SHADER_VERTEX:
1152 ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
1153 break;
1154 case PIPE_SHADER_GEOMETRY:
1155 ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
1156 break;
1157 default:
1158 assert(!"bad shader type in cso_set_sampler_views()");
1159 }
1160 }
1161
1162
1163 void
1164 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1165 {
1166 struct sampler_info *info = &ctx->samplers[shader_stage];
1167 unsigned i;
1168
1169 info->nr_views_saved = info->nr_views;
1170
1171 for (i = 0; i < info->nr_views; i++) {
1172 assert(!info->views_saved[i]);
1173 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1174 }
1175 }
1176
1177
/**
 * Rebind the sampler views stashed by cso_save_sampler_views() and
 * release the current bindings.  The saved array's references are moved
 * (not re-counted) into the active array, leaving the save slots empty.
 */
void
cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i, nr_saved = info->nr_views_saved;

   for (i = 0; i < nr_saved; i++) {
      /* release the currently bound view in this slot */
      pipe_sampler_view_reference(&info->views[i], NULL);
      /* move the reference from one pointer to another */
      info->views[i] = info->views_saved[i];
      info->views_saved[i] = NULL;
   }
   /* unbind any remaining views beyond the saved count */
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   /* bind the old/saved sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   default:
      assert(!"bad shader type in cso_restore_sampler_views()");
   }

   info->nr_views = nr_saved;
   info->nr_views_saved = 0;
}
1212
1213
1214 void
1215 cso_set_stream_outputs(struct cso_context *ctx,
1216 unsigned num_targets,
1217 struct pipe_stream_output_target **targets,
1218 unsigned append_bitmask)
1219 {
1220 struct pipe_context *pipe = ctx->pipe;
1221 uint i;
1222
1223 if (!ctx->has_streamout) {
1224 assert(num_targets == 0);
1225 return;
1226 }
1227
1228 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1229 /* Nothing to do. */
1230 return;
1231 }
1232
1233 /* reference new targets */
1234 for (i = 0; i < num_targets; i++) {
1235 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1236 }
1237 /* unref extra old targets, if any */
1238 for (; i < ctx->nr_so_targets; i++) {
1239 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1240 }
1241
1242 pipe->set_stream_output_targets(pipe, num_targets, targets,
1243 append_bitmask);
1244 ctx->nr_so_targets = num_targets;
1245 }
1246
1247 void
1248 cso_save_stream_outputs(struct cso_context *ctx)
1249 {
1250 uint i;
1251
1252 if (!ctx->has_streamout) {
1253 return;
1254 }
1255
1256 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1257
1258 for (i = 0; i < ctx->nr_so_targets; i++) {
1259 assert(!ctx->so_targets_saved[i]);
1260 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1261 }
1262 }
1263
/**
 * Rebind the stream output targets stashed by cso_save_stream_outputs()
 * and release the current bindings.  The saved array's references are
 * moved (not re-counted) into the active array.
 */
void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      /* release the currently bound target in this slot */
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
   }
   /* unbind any remaining targets beyond the saved count */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   /* ~0 means append */
   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, ~0);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1296
1297 /* drawing */
1298
1299 void
1300 cso_set_index_buffer(struct cso_context *cso,
1301 const struct pipe_index_buffer *ib)
1302 {
1303 struct u_vbuf *vbuf = cso->vbuf;
1304
1305 if (vbuf) {
1306 u_vbuf_set_index_buffer(vbuf, ib);
1307 } else {
1308 struct pipe_context *pipe = cso->pipe;
1309 pipe->set_index_buffer(pipe, ib);
1310 }
1311 }
1312
1313 void
1314 cso_draw_vbo(struct cso_context *cso,
1315 const struct pipe_draw_info *info)
1316 {
1317 struct u_vbuf *vbuf = cso->vbuf;
1318
1319 if (vbuf) {
1320 u_vbuf_draw_vbo(vbuf, info);
1321 } else {
1322 struct pipe_context *pipe = cso->pipe;
1323 pipe->draw_vbo(pipe, info);
1324 }
1325 }
1326
1327 void
1328 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1329 {
1330 struct pipe_draw_info info;
1331
1332 util_draw_init_info(&info);
1333
1334 info.mode = mode;
1335 info.start = start;
1336 info.count = count;
1337 info.min_index = start;
1338 info.max_index = start + count - 1;
1339
1340 cso_draw_vbo(cso, &info);
1341 }