gallium: make user vertex buffers optional
mesa.git: src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
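/*
 * A minimal usage sketch (the blend and draw parameters below are
 * illustrative only, not taken from any particular state tracker):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(cso, &blend);
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *
 *    cso_release_all(cso);
 *    cso_destroy_context(cso);
 */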
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these for fragment samplers and another for vertex samplers.
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info fragment_samplers;
88 struct sampler_info vertex_samplers;
89
90 uint nr_vertex_buffers;
91 struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];
92
93 uint nr_vertex_buffers_saved;
94 struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102    /** Current and saved state.
103     * The saved state is used as a 1-deep stack: each cso_save_*() call must
104     * be balanced by one cso_restore_*() (see the sketch below this struct). */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved, *geometry_shader;
109 void *vertex_shader, *vertex_shader_saved, *geometry_shader_saved;
110 void *velements, *velements_saved;
111
112 struct pipe_clip_state clip;
113 struct pipe_clip_state clip_saved;
114
115 struct pipe_framebuffer_state fb, fb_saved;
116 struct pipe_viewport_state vp, vp_saved;
117 struct pipe_blend_color blend_color;
118 unsigned sample_mask;
119 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
120 };
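/*
 * Sketch of the 1-deep save/restore stack noted above, as used by utility
 * code that must temporarily override state (an assumed caller, shown for
 * illustration only):
 *
 *    cso_save_blend(cso);
 *    cso_save_fragment_shader(cso);
 *       ... bind temporary blend state and fragment shader, draw ...
 *    cso_restore_fragment_shader(cso);
 *    cso_restore_blend(cso);
 */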
121
122
123 static boolean delete_blend_state(struct cso_context *ctx, void *state)
124 {
125 struct cso_blend *cso = (struct cso_blend *)state;
126
127 if (ctx->blend == cso->data)
128 return FALSE;
129
130 if (cso->delete_state)
131 cso->delete_state(cso->context, cso->data);
132 FREE(state);
133 return TRUE;
134 }
135
136 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
137 {
138 struct cso_depth_stencil_alpha *cso = (struct cso_depth_stencil_alpha *)state;
139
140 if (ctx->depth_stencil == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146
147 return TRUE;
148 }
149
150 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
151 {
152 struct cso_sampler *cso = (struct cso_sampler *)state;
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156 return TRUE;
157 }
158
159 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
160 {
161 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
162
163 if (ctx->rasterizer == cso->data)
164 return FALSE;
165 if (cso->delete_state)
166 cso->delete_state(cso->context, cso->data);
167 FREE(state);
168 return TRUE;
169 }
170
171 static boolean delete_vertex_elements(struct cso_context *ctx,
172 void *state)
173 {
174 struct cso_velements *cso = (struct cso_velements *)state;
175
176 if (ctx->velements == cso->data)
177 return FALSE;
178
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185
186 static INLINE boolean delete_cso(struct cso_context *ctx,
187 void *state, enum cso_cache_type type)
188 {
189 switch (type) {
190 case CSO_BLEND:
191 return delete_blend_state(ctx, state);
192 break;
193 case CSO_SAMPLER:
194 return delete_sampler_state(ctx, state);
195 break;
196 case CSO_DEPTH_STENCIL_ALPHA:
197 return delete_depth_stencil_state(ctx, state);
198 break;
199 case CSO_RASTERIZER:
200 return delete_rasterizer_state(ctx, state);
201 break;
202 case CSO_VELEMENTS:
203 return delete_vertex_elements(ctx, state);
204 break;
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
212 static INLINE void sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
213 int max_size, void *user_data)
214 {
215 struct cso_context *ctx = (struct cso_context *)user_data;
216    /* If we're approaching the maximum size, remove a fourth of the entries;
217     * otherwise every subsequent call would have to go through this again. */
218 int hash_size = cso_hash_size(hash);
219 int max_entries = (max_size > hash_size) ? max_size : hash_size;
220 int to_remove = (max_size < max_entries) * max_entries/4;
221 struct cso_hash_iter iter = cso_hash_first_node(hash);
222 if (hash_size > max_size)
223 to_remove += hash_size - max_size;
224 while (to_remove) {
225       /* remove elements until we're good */
226       /* FIXME: currently we pick the nodes to remove at random */
227 void *cso = cso_hash_iter_data(iter);
228 if (delete_cso(ctx, cso, type)) {
229 iter = cso_hash_erase(hash, iter);
230 --to_remove;
231 } else
232 iter = cso_hash_iter_next(iter);
233 }
234 }
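/*
 * Worked example of the eviction math above (numbers are illustrative):
 * with max_size = 100 and hash_size = 101 we get max_entries = 101, so
 * to_remove = 101/4 (integer division, 25) + (101 - 100) = 26: a quarter
 * of the table for headroom plus the one entry over the limit.
 */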
235
236
237 struct cso_context *cso_create_context( struct pipe_context *pipe )
238 {
239 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
240 if (ctx == NULL)
241 goto out;
242
243 assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);
244
245 ctx->cache = cso_cache_create();
246 if (ctx->cache == NULL)
247 goto out;
248 cso_cache_set_sanitize_callback(ctx->cache,
249 sanitize_hash,
250 ctx);
251
252 ctx->pipe = pipe;
253
254 /* Enable for testing: */
255 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
256
257 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
258 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
259 ctx->has_geometry_shader = TRUE;
260 }
261 if (pipe->screen->get_param(pipe->screen,
262 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
263 ctx->has_streamout = TRUE;
264 }
265
266 return ctx;
267
268 out:
269 cso_destroy_context( ctx );
270 return NULL;
271 }
272
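/**
 * Install an optional u_vbuf instance (created with u_vbuf_create(), see
 * util/u_vbuf.h).  When set, the vertex buffer, vertex element, index buffer
 * and draw entry points below are routed through u_vbuf (which can, for
 * example, upload user vertex buffers on behalf of the driver) instead of
 * calling straight into the pipe driver.
 */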
273 void cso_install_vbuf(struct cso_context *ctx, struct u_vbuf *vbuf)
274 {
275 ctx->vbuf = vbuf;
276 }
277
278 /**
279 * Prior to context destruction, this function unbinds all state objects.
280 */
281 void cso_release_all( struct cso_context *ctx )
282 {
283 unsigned i;
284 struct sampler_info *info;
285
286 if (ctx->pipe) {
287 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
288 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
289 ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
290 if (ctx->pipe->bind_vertex_sampler_states)
291 ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
292 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
293 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
294 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
295 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
296 ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
297 if (ctx->pipe->set_vertex_sampler_views)
298 ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
299 if (ctx->pipe->set_stream_output_targets)
300 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
301 }
302
303 /* free fragment samplers, views */
304 info = &ctx->fragment_samplers;
305 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
306 pipe_sampler_view_reference(&info->views[i], NULL);
307 pipe_sampler_view_reference(&info->views_saved[i], NULL);
308 }
309
310 /* free vertex samplers, views */
311 info = &ctx->vertex_samplers;
312 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
313 pipe_sampler_view_reference(&info->views[i], NULL);
314 pipe_sampler_view_reference(&info->views_saved[i], NULL);
315 }
316
317 util_unreference_framebuffer_state(&ctx->fb);
318 util_unreference_framebuffer_state(&ctx->fb_saved);
319
320 util_copy_vertex_buffers(ctx->vertex_buffers,
321 &ctx->nr_vertex_buffers,
322 NULL, 0);
323 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
324 &ctx->nr_vertex_buffers_saved,
325 NULL, 0);
326
327 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
328 pipe_so_target_reference(&ctx->so_targets[i], NULL);
329 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
330 }
331
332 if (ctx->cache) {
333 cso_cache_delete( ctx->cache );
334 ctx->cache = NULL;
335 }
336 }
337
338
339 /**
340 * Free the CSO context. NOTE: the state tracker should have previously called
341 * cso_release_all().
342 */
343 void cso_destroy_context( struct cso_context *ctx )
344 {
345 if (ctx) {
346 FREE( ctx );
347 }
348 }
349
350
351 /* These functions will either find the state matching the given template
352  * in the cache, or create a new state from the template, insert it into
353  * the cache and return it.
354  */
355 
356 /*
357  * If the driver returns 0 (NULL) from the create method, the cso's data
358  * member is set to point at the stored template itself.
359  */
360
361 enum pipe_error cso_set_blend(struct cso_context *ctx,
362 const struct pipe_blend_state *templ)
363 {
364 unsigned key_size, hash_key;
365 struct cso_hash_iter iter;
366 void *handle;
367
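   /* When independent_blend_enable is not set only rt[0] is significant,
    * so hash just the part of the state up to rt[1] for better cache reuse. */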
368 key_size = templ->independent_blend_enable ? sizeof(struct pipe_blend_state) :
369 (char *)&(templ->rt[1]) - (char *)templ;
370 hash_key = cso_construct_key((void*)templ, key_size);
371 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND, (void*)templ, key_size);
372
373 if (cso_hash_iter_is_null(iter)) {
374 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
375 if (!cso)
376 return PIPE_ERROR_OUT_OF_MEMORY;
377
378 memset(&cso->state, 0, sizeof cso->state);
379 memcpy(&cso->state, templ, key_size);
380 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
381 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
382 cso->context = ctx->pipe;
383
384 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
385 if (cso_hash_iter_is_null(iter)) {
386 FREE(cso);
387 return PIPE_ERROR_OUT_OF_MEMORY;
388 }
389
390 handle = cso->data;
391 }
392 else {
393 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
394 }
395
396 if (ctx->blend != handle) {
397 ctx->blend = handle;
398 ctx->pipe->bind_blend_state(ctx->pipe, handle);
399 }
400 return PIPE_OK;
401 }
402
403 void cso_save_blend(struct cso_context *ctx)
404 {
405 assert(!ctx->blend_saved);
406 ctx->blend_saved = ctx->blend;
407 }
408
409 void cso_restore_blend(struct cso_context *ctx)
410 {
411 if (ctx->blend != ctx->blend_saved) {
412 ctx->blend = ctx->blend_saved;
413 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
414 }
415 ctx->blend_saved = NULL;
416 }
417
418
419
420 enum pipe_error cso_set_depth_stencil_alpha(struct cso_context *ctx,
421 const struct pipe_depth_stencil_alpha_state *templ)
422 {
423 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
424 unsigned hash_key = cso_construct_key((void*)templ, key_size);
425 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
426 hash_key,
427 CSO_DEPTH_STENCIL_ALPHA,
428 (void*)templ, key_size);
429 void *handle;
430
431 if (cso_hash_iter_is_null(iter)) {
432 struct cso_depth_stencil_alpha *cso = MALLOC(sizeof(struct cso_depth_stencil_alpha));
433 if (!cso)
434 return PIPE_ERROR_OUT_OF_MEMORY;
435
436 memcpy(&cso->state, templ, sizeof(*templ));
437 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe, &cso->state);
438 cso->delete_state = (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
439 cso->context = ctx->pipe;
440
441 iter = cso_insert_state(ctx->cache, hash_key, CSO_DEPTH_STENCIL_ALPHA, cso);
442 if (cso_hash_iter_is_null(iter)) {
443 FREE(cso);
444 return PIPE_ERROR_OUT_OF_MEMORY;
445 }
446
447 handle = cso->data;
448 }
449 else {
450 handle = ((struct cso_depth_stencil_alpha *)cso_hash_iter_data(iter))->data;
451 }
452
453 if (ctx->depth_stencil != handle) {
454 ctx->depth_stencil = handle;
455 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
456 }
457 return PIPE_OK;
458 }
459
460 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
461 {
462 assert(!ctx->depth_stencil_saved);
463 ctx->depth_stencil_saved = ctx->depth_stencil;
464 }
465
466 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
467 {
468 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
469 ctx->depth_stencil = ctx->depth_stencil_saved;
470 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, ctx->depth_stencil_saved);
471 }
472 ctx->depth_stencil_saved = NULL;
473 }
474
475
476
477 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
478 const struct pipe_rasterizer_state *templ)
479 {
480 unsigned key_size = sizeof(struct pipe_rasterizer_state);
481 unsigned hash_key = cso_construct_key((void*)templ, key_size);
482 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
483 hash_key, CSO_RASTERIZER,
484 (void*)templ, key_size);
485 void *handle = NULL;
486
487 if (cso_hash_iter_is_null(iter)) {
488 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
489 if (!cso)
490 return PIPE_ERROR_OUT_OF_MEMORY;
491
492 memcpy(&cso->state, templ, sizeof(*templ));
493 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
494 cso->delete_state = (cso_state_callback)ctx->pipe->delete_rasterizer_state;
495 cso->context = ctx->pipe;
496
497 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
498 if (cso_hash_iter_is_null(iter)) {
499 FREE(cso);
500 return PIPE_ERROR_OUT_OF_MEMORY;
501 }
502
503 handle = cso->data;
504 }
505 else {
506 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
507 }
508
509 if (ctx->rasterizer != handle) {
510 ctx->rasterizer = handle;
511 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
512 }
513 return PIPE_OK;
514 }
515
516 void cso_save_rasterizer(struct cso_context *ctx)
517 {
518 assert(!ctx->rasterizer_saved);
519 ctx->rasterizer_saved = ctx->rasterizer;
520 }
521
522 void cso_restore_rasterizer(struct cso_context *ctx)
523 {
524 if (ctx->rasterizer != ctx->rasterizer_saved) {
525 ctx->rasterizer = ctx->rasterizer_saved;
526 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
527 }
528 ctx->rasterizer_saved = NULL;
529 }
530
531
532
533 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
534 void *handle )
535 {
536 if (ctx->fragment_shader != handle) {
537 ctx->fragment_shader = handle;
538 ctx->pipe->bind_fs_state(ctx->pipe, handle);
539 }
540 return PIPE_OK;
541 }
542
543 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
544 {
545 if (handle == ctx->fragment_shader) {
546 /* unbind before deleting */
547 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
548 ctx->fragment_shader = NULL;
549 }
550 ctx->pipe->delete_fs_state(ctx->pipe, handle);
551 }
552
553 void cso_save_fragment_shader(struct cso_context *ctx)
554 {
555 assert(!ctx->fragment_shader_saved);
556 ctx->fragment_shader_saved = ctx->fragment_shader;
557 }
558
559 void cso_restore_fragment_shader(struct cso_context *ctx)
560 {
561 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
562 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
563 ctx->fragment_shader = ctx->fragment_shader_saved;
564 }
565 ctx->fragment_shader_saved = NULL;
566 }
567
568
569 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
570 void *handle )
571 {
572 if (ctx->vertex_shader != handle) {
573 ctx->vertex_shader = handle;
574 ctx->pipe->bind_vs_state(ctx->pipe, handle);
575 }
576 return PIPE_OK;
577 }
578
579 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
580 {
581 if (handle == ctx->vertex_shader) {
582 /* unbind before deleting */
583 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
584 ctx->vertex_shader = NULL;
585 }
586 ctx->pipe->delete_vs_state(ctx->pipe, handle);
587 }
588
589 void cso_save_vertex_shader(struct cso_context *ctx)
590 {
591 assert(!ctx->vertex_shader_saved);
592 ctx->vertex_shader_saved = ctx->vertex_shader;
593 }
594
595 void cso_restore_vertex_shader(struct cso_context *ctx)
596 {
597 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
598 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
599 ctx->vertex_shader = ctx->vertex_shader_saved;
600 }
601 ctx->vertex_shader_saved = NULL;
602 }
603
604
605 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
606 const struct pipe_framebuffer_state *fb)
607 {
608 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
609 util_copy_framebuffer_state(&ctx->fb, fb);
610 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
611 }
612 return PIPE_OK;
613 }
614
615 void cso_save_framebuffer(struct cso_context *ctx)
616 {
617 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
618 }
619
620 void cso_restore_framebuffer(struct cso_context *ctx)
621 {
622 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
623 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
624 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
625 util_unreference_framebuffer_state(&ctx->fb_saved);
626 }
627 }
628
629
630 enum pipe_error cso_set_viewport(struct cso_context *ctx,
631 const struct pipe_viewport_state *vp)
632 {
633 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
634 ctx->vp = *vp;
635 ctx->pipe->set_viewport_state(ctx->pipe, vp);
636 }
637 return PIPE_OK;
638 }
639
640 void cso_save_viewport(struct cso_context *ctx)
641 {
642 ctx->vp_saved = ctx->vp;
643 }
644
645
646 void cso_restore_viewport(struct cso_context *ctx)
647 {
648 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
649 ctx->vp = ctx->vp_saved;
650 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
651 }
652 }
653
654
655 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
656 const struct pipe_blend_color *bc)
657 {
658 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
659 ctx->blend_color = *bc;
660 ctx->pipe->set_blend_color(ctx->pipe, bc);
661 }
662 return PIPE_OK;
663 }
664
665 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
666 unsigned sample_mask)
667 {
668 if (ctx->sample_mask != sample_mask) {
669 ctx->sample_mask = sample_mask;
670 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
671 }
672 return PIPE_OK;
673 }
674
675 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
676 const struct pipe_stencil_ref *sr)
677 {
678 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
679 ctx->stencil_ref = *sr;
680 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
681 }
682 return PIPE_OK;
683 }
684
685 void cso_save_stencil_ref(struct cso_context *ctx)
686 {
687 ctx->stencil_ref_saved = ctx->stencil_ref;
688 }
689
690
691 void cso_restore_stencil_ref(struct cso_context *ctx)
692 {
693 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved, sizeof(ctx->stencil_ref))) {
694 ctx->stencil_ref = ctx->stencil_ref_saved;
695 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
696 }
697 }
698
699 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
700 void *handle)
701 {
702 assert(ctx->has_geometry_shader || !handle);
703
704 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
705 ctx->geometry_shader = handle;
706 ctx->pipe->bind_gs_state(ctx->pipe, handle);
707 }
708 return PIPE_OK;
709 }
710
711 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
712 {
713 if (handle == ctx->geometry_shader) {
714 /* unbind before deleting */
715 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
716 ctx->geometry_shader = NULL;
717 }
718 ctx->pipe->delete_gs_state(ctx->pipe, handle);
719 }
720
721 void cso_save_geometry_shader(struct cso_context *ctx)
722 {
723 if (!ctx->has_geometry_shader) {
724 return;
725 }
726
727 assert(!ctx->geometry_shader_saved);
728 ctx->geometry_shader_saved = ctx->geometry_shader;
729 }
730
731 void cso_restore_geometry_shader(struct cso_context *ctx)
732 {
733 if (!ctx->has_geometry_shader) {
734 return;
735 }
736
737 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
738 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
739 ctx->geometry_shader = ctx->geometry_shader_saved;
740 }
741 ctx->geometry_shader_saved = NULL;
742 }
743
744 /* clip state */
745
746 static INLINE void
747 clip_state_cpy(struct pipe_clip_state *dst,
748 const struct pipe_clip_state *src)
749 {
750 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
751 }
752
753 static INLINE int
754 clip_state_cmp(const struct pipe_clip_state *a,
755 const struct pipe_clip_state *b)
756 {
757 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
758 }
759
760 void
761 cso_set_clip(struct cso_context *ctx,
762 const struct pipe_clip_state *clip)
763 {
764 if (clip_state_cmp(&ctx->clip, clip)) {
765 clip_state_cpy(&ctx->clip, clip);
766 ctx->pipe->set_clip_state(ctx->pipe, clip);
767 }
768 }
769
770 void
771 cso_save_clip(struct cso_context *ctx)
772 {
773 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
774 }
775
776 void
777 cso_restore_clip(struct cso_context *ctx)
778 {
779 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
780 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
781 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
782 }
783 }
784
785 enum pipe_error cso_set_vertex_elements(struct cso_context *ctx,
786 unsigned count,
787 const struct pipe_vertex_element *states)
788 {
789 struct u_vbuf *vbuf = ctx->vbuf;
790 unsigned key_size, hash_key;
791 struct cso_hash_iter iter;
792 void *handle;
793 struct cso_velems_state velems_state;
794
795 if (vbuf) {
796 u_vbuf_set_vertex_elements(vbuf, count, states);
797 return PIPE_OK;
798 }
799
800    /* Need to include the count in the stored state data too.
801     * Otherwise the first 'count' pipe_vertex_elements could be identical even
802     * if the counts differ, and there would be no guarantee that the hash keys
803     * would differ in that case either. */
804 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
805 velems_state.count = count;
806 memcpy(velems_state.velems, states, sizeof(struct pipe_vertex_element) * count);
807 hash_key = cso_construct_key((void*)&velems_state, key_size);
808 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS, (void*)&velems_state, key_size);
809
810 if (cso_hash_iter_is_null(iter)) {
811 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
812 if (!cso)
813 return PIPE_ERROR_OUT_OF_MEMORY;
814
815 memcpy(&cso->state, &velems_state, key_size);
816 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count, &cso->state.velems[0]);
817 cso->delete_state = (cso_state_callback)ctx->pipe->delete_vertex_elements_state;
818 cso->context = ctx->pipe;
819
820 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
821 if (cso_hash_iter_is_null(iter)) {
822 FREE(cso);
823 return PIPE_ERROR_OUT_OF_MEMORY;
824 }
825
826 handle = cso->data;
827 }
828 else {
829 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
830 }
831
832 if (ctx->velements != handle) {
833 ctx->velements = handle;
834 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
835 }
836 return PIPE_OK;
837 }
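/*
 * Example: a single float4 attribute fetched from vertex buffer 0
 * (a minimal sketch; the field values are illustrative):
 *
 *    struct pipe_vertex_element ve;
 *    memset(&ve, 0, sizeof(ve));
 *    ve.src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
 *    ve.vertex_buffer_index = 0;
 *    ve.src_offset = 0;
 *    cso_set_vertex_elements(cso, 1, &ve);
 */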
838
839 void cso_save_vertex_elements(struct cso_context *ctx)
840 {
841 struct u_vbuf *vbuf = ctx->vbuf;
842
843 if (vbuf) {
844 u_vbuf_save_vertex_elements(vbuf);
845 return;
846 }
847
848 assert(!ctx->velements_saved);
849 ctx->velements_saved = ctx->velements;
850 }
851
852 void cso_restore_vertex_elements(struct cso_context *ctx)
853 {
854 struct u_vbuf *vbuf = ctx->vbuf;
855
856 if (vbuf) {
857 u_vbuf_restore_vertex_elements(vbuf);
858 return;
859 }
860
861 if (ctx->velements != ctx->velements_saved) {
862 ctx->velements = ctx->velements_saved;
863 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
864 }
865 ctx->velements_saved = NULL;
866 }
867
868 /* vertex buffers */
869
870 void cso_set_vertex_buffers(struct cso_context *ctx,
871 unsigned count,
872 const struct pipe_vertex_buffer *buffers)
873 {
874 struct u_vbuf *vbuf = ctx->vbuf;
875
876 if (vbuf) {
877 u_vbuf_set_vertex_buffers(vbuf, count, buffers);
878 return;
879 }
880
881 if (count != ctx->nr_vertex_buffers ||
882 memcmp(buffers, ctx->vertex_buffers,
883 sizeof(struct pipe_vertex_buffer) * count) != 0) {
884 util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
885 buffers, count);
886 ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
887 }
888 }
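/*
 * Example: bind one vertex buffer holding tightly packed float4 vertices
 * (a minimal sketch; 'resource' is assumed to be an existing pipe_resource):
 *
 *    struct pipe_vertex_buffer vb;
 *    memset(&vb, 0, sizeof(vb));
 *    vb.buffer = resource;
 *    vb.stride = 4 * sizeof(float);
 *    vb.buffer_offset = 0;
 *    cso_set_vertex_buffers(cso, 1, &vb);
 */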
889
890 void cso_save_vertex_buffers(struct cso_context *ctx)
891 {
892 struct u_vbuf *vbuf = ctx->vbuf;
893
894 if (vbuf) {
895 u_vbuf_save_vertex_buffers(vbuf);
896 return;
897 }
898
899 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
900 &ctx->nr_vertex_buffers_saved,
901 ctx->vertex_buffers,
902 ctx->nr_vertex_buffers);
903 }
904
905 void cso_restore_vertex_buffers(struct cso_context *ctx)
906 {
907 unsigned i;
908 struct u_vbuf *vbuf = ctx->vbuf;
909
910 if (vbuf) {
911 u_vbuf_restore_vertex_buffers(vbuf);
912 return;
913 }
914
915 util_copy_vertex_buffers(ctx->vertex_buffers,
916 &ctx->nr_vertex_buffers,
917 ctx->vertex_buffers_saved,
918 ctx->nr_vertex_buffers_saved);
919
920 for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
921 pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
922 }
923 ctx->nr_vertex_buffers_saved = 0;
924
925 ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
926 ctx->vertex_buffers);
927 }
928
929
930 /**************** fragment/vertex sampler view state *************************/
931
932 static enum pipe_error
933 single_sampler(struct cso_context *ctx,
934 struct sampler_info *info,
935 unsigned idx,
936 const struct pipe_sampler_state *templ)
937 {
938 void *handle = NULL;
939
940 if (templ != NULL) {
941 unsigned key_size = sizeof(struct pipe_sampler_state);
942 unsigned hash_key = cso_construct_key((void*)templ, key_size);
943 struct cso_hash_iter iter =
944 cso_find_state_template(ctx->cache,
945 hash_key, CSO_SAMPLER,
946 (void *) templ, key_size);
947
948 if (cso_hash_iter_is_null(iter)) {
949 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
950 if (!cso)
951 return PIPE_ERROR_OUT_OF_MEMORY;
952
953 memcpy(&cso->state, templ, sizeof(*templ));
954 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
955 cso->delete_state = (cso_state_callback)ctx->pipe->delete_sampler_state;
956 cso->context = ctx->pipe;
957
958 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
959 if (cso_hash_iter_is_null(iter)) {
960 FREE(cso);
961 return PIPE_ERROR_OUT_OF_MEMORY;
962 }
963
964 handle = cso->data;
965 }
966 else {
967 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
968 }
969 }
970
971 info->samplers[idx] = handle;
972
973 return PIPE_OK;
974 }
975
976 enum pipe_error
977 cso_single_sampler(struct cso_context *ctx,
978 unsigned idx,
979 const struct pipe_sampler_state *templ)
980 {
981 return single_sampler(ctx, &ctx->fragment_samplers, idx, templ);
982 }
983
984 enum pipe_error
985 cso_single_vertex_sampler(struct cso_context *ctx,
986 unsigned idx,
987 const struct pipe_sampler_state *templ)
988 {
989 return single_sampler(ctx, &ctx->vertex_samplers, idx, templ);
990 }
991
992
993
994 static void
995 single_sampler_done(struct cso_context *ctx,
996 struct sampler_info *info)
997 {
998 unsigned i;
999
1000 /* find highest non-null sampler */
1001 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1002 if (info->samplers[i - 1] != NULL)
1003 break;
1004 }
1005
1006 info->nr_samplers = i;
1007
1008 if (info->hw.nr_samplers != info->nr_samplers ||
1009 memcmp(info->hw.samplers,
1010 info->samplers,
1011 info->nr_samplers * sizeof(void *)) != 0)
1012 {
1013 memcpy(info->hw.samplers,
1014 info->samplers,
1015 info->nr_samplers * sizeof(void *));
1016 info->hw.nr_samplers = info->nr_samplers;
1017
1018 if (info == &ctx->fragment_samplers) {
1019 ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
1020 info->nr_samplers,
1021 info->samplers);
1022 }
1023 else if (info == &ctx->vertex_samplers) {
1024 ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
1025 info->nr_samplers,
1026 info->samplers);
1027 }
1028 else {
1029 assert(0);
1030 }
1031 }
1032 }
1033
1034 void
1035 cso_single_sampler_done( struct cso_context *ctx )
1036 {
1037 single_sampler_done(ctx, &ctx->fragment_samplers);
1038 }
1039
1040 void
1041 cso_single_vertex_sampler_done(struct cso_context *ctx)
1042 {
1043 single_sampler_done(ctx, &ctx->vertex_samplers);
1044 }
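/*
 * Example: bind a single nearest-filtered fragment sampler to unit 0
 * (a minimal sketch; the remaining pipe_sampler_state fields stay zero):
 *
 *    struct pipe_sampler_state samp;
 *    memset(&samp, 0, sizeof(samp));
 *    samp.min_img_filter = PIPE_TEX_FILTER_NEAREST;
 *    samp.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
 *    cso_single_sampler(cso, 0, &samp);
 *    cso_single_sampler_done(cso);
 */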
1045
1046
1047 /*
1048  * If the function encounters any errors it will return the last one,
1049  * so that we always try to set as many samplers
1050  * as possible.
1051 */
1052 static enum pipe_error
1053 set_samplers(struct cso_context *ctx,
1054 struct sampler_info *info,
1055 unsigned nr,
1056 const struct pipe_sampler_state **templates)
1057 {
1058 unsigned i;
1059 enum pipe_error temp, error = PIPE_OK;
1060
1061 /* TODO: fastpath
1062 */
1063
1064 for (i = 0; i < nr; i++) {
1065 temp = single_sampler(ctx, info, i, templates[i]);
1066 if (temp != PIPE_OK)
1067 error = temp;
1068 }
1069
1070 for ( ; i < info->nr_samplers; i++) {
1071 temp = single_sampler(ctx, info, i, NULL);
1072 if (temp != PIPE_OK)
1073 error = temp;
1074 }
1075
1076 single_sampler_done(ctx, info);
1077
1078 return error;
1079 }
1080
1081 enum pipe_error
1082 cso_set_samplers(struct cso_context *ctx,
1083 unsigned nr,
1084 const struct pipe_sampler_state **templates)
1085 {
1086 return set_samplers(ctx, &ctx->fragment_samplers, nr, templates);
1087 }
1088
1089 enum pipe_error
1090 cso_set_vertex_samplers(struct cso_context *ctx,
1091 unsigned nr,
1092 const struct pipe_sampler_state **templates)
1093 {
1094 return set_samplers(ctx, &ctx->vertex_samplers, nr, templates);
1095 }
1096
1097
1098
1099 static void
1100 save_samplers(struct cso_context *ctx, struct sampler_info *info)
1101 {
1102 info->nr_samplers_saved = info->nr_samplers;
1103 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1104 }
1105
1106 void
1107 cso_save_samplers(struct cso_context *ctx)
1108 {
1109 save_samplers(ctx, &ctx->fragment_samplers);
1110 }
1111
1112 void
1113 cso_save_vertex_samplers(struct cso_context *ctx)
1114 {
1115 save_samplers(ctx, &ctx->vertex_samplers);
1116 }
1117
1118
1119
1120 static void
1121 restore_samplers(struct cso_context *ctx, struct sampler_info *info)
1122 {
1123 info->nr_samplers = info->nr_samplers_saved;
1124 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1125 single_sampler_done(ctx, info);
1126 }
1127
1128 void
1129 cso_restore_samplers(struct cso_context *ctx)
1130 {
1131 restore_samplers(ctx, &ctx->fragment_samplers);
1132 }
1133
1134 void
1135 cso_restore_vertex_samplers(struct cso_context *ctx)
1136 {
1137 restore_samplers(ctx, &ctx->vertex_samplers);
1138 }
1139
1140
1141
1142 static void
1143 set_sampler_views(struct cso_context *ctx,
1144 struct sampler_info *info,
1145 void (*set_views)(struct pipe_context *,
1146 unsigned num_views,
1147 struct pipe_sampler_view **),
1148 uint count,
1149 struct pipe_sampler_view **views)
1150 {
1151 uint i;
1152
1153 /* reference new views */
1154 for (i = 0; i < count; i++) {
1155 pipe_sampler_view_reference(&info->views[i], views[i]);
1156 }
1157 /* unref extra old views, if any */
1158 for (; i < info->nr_views; i++) {
1159 pipe_sampler_view_reference(&info->views[i], NULL);
1160 }
1161
1162 info->nr_views = count;
1163
1164 /* bind the new sampler views */
1165 set_views(ctx->pipe, count, info->views);
1166 }
1167
1168 void
1169 cso_set_fragment_sampler_views(struct cso_context *ctx,
1170 uint count,
1171 struct pipe_sampler_view **views)
1172 {
1173 set_sampler_views(ctx, &ctx->fragment_samplers,
1174 ctx->pipe->set_fragment_sampler_views,
1175 count, views);
1176 }
1177
1178 void
1179 cso_set_vertex_sampler_views(struct cso_context *ctx,
1180 uint count,
1181 struct pipe_sampler_view **views)
1182 {
1183 set_sampler_views(ctx, &ctx->vertex_samplers,
1184 ctx->pipe->set_vertex_sampler_views,
1185 count, views);
1186 }
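/*
 * Example: bind a single fragment sampler view (a minimal sketch; 'view' is
 * assumed to have been created earlier with pipe->create_sampler_view()):
 *
 *    struct pipe_sampler_view *view = ...;
 *    cso_set_fragment_sampler_views(cso, 1, &view);
 */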
1187
1188
1189
1190 static void
1191 save_sampler_views(struct cso_context *ctx,
1192 struct sampler_info *info)
1193 {
1194 uint i;
1195
1196 info->nr_views_saved = info->nr_views;
1197
1198 for (i = 0; i < info->nr_views; i++) {
1199 assert(!info->views_saved[i]);
1200 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1201 }
1202 }
1203
1204 void
1205 cso_save_fragment_sampler_views(struct cso_context *ctx)
1206 {
1207 save_sampler_views(ctx, &ctx->fragment_samplers);
1208 }
1209
1210 void
1211 cso_save_vertex_sampler_views(struct cso_context *ctx)
1212 {
1213 save_sampler_views(ctx, &ctx->vertex_samplers);
1214 }
1215
1216
1217 static void
1218 restore_sampler_views(struct cso_context *ctx,
1219 struct sampler_info *info,
1220 void (*set_views)(struct pipe_context *,
1221 unsigned num_views,
1222 struct pipe_sampler_view **))
1223 {
1224 uint i;
1225
1226 for (i = 0; i < info->nr_views_saved; i++) {
1227 pipe_sampler_view_reference(&info->views[i], NULL);
1228 /* move the reference from one pointer to another */
1229 info->views[i] = info->views_saved[i];
1230 info->views_saved[i] = NULL;
1231 }
1232 for (; i < info->nr_views; i++) {
1233 pipe_sampler_view_reference(&info->views[i], NULL);
1234 }
1235
1236 /* bind the old/saved sampler views */
1237 set_views(ctx->pipe, info->nr_views_saved, info->views);
1238
1239 info->nr_views = info->nr_views_saved;
1240 info->nr_views_saved = 0;
1241 }
1242
1243 void
1244 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1245 {
1246 restore_sampler_views(ctx, &ctx->fragment_samplers,
1247 ctx->pipe->set_fragment_sampler_views);
1248 }
1249
1250 void
1251 cso_restore_vertex_sampler_views(struct cso_context *ctx)
1252 {
1253 restore_sampler_views(ctx, &ctx->vertex_samplers,
1254 ctx->pipe->set_vertex_sampler_views);
1255 }
1256
1257
1258 void
1259 cso_set_stream_outputs(struct cso_context *ctx,
1260 unsigned num_targets,
1261 struct pipe_stream_output_target **targets,
1262 unsigned append_bitmask)
1263 {
1264 struct pipe_context *pipe = ctx->pipe;
1265 uint i;
1266
1267 if (!ctx->has_streamout) {
1268 assert(num_targets == 0);
1269 return;
1270 }
1271
1272 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1273 /* Nothing to do. */
1274 return;
1275 }
1276
1277 /* reference new targets */
1278 for (i = 0; i < num_targets; i++) {
1279 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1280 }
1281 /* unref extra old targets, if any */
1282 for (; i < ctx->nr_so_targets; i++) {
1283 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1284 }
1285
1286 pipe->set_stream_output_targets(pipe, num_targets, targets,
1287 append_bitmask);
1288 ctx->nr_so_targets = num_targets;
1289 }
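/*
 * Example: unbind all stream output targets (a minimal sketch):
 *
 *    cso_set_stream_outputs(cso, 0, NULL, 0);
 */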
1290
1291 void
1292 cso_save_stream_outputs(struct cso_context *ctx)
1293 {
1294 uint i;
1295
1296 if (!ctx->has_streamout) {
1297 return;
1298 }
1299
1300 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1301
1302 for (i = 0; i < ctx->nr_so_targets; i++) {
1303 assert(!ctx->so_targets_saved[i]);
1304 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1305 }
1306 }
1307
1308 void
1309 cso_restore_stream_outputs(struct cso_context *ctx)
1310 {
1311 struct pipe_context *pipe = ctx->pipe;
1312 uint i;
1313
1314 if (!ctx->has_streamout) {
1315 return;
1316 }
1317
1318 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1319 /* Nothing to do. */
1320 return;
1321 }
1322
1323 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1324 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1325 /* move the reference from one pointer to another */
1326 ctx->so_targets[i] = ctx->so_targets_saved[i];
1327 ctx->so_targets_saved[i] = NULL;
1328 }
1329 for (; i < ctx->nr_so_targets; i++) {
1330 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1331 }
1332
1333 /* ~0 means append */
1334 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1335 ctx->so_targets, ~0);
1336
1337 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1338 ctx->nr_so_targets_saved = 0;
1339 }
1340
1341 /* drawing */
1342
1343 void
1344 cso_set_index_buffer(struct cso_context *cso,
1345 const struct pipe_index_buffer *ib)
1346 {
1347 struct u_vbuf *vbuf = cso->vbuf;
1348
1349 if (vbuf) {
1350 u_vbuf_set_index_buffer(vbuf, ib);
1351 } else {
1352 struct pipe_context *pipe = cso->pipe;
1353 pipe->set_index_buffer(pipe, ib);
1354 }
1355 }
1356
1357 void
1358 cso_draw_vbo(struct cso_context *cso,
1359 const struct pipe_draw_info *info)
1360 {
1361 struct u_vbuf *vbuf = cso->vbuf;
1362
1363 if (vbuf) {
1364 u_vbuf_draw_vbo(vbuf, info);
1365 } else {
1366 struct pipe_context *pipe = cso->pipe;
1367 pipe->draw_vbo(pipe, info);
1368 }
1369 }
1370
1371 void
1372 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1373 {
1374 struct pipe_draw_info info;
1375
1376 util_draw_init_info(&info);
1377
1378 info.mode = mode;
1379 info.start = start;
1380 info.count = count;
1381 info.min_index = start;
1382 info.max_index = start + count - 1;
1383
1384 cso_draw_vbo(cso, &info);
1385 }
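/*
 * Example: draw three vertices as one triangle using the currently bound
 * CSO state (PIPE_PRIM_TRIANGLES comes from p_defines.h):
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 */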