b3decc58f0e0ddefac179cf403bccd485fe3e6a2
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Info related to samplers and sampler views.
 * We have one of these for fragment samplers and another for vertex samplers.
 */
struct sampler_info
{
   /* State last sent to the driver via bind_*_sampler_states();
    * used by single_sampler_done() to skip redundant binds. */
   struct {
      void *samplers[PIPE_MAX_SAMPLERS];
      unsigned nr_samplers;
   } hw;

   /* Current sampler CSO handles, accumulated by single_sampler(). */
   void *samplers[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers;

   /* One-deep save/restore stack for the sampler handles. */
   void *samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers_saved;

   /* Currently bound sampler views (reference-counted). */
   struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
   unsigned nr_views;

   /* One-deep save/restore stack for the sampler views. */
   struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_views_saved;
};
76
77
78
struct cso_context {
   struct pipe_context *pipe;   /* the wrapped driver context */
   struct cso_cache *cache;     /* hash cache of created CSOs */
   struct u_vbuf *vbuf;         /* vertex-buffer fallback module, or NULL */

   boolean has_geometry_shader; /* from PIPE_SHADER_CAP_MAX_INSTRUCTIONS */
   boolean has_streamout;       /* from PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS */

   /* Sampler and sampler-view state, one entry per shader stage. */
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* The vertex buffer slot reserved for meta operations, plus its
    * one-deep save slot. */
   struct pipe_vertex_buffer aux_vertex_buffer_current;
   struct pipe_vertex_buffer aux_vertex_buffer_saved;
   unsigned aux_vertex_buffer_index;

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *velements, *velements_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
119
120
121 static boolean delete_blend_state(struct cso_context *ctx, void *state)
122 {
123 struct cso_blend *cso = (struct cso_blend *)state;
124
125 if (ctx->blend == cso->data)
126 return FALSE;
127
128 if (cso->delete_state)
129 cso->delete_state(cso->context, cso->data);
130 FREE(state);
131 return TRUE;
132 }
133
134 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
135 {
136 struct cso_depth_stencil_alpha *cso =
137 (struct cso_depth_stencil_alpha *)state;
138
139 if (ctx->depth_stencil == cso->data)
140 return FALSE;
141
142 if (cso->delete_state)
143 cso->delete_state(cso->context, cso->data);
144 FREE(state);
145
146 return TRUE;
147 }
148
149 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
150 {
151 struct cso_sampler *cso = (struct cso_sampler *)state;
152 if (cso->delete_state)
153 cso->delete_state(cso->context, cso->data);
154 FREE(state);
155 return TRUE;
156 }
157
158 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
159 {
160 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
161
162 if (ctx->rasterizer == cso->data)
163 return FALSE;
164 if (cso->delete_state)
165 cso->delete_state(cso->context, cso->data);
166 FREE(state);
167 return TRUE;
168 }
169
170 static boolean delete_vertex_elements(struct cso_context *ctx,
171 void *state)
172 {
173 struct cso_velements *cso = (struct cso_velements *)state;
174
175 if (ctx->velements == cso->data)
176 return FALSE;
177
178 if (cso->delete_state)
179 cso->delete_state(cso->context, cso->data);
180 FREE(state);
181 return TRUE;
182 }
183
184
185 static INLINE boolean delete_cso(struct cso_context *ctx,
186 void *state, enum cso_cache_type type)
187 {
188 switch (type) {
189 case CSO_BLEND:
190 return delete_blend_state(ctx, state);
191 case CSO_SAMPLER:
192 return delete_sampler_state(ctx, state);
193 case CSO_DEPTH_STENCIL_ALPHA:
194 return delete_depth_stencil_state(ctx, state);
195 case CSO_RASTERIZER:
196 return delete_rasterizer_state(ctx, state);
197 case CSO_VELEMENTS:
198 return delete_vertex_elements(ctx, state);
199 default:
200 assert(0);
201 FREE(state);
202 }
203 return FALSE;
204 }
205
/**
 * Callback installed on the cso cache: evict entries from a hash that
 * has grown past max_size, skipping any entry that is currently bound.
 */
static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* if we're approaching the maximum size, remove a fourth of the entries,
    * otherwise every subsequent call will go through the same */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /*remove elements until we're good */
      /*fixme: currently we pick the nodes to remove at random*/
      void *cso = cso_hash_iter_data(iter);
      /* delete_cso() returns FALSE for bound state; skip those nodes. */
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
   /* NOTE(review): if fewer than to_remove entries are deletable, the
    * iterator can run off the end of the hash -- relies on max_size being
    * larger than the number of simultaneously bound CSOs. TODO confirm. */
}
230
231 static void cso_init_vbuf(struct cso_context *cso)
232 {
233 struct u_vbuf_caps caps;
234
235 u_vbuf_get_caps(cso->pipe->screen, &caps);
236
237 /* Install u_vbuf if there is anything unsupported. */
238 if (!caps.buffer_offset_unaligned ||
239 !caps.buffer_stride_unaligned ||
240 !caps.velem_src_offset_unaligned ||
241 !caps.format_fixed32 ||
242 !caps.format_float16 ||
243 !caps.format_float64 ||
244 !caps.format_norm32 ||
245 !caps.format_scaled32 ||
246 !caps.user_vertex_buffers) {
247 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
248 cso->aux_vertex_buffer_index);
249 }
250 }
251
/**
 * Create a CSO context wrapping the given pipe context.
 * Returns NULL on allocation failure (partially built state is released
 * via cso_destroy_context, which tolerates NULL).
 */
struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (ctx == NULL)
      goto out;

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   /* Let the cache call back into us to evict entries safely. */
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask_saved = ~0;

   ctx->aux_vertex_buffer_index = 0; /* 0 for now */

   /* May install the u_vbuf fallback; needs ctx->pipe set above. */
   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   /* Probe optional driver features once, up front. */
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}
290
/**
 * Prior to context destruction, this function unbinds all state objects.
 *
 * Unbinding must happen before the cache is deleted: delete_cso() only
 * frees CSOs that are not bound, and the driver must not be left holding
 * pointers into freed memory.
 */
void cso_release_all( struct cso_context *ctx )
{
   unsigned i, shader;

   if (ctx->pipe) {
      /* Unbind every kind of state tracked by this module. */
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
      ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
      /* Vertex-sampler and stream-output hooks are optional drivers
       * features; guard the calls. */
      if (ctx->pipe->bind_vertex_sampler_states)
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_vertex_sampler_views)
         ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_stream_output_targets)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
   }

   /* free fragment samplers, views */
   for (shader = 0; shader < Elements(ctx->samplers); shader++) {
      struct sampler_info *info = &ctx->samplers[shader];
      for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
         pipe_sampler_view_reference(&info->views[i], NULL);
         pipe_sampler_view_reference(&info->views_saved[i], NULL);
      }
   }

   /* Drop framebuffer surface references (current and saved). */
   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
   pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   /* Deleting the cache frees every cached CSO (nothing is bound now). */
   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }
}
340
341
342 /**
343 * Free the CSO context. NOTE: the state tracker should have previously called
344 * cso_release_all().
345 */
346 void cso_destroy_context( struct cso_context *ctx )
347 {
348 if (ctx) {
349 if (ctx->vbuf)
350 u_vbuf_destroy(ctx->vbuf);
351 FREE( ctx );
352 }
353 }
354
355
/* These functions will either find the state matching the given template
 * in the cache, or they will create a new state from the template,
 * insert it in the cache and return it.
 */

/*
 * If the driver returns 0 from the create method then they will assign
 * the data member of the cso to be the template itself.
 */
365
/**
 * Find/create a blend CSO for the template and bind it, unless it is
 * already the current blend state.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* Without independent blend enables, only rt[0] matters, so hash just
    * the struct prefix up to (and including) rt[0]. */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Zero the tail beyond key_size so stored state is fully defined. */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   /* Only call into the driver when the bound CSO actually changes. */
   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
409
410 void cso_save_blend(struct cso_context *ctx)
411 {
412 assert(!ctx->blend_saved);
413 ctx->blend_saved = ctx->blend;
414 }
415
416 void cso_restore_blend(struct cso_context *ctx)
417 {
418 if (ctx->blend != ctx->blend_saved) {
419 ctx->blend = ctx->blend_saved;
420 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
421 }
422 ctx->blend_saved = NULL;
423 }
424
425
426
/**
 * Find/create a depth/stencil/alpha CSO for the template and bind it,
 * unless it is already current.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   /* Only call into the driver when the bound CSO actually changes. */
   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
472
473 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
474 {
475 assert(!ctx->depth_stencil_saved);
476 ctx->depth_stencil_saved = ctx->depth_stencil;
477 }
478
479 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
480 {
481 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
482 ctx->depth_stencil = ctx->depth_stencil_saved;
483 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
484 ctx->depth_stencil_saved);
485 }
486 ctx->depth_stencil_saved = NULL;
487 }
488
489
490
/**
 * Find/create a rasterizer CSO for the template and bind it, unless it
 * is already current.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   /* Only call into the driver when the bound CSO actually changes. */
   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
531
532 void cso_save_rasterizer(struct cso_context *ctx)
533 {
534 assert(!ctx->rasterizer_saved);
535 ctx->rasterizer_saved = ctx->rasterizer;
536 }
537
538 void cso_restore_rasterizer(struct cso_context *ctx)
539 {
540 if (ctx->rasterizer != ctx->rasterizer_saved) {
541 ctx->rasterizer = ctx->rasterizer_saved;
542 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
543 }
544 ctx->rasterizer_saved = NULL;
545 }
546
547
548
549 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
550 void *handle )
551 {
552 if (ctx->fragment_shader != handle) {
553 ctx->fragment_shader = handle;
554 ctx->pipe->bind_fs_state(ctx->pipe, handle);
555 }
556 return PIPE_OK;
557 }
558
559 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
560 {
561 if (handle == ctx->fragment_shader) {
562 /* unbind before deleting */
563 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
564 ctx->fragment_shader = NULL;
565 }
566 ctx->pipe->delete_fs_state(ctx->pipe, handle);
567 }
568
569 void cso_save_fragment_shader(struct cso_context *ctx)
570 {
571 assert(!ctx->fragment_shader_saved);
572 ctx->fragment_shader_saved = ctx->fragment_shader;
573 }
574
575 void cso_restore_fragment_shader(struct cso_context *ctx)
576 {
577 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
578 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
579 ctx->fragment_shader = ctx->fragment_shader_saved;
580 }
581 ctx->fragment_shader_saved = NULL;
582 }
583
584
585 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
586 void *handle )
587 {
588 if (ctx->vertex_shader != handle) {
589 ctx->vertex_shader = handle;
590 ctx->pipe->bind_vs_state(ctx->pipe, handle);
591 }
592 return PIPE_OK;
593 }
594
595 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
596 {
597 if (handle == ctx->vertex_shader) {
598 /* unbind before deleting */
599 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
600 ctx->vertex_shader = NULL;
601 }
602 ctx->pipe->delete_vs_state(ctx->pipe, handle);
603 }
604
605 void cso_save_vertex_shader(struct cso_context *ctx)
606 {
607 assert(!ctx->vertex_shader_saved);
608 ctx->vertex_shader_saved = ctx->vertex_shader;
609 }
610
611 void cso_restore_vertex_shader(struct cso_context *ctx)
612 {
613 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
614 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
615 ctx->vertex_shader = ctx->vertex_shader_saved;
616 }
617 ctx->vertex_shader_saved = NULL;
618 }
619
620
621 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
622 const struct pipe_framebuffer_state *fb)
623 {
624 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
625 util_copy_framebuffer_state(&ctx->fb, fb);
626 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
627 }
628 return PIPE_OK;
629 }
630
631 void cso_save_framebuffer(struct cso_context *ctx)
632 {
633 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
634 }
635
/**
 * Rebind the framebuffer saved by cso_save_framebuffer(), if it differs
 * from the current state, then drop the saved surface references.
 */
void cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      /* Release the saved references; the current state now holds them. */
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
   /* NOTE(review): when saved == current the saved references are kept;
    * they appear to be released by the next save or cso_release_all() --
    * TODO confirm no leak path exists. */
}
644
645
646 enum pipe_error cso_set_viewport(struct cso_context *ctx,
647 const struct pipe_viewport_state *vp)
648 {
649 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
650 ctx->vp = *vp;
651 ctx->pipe->set_viewport_state(ctx->pipe, vp);
652 }
653 return PIPE_OK;
654 }
655
656 void cso_save_viewport(struct cso_context *ctx)
657 {
658 ctx->vp_saved = ctx->vp;
659 }
660
661
662 void cso_restore_viewport(struct cso_context *ctx)
663 {
664 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
665 ctx->vp = ctx->vp_saved;
666 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
667 }
668 }
669
670
671 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
672 const struct pipe_blend_color *bc)
673 {
674 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
675 ctx->blend_color = *bc;
676 ctx->pipe->set_blend_color(ctx->pipe, bc);
677 }
678 return PIPE_OK;
679 }
680
681 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
682 unsigned sample_mask)
683 {
684 if (ctx->sample_mask != sample_mask) {
685 ctx->sample_mask = sample_mask;
686 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
687 }
688 return PIPE_OK;
689 }
690
691 void cso_save_sample_mask(struct cso_context *ctx)
692 {
693 ctx->sample_mask_saved = ctx->sample_mask;
694 }
695
696 void cso_restore_sample_mask(struct cso_context *ctx)
697 {
698 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
699 }
700
701 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
702 const struct pipe_stencil_ref *sr)
703 {
704 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
705 ctx->stencil_ref = *sr;
706 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
707 }
708 return PIPE_OK;
709 }
710
711 void cso_save_stencil_ref(struct cso_context *ctx)
712 {
713 ctx->stencil_ref_saved = ctx->stencil_ref;
714 }
715
716
717 void cso_restore_stencil_ref(struct cso_context *ctx)
718 {
719 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
720 sizeof(ctx->stencil_ref))) {
721 ctx->stencil_ref = ctx->stencil_ref_saved;
722 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
723 }
724 }
725
726 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
727 void *handle)
728 {
729 assert(ctx->has_geometry_shader || !handle);
730
731 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
732 ctx->geometry_shader = handle;
733 ctx->pipe->bind_gs_state(ctx->pipe, handle);
734 }
735 return PIPE_OK;
736 }
737
738 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
739 {
740 if (handle == ctx->geometry_shader) {
741 /* unbind before deleting */
742 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
743 ctx->geometry_shader = NULL;
744 }
745 ctx->pipe->delete_gs_state(ctx->pipe, handle);
746 }
747
748 void cso_save_geometry_shader(struct cso_context *ctx)
749 {
750 if (!ctx->has_geometry_shader) {
751 return;
752 }
753
754 assert(!ctx->geometry_shader_saved);
755 ctx->geometry_shader_saved = ctx->geometry_shader;
756 }
757
758 void cso_restore_geometry_shader(struct cso_context *ctx)
759 {
760 if (!ctx->has_geometry_shader) {
761 return;
762 }
763
764 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
765 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
766 ctx->geometry_shader = ctx->geometry_shader_saved;
767 }
768 ctx->geometry_shader_saved = NULL;
769 }
770
771 /* clip state */
772
773 static INLINE void
774 clip_state_cpy(struct pipe_clip_state *dst,
775 const struct pipe_clip_state *src)
776 {
777 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
778 }
779
780 static INLINE int
781 clip_state_cmp(const struct pipe_clip_state *a,
782 const struct pipe_clip_state *b)
783 {
784 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
785 }
786
787 void
788 cso_set_clip(struct cso_context *ctx,
789 const struct pipe_clip_state *clip)
790 {
791 if (clip_state_cmp(&ctx->clip, clip)) {
792 clip_state_cpy(&ctx->clip, clip);
793 ctx->pipe->set_clip_state(ctx->pipe, clip);
794 }
795 }
796
797 void
798 cso_save_clip(struct cso_context *ctx)
799 {
800 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
801 }
802
803 void
804 cso_restore_clip(struct cso_context *ctx)
805 {
806 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
807 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
808 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
809 }
810 }
811
/**
 * Find/create/bind a vertex-elements CSO. When the u_vbuf fallback is
 * active, the state is routed through it instead of the driver.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create the driver object and insert it. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* key_size covers the count plus the used part of the velems array. */
      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* Only call into the driver when the bound CSO actually changes. */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
871
872 void cso_save_vertex_elements(struct cso_context *ctx)
873 {
874 struct u_vbuf *vbuf = ctx->vbuf;
875
876 if (vbuf) {
877 u_vbuf_save_vertex_elements(vbuf);
878 return;
879 }
880
881 assert(!ctx->velements_saved);
882 ctx->velements_saved = ctx->velements;
883 }
884
885 void cso_restore_vertex_elements(struct cso_context *ctx)
886 {
887 struct u_vbuf *vbuf = ctx->vbuf;
888
889 if (vbuf) {
890 u_vbuf_restore_vertex_elements(vbuf);
891 return;
892 }
893
894 if (ctx->velements != ctx->velements_saved) {
895 ctx->velements = ctx->velements_saved;
896 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
897 }
898 ctx->velements_saved = NULL;
899 }
900
901 /* vertex buffers */
902
/**
 * Bind vertex buffers (through u_vbuf when active), keeping a
 * reference-counted copy of whatever lands in the auxiliary slot so
 * that meta ops can save/restore it.
 */
void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned start_slot, unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
      return;
   }

   /* Save what's in the auxiliary slot, so that we can save and restore it
    * for meta ops. */
   if (start_slot <= ctx->aux_vertex_buffer_index &&
       start_slot+count > ctx->aux_vertex_buffer_index) {
      if (buffers) {
         const struct pipe_vertex_buffer *vb =
               buffers + (ctx->aux_vertex_buffer_index - start_slot);

         /* Take the reference first; the memcpy below then overwrites
          * the buffer field with the same (already referenced) pointer. */
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 vb->buffer);
         memcpy(&ctx->aux_vertex_buffer_current, vb,
                sizeof(struct pipe_vertex_buffer));
      }
      else {
         pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
                                 NULL);
         ctx->aux_vertex_buffer_current.user_buffer = NULL;
      }
   }

   ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
}
936
937 void cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
938 {
939 struct u_vbuf *vbuf = ctx->vbuf;
940
941 if (vbuf) {
942 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
943 return;
944 }
945
946 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
947 ctx->aux_vertex_buffer_current.buffer);
948 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
949 sizeof(struct pipe_vertex_buffer));
950 }
951
952 void cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
953 {
954 struct u_vbuf *vbuf = ctx->vbuf;
955
956 if (vbuf) {
957 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
958 return;
959 }
960
961 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
962 &ctx->aux_vertex_buffer_saved);
963 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
964 }
965
966 unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
967 {
968 return ctx->aux_vertex_buffer_index;
969 }
970
971
972 /**************** fragment/vertex sampler view state *************************/
973
/**
 * Look up (or create) a sampler CSO for the template and store its
 * driver handle at info->samplers[idx]; a NULL template stores NULL.
 * The actual driver bind is deferred to single_sampler_done().
 */
static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* Cache miss: create the driver sampler object and insert it. */
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}
1018
1019 enum pipe_error
1020 cso_single_sampler(struct cso_context *ctx,
1021 unsigned shader_stage,
1022 unsigned idx,
1023 const struct pipe_sampler_state *templ)
1024 {
1025 return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
1026 }
1027
1028
1029
/**
 * Flush the accumulated sampler handles to the driver for one shader
 * stage, skipping the bind when nothing changed since the last flush.
 */
static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   /* Compare against what the driver last saw (info->hw). */
   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));
      info->hw.nr_samplers = info->nr_samplers;

      /* Dispatch to the stage-specific bind hook. */
      switch (shader_stage) {
      case PIPE_SHADER_FRAGMENT:
         ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      case PIPE_SHADER_VERTEX:
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
                                               info->nr_samplers,
                                               info->samplers);
         break;
      case PIPE_SHADER_GEOMETRY:
         ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      default:
         assert(!"bad shader type in single_sampler_done()");
      }
   }
}
1075
/**
 * Flush pending cso_single_sampler() changes to the driver.
 */
void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}
1081
1082
/*
 * If the function encounters any errors it will return the
 * last one. Done to always try to set as many samplers
 * as possible.
 */
enum pipe_error
cso_set_samplers(struct cso_context *ctx,
                 unsigned shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;
   enum pipe_error temp, error = PIPE_OK;

   /* TODO: fastpath
    */

   for (i = 0; i < nr; i++) {
      temp = single_sampler(ctx, info, i, templates[i]);
      if (temp != PIPE_OK)
         error = temp;
   }

   /* Clear any previously-set samplers beyond the new count. */
   for ( ; i < info->nr_samplers; i++) {
      temp = single_sampler(ctx, info, i, NULL);
      if (temp != PIPE_OK)
         error = temp;
   }

   single_sampler_done(ctx, shader_stage);

   return error;
}
1117
1118 void
1119 cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
1120 {
1121 struct sampler_info *info = &ctx->samplers[shader_stage];
1122 info->nr_samplers_saved = info->nr_samplers;
1123 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1124 }
1125
1126
1127 void
1128 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1129 {
1130 struct sampler_info *info = &ctx->samplers[shader_stage];
1131 info->nr_samplers = info->nr_samplers_saved;
1132 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1133 single_sampler_done(ctx, shader_stage);
1134 }
1135
1136
1137 void
1138 cso_set_sampler_views(struct cso_context *ctx,
1139 unsigned shader_stage,
1140 unsigned count,
1141 struct pipe_sampler_view **views)
1142 {
1143 struct sampler_info *info = &ctx->samplers[shader_stage];
1144 unsigned i;
1145
1146 /* reference new views */
1147 for (i = 0; i < count; i++) {
1148 pipe_sampler_view_reference(&info->views[i], views[i]);
1149 }
1150 /* unref extra old views, if any */
1151 for (; i < info->nr_views; i++) {
1152 pipe_sampler_view_reference(&info->views[i], NULL);
1153 }
1154
1155 info->nr_views = count;
1156
1157 /* bind the new sampler views */
1158 switch (shader_stage) {
1159 case PIPE_SHADER_FRAGMENT:
1160 ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
1161 break;
1162 case PIPE_SHADER_VERTEX:
1163 ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
1164 break;
1165 case PIPE_SHADER_GEOMETRY:
1166 ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
1167 break;
1168 default:
1169 assert(!"bad shader type in cso_set_sampler_views()");
1170 }
1171 }
1172
1173
1174 void
1175 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1176 {
1177 struct sampler_info *info = &ctx->samplers[shader_stage];
1178 unsigned i;
1179
1180 info->nr_views_saved = info->nr_views;
1181
1182 for (i = 0; i < info->nr_views; i++) {
1183 assert(!info->views_saved[i]);
1184 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1185 }
1186 }
1187
1188
/**
 * Re-bind the sampler views previously stashed by
 * cso_save_sampler_views(), releasing whatever views are currently
 * bound for this shader stage.
 */
void
cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i, nr_saved = info->nr_views_saved;

   for (i = 0; i < nr_saved; i++) {
      /* drop the reference held by the current binding */
      pipe_sampler_view_reference(&info->views[i], NULL);
      /* move the reference from one pointer to another */
      info->views[i] = info->views_saved[i];
      info->views_saved[i] = NULL;
   }
   /* unreference any current views beyond the saved count */
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   /* bind the old/saved sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   default:
      assert(!"bad shader type in cso_restore_sampler_views()");
   }

   info->nr_views = nr_saved;
   info->nr_views_saved = 0;
}
1223
1224
1225 void
1226 cso_set_stream_outputs(struct cso_context *ctx,
1227 unsigned num_targets,
1228 struct pipe_stream_output_target **targets,
1229 unsigned append_bitmask)
1230 {
1231 struct pipe_context *pipe = ctx->pipe;
1232 uint i;
1233
1234 if (!ctx->has_streamout) {
1235 assert(num_targets == 0);
1236 return;
1237 }
1238
1239 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1240 /* Nothing to do. */
1241 return;
1242 }
1243
1244 /* reference new targets */
1245 for (i = 0; i < num_targets; i++) {
1246 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1247 }
1248 /* unref extra old targets, if any */
1249 for (; i < ctx->nr_so_targets; i++) {
1250 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1251 }
1252
1253 pipe->set_stream_output_targets(pipe, num_targets, targets,
1254 append_bitmask);
1255 ctx->nr_so_targets = num_targets;
1256 }
1257
1258 void
1259 cso_save_stream_outputs(struct cso_context *ctx)
1260 {
1261 uint i;
1262
1263 if (!ctx->has_streamout) {
1264 return;
1265 }
1266
1267 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1268
1269 for (i = 0; i < ctx->nr_so_targets; i++) {
1270 assert(!ctx->so_targets_saved[i]);
1271 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1272 }
1273 }
1274
/**
 * Re-bind the stream output targets previously stashed by
 * cso_save_stream_outputs(), dropping references to whatever targets
 * are currently bound.
 */
void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      /* drop the reference held by the current binding */
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
   }
   /* unreference any remaining current targets beyond the saved count */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   /* ~0 means append */
   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, ~0);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}
1307
1308 /* drawing */
1309
1310 void
1311 cso_set_index_buffer(struct cso_context *cso,
1312 const struct pipe_index_buffer *ib)
1313 {
1314 struct u_vbuf *vbuf = cso->vbuf;
1315
1316 if (vbuf) {
1317 u_vbuf_set_index_buffer(vbuf, ib);
1318 } else {
1319 struct pipe_context *pipe = cso->pipe;
1320 pipe->set_index_buffer(pipe, ib);
1321 }
1322 }
1323
1324 void
1325 cso_draw_vbo(struct cso_context *cso,
1326 const struct pipe_draw_info *info)
1327 {
1328 struct u_vbuf *vbuf = cso->vbuf;
1329
1330 if (vbuf) {
1331 u_vbuf_draw_vbo(vbuf, info);
1332 } else {
1333 struct pipe_context *pipe = cso->pipe;
1334 pipe->draw_vbo(pipe, info);
1335 }
1336 }
1337
1338 void
1339 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1340 {
1341 struct pipe_draw_info info;
1342
1343 util_draw_init_info(&info);
1344
1345 info.mode = mode;
1346 info.start = start;
1347 info.count = count;
1348 info.min_index = start;
1349 info.max_index = start + count - 1;
1350
1351 cso_draw_vbo(cso, &info);
1352 }