cso: rearrange some structure fields for consistency
src/gallium/auxiliary/cso_cache/cso_context.c
/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/**
 * @file
 *
 * Wrap the cso cache & hash mechanisms in a simplified
 * pipe-driver-specific interface.
 *
 * @author Zack Rusin <zack@tungstengraphics.com>
 * @author Keith Whitwell <keith@tungstengraphics.com>
 */
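
/*
 * Illustrative usage sketch (the template variables below are hypothetical;
 * only the cso_*() calls are the interface defined in this file):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *
 *    cso_save_blend(cso);                      // 1-deep save slot
 *    cso_set_blend(cso, &blend_templ);
 *    cso_set_rasterizer(cso, &rast_templ);
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 *    cso_restore_blend(cso);                   // back to the saved state
 *
 *    cso_release_all(cso);                     // before destruction
 *    cso_destroy_context(cso);
 */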

#include "pipe/p_state.h"
#include "util/u_draw.h"
#include "util/u_framebuffer.h"
#include "util/u_inlines.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_vbuf.h"
#include "tgsi/tgsi_parse.h"

#include "cso_cache/cso_context.h"
#include "cso_cache/cso_cache.h"
#include "cso_cache/cso_hash.h"
#include "cso_context.h"


/**
 * Info related to samplers and sampler views.
 * We have one of these for each shader stage.
 */
struct sampler_info
{
   struct {
      void *samplers[PIPE_MAX_SAMPLERS];
      unsigned nr_samplers;
   } hw;

   void *samplers[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers;

   void *samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers_saved;

   struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
   unsigned nr_views;

   struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_views_saved;
};


struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;
   struct u_vbuf *vbuf;

   boolean has_geometry_shader;
   boolean has_streamout;

   struct sampler_info samplers[PIPE_SHADER_TYPES];

   uint nr_vertex_buffers;
   struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];

   uint nr_vertex_buffers_saved;
   struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *velements, *velements_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};

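/*
 * Deletion helpers used by the cache's sanitize callback (sanitize_hash()
 * below).  Apart from the sampler variant, they refuse to delete a CSO that
 * is currently bound and return FALSE instead, so the driver never has a
 * bound state object freed underneath it.
 */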
static boolean delete_blend_state(struct cso_context *ctx, void *state)
{
   struct cso_blend *cso = (struct cso_blend *)state;

   if (ctx->blend == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
{
   struct cso_depth_stencil_alpha *cso =
      (struct cso_depth_stencil_alpha *)state;

   if (ctx->depth_stencil == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);

   return TRUE;
}

static boolean delete_sampler_state(struct cso_context *ctx, void *state)
{
   struct cso_sampler *cso = (struct cso_sampler *)state;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
{
   struct cso_rasterizer *cso = (struct cso_rasterizer *)state;

   if (ctx->rasterizer == cso->data)
      return FALSE;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_vertex_elements(struct cso_context *ctx,
                                      void *state)
{
   struct cso_velements *cso = (struct cso_velements *)state;

   if (ctx->velements == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}


static INLINE boolean delete_cso(struct cso_context *ctx,
                                 void *state, enum cso_cache_type type)
{
   switch (type) {
   case CSO_BLEND:
      return delete_blend_state(ctx, state);
   case CSO_SAMPLER:
      return delete_sampler_state(ctx, state);
   case CSO_DEPTH_STENCIL_ALPHA:
      return delete_depth_stencil_state(ctx, state);
   case CSO_RASTERIZER:
      return delete_rasterizer_state(ctx, state);
   case CSO_VELEMENTS:
      return delete_vertex_elements(ctx, state);
   default:
      assert(0);
      FREE(state);
   }
   return FALSE;
}

static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the entries;
    * otherwise every subsequent call would have to go through this again. */
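   /* Worked example (illustrative numbers, not from the original code):
    * with max_size = 100 and hash_size = 101, max_entries = 101, so
    * to_remove starts at 101/4 = 25 and the overflow adds 101 - 100 = 1,
    * i.e. 26 entries get evicted.
    */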
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* FIXME: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}

static void cso_init_vbuf(struct cso_context *cso)
{
   struct u_vbuf_caps caps;

   u_vbuf_get_caps(cso->pipe->screen, &caps);

   /* Install u_vbuf if there is anything unsupported. */
   if (!caps.buffer_offset_unaligned ||
       !caps.buffer_stride_unaligned ||
       !caps.velem_src_offset_unaligned ||
       !caps.format_fixed32 ||
       !caps.format_float16 ||
       !caps.format_float64 ||
       !caps.format_norm32 ||
       !caps.format_scaled32 ||
       !caps.user_vertex_buffers) {
      cso->vbuf = u_vbuf_create(cso->pipe, &caps);
   }
}
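
/*
 * When cso_init_vbuf() has installed u_vbuf (cso->vbuf != NULL), the vertex
 * element/buffer, index buffer and draw entry points further down simply
 * forward to u_vbuf, which deals with the vertex formats and layouts the
 * driver reported as unsupported.
 */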

struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (ctx == NULL)
      goto out;

   assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask_saved = ~0;

   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}

/**
 * Prior to context destruction, this function unbinds all state objects.
 */
void cso_release_all( struct cso_context *ctx )
{
   unsigned i, shader;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
      ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
      if (ctx->pipe->bind_vertex_sampler_states)
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_vertex_sampler_views)
         ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_stream_output_targets)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
   }

   /* free the sampler views for all shader stages */
   for (shader = 0; shader < Elements(ctx->samplers); shader++) {
      struct sampler_info *info = &ctx->samplers[shader];
      for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
         pipe_sampler_view_reference(&info->views[i], NULL);
         pipe_sampler_view_reference(&info->views_saved[i], NULL);
      }
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            NULL, 0);
   util_copy_vertex_buffers(ctx->vertex_buffers_saved,
                            &ctx->nr_vertex_buffers_saved,
                            NULL, 0);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }
}


/**
 * Free the CSO context.  NOTE: the state tracker should have previously called
 * cso_release_all().
 */
void cso_destroy_context( struct cso_context *ctx )
{
   if (ctx) {
      if (ctx->vbuf)
         u_vbuf_destroy(ctx->vbuf);
      FREE( ctx );
   }
}

/* These functions will either find the state matching the given template
 * in the cache, or create a new state from the template, insert it in
 * the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of the
 * cso is set to the template itself.
 */
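
/*
 * Sketch of the pattern shared by the cached setters below (blend,
 * depth/stencil/alpha, rasterizer, vertex elements, samplers):
 *
 *    hash_key = cso_construct_key(templ, key_size);
 *    iter = cso_find_state_template(cache, hash_key, type, templ, key_size);
 *    if miss: create the driver object and cso_insert_state() it;
 *    bind the resulting handle only if it differs from the current one.
 */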

enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

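   /* When independent blending is disabled, only rt[0] is used by the
    * driver, so the key (and the copied state) stops at rt[1] to keep
    * otherwise-equal templates from hashing differently.
    */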
   key_size = templ->independent_blend_enable ?
                 sizeof(struct pipe_blend_state) :
                 (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_blend(struct cso_context *ctx)
{
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}

void cso_restore_blend(struct cso_context *ctx)
{
   if (ctx->blend != ctx->blend_saved) {
      ctx->blend = ctx->blend_saved;
      ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
   }
   ctx->blend_saved = NULL;
}



enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}

void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
{
   if (ctx->depth_stencil != ctx->depth_stencil_saved) {
      ctx->depth_stencil = ctx->depth_stencil_saved;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
                                                ctx->depth_stencil_saved);
   }
   ctx->depth_stencil_saved = NULL;
}



enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_rasterizer(struct cso_context *ctx)
{
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}

void cso_restore_rasterizer(struct cso_context *ctx)
{
   if (ctx->rasterizer != ctx->rasterizer_saved) {
      ctx->rasterizer = ctx->rasterizer_saved;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
   }
   ctx->rasterizer_saved = NULL;
}


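/*
 * Shader state is not run through the CSO cache: the functions below only
 * track the currently bound handle so that redundant bind_*_state() calls
 * can be skipped, and cso_delete_*_shader() unbinds before deleting.
 */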
enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
                                               void *handle )
{
   if (ctx->fragment_shader != handle) {
      ctx->fragment_shader = handle;
      ctx->pipe->bind_fs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->fragment_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_fs_state(ctx->pipe, NULL);
      ctx->fragment_shader = NULL;
   }
   ctx->pipe->delete_fs_state(ctx->pipe, handle);
}

void cso_save_fragment_shader(struct cso_context *ctx)
{
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}

void cso_restore_fragment_shader(struct cso_context *ctx)
{
   if (ctx->fragment_shader_saved != ctx->fragment_shader) {
      ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
      ctx->fragment_shader = ctx->fragment_shader_saved;
   }
   ctx->fragment_shader_saved = NULL;
}


enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
                                             void *handle )
{
   if (ctx->vertex_shader != handle) {
      ctx->vertex_shader = handle;
      ctx->pipe->bind_vs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->vertex_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_vs_state(ctx->pipe, NULL);
      ctx->vertex_shader = NULL;
   }
   ctx->pipe->delete_vs_state(ctx->pipe, handle);
}

void cso_save_vertex_shader(struct cso_context *ctx)
{
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}

void cso_restore_vertex_shader(struct cso_context *ctx)
{
   if (ctx->vertex_shader_saved != ctx->vertex_shader) {
      ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
      ctx->vertex_shader = ctx->vertex_shader_saved;
   }
   ctx->vertex_shader_saved = NULL;
}


enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
                                    const struct pipe_framebuffer_state *fb)
{
   if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
      util_copy_framebuffer_state(&ctx->fb, fb);
      ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
   }
   return PIPE_OK;
}

void cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}

void cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
}


enum pipe_error cso_set_viewport(struct cso_context *ctx,
                                 const struct pipe_viewport_state *vp)
{
   if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
      ctx->vp = *vp;
      ctx->pipe->set_viewport_state(ctx->pipe, vp);
   }
   return PIPE_OK;
}

void cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}


void cso_restore_viewport(struct cso_context *ctx)
{
   if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
      ctx->vp = ctx->vp_saved;
      ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
   }
}


enum pipe_error cso_set_blend_color(struct cso_context *ctx,
                                    const struct pipe_blend_color *bc)
{
   if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
      ctx->blend_color = *bc;
      ctx->pipe->set_blend_color(ctx->pipe, bc);
   }
   return PIPE_OK;
}

enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
                                    unsigned sample_mask)
{
   if (ctx->sample_mask != sample_mask) {
      ctx->sample_mask = sample_mask;
      ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
   }
   return PIPE_OK;
}

void cso_save_sample_mask(struct cso_context *ctx)
{
   ctx->sample_mask_saved = ctx->sample_mask;
}

void cso_restore_sample_mask(struct cso_context *ctx)
{
   cso_set_sample_mask(ctx, ctx->sample_mask_saved);
}

enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
                                    const struct pipe_stencil_ref *sr)
{
   if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = *sr;
      ctx->pipe->set_stencil_ref(ctx->pipe, sr);
   }
   return PIPE_OK;
}

void cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}


void cso_restore_stencil_ref(struct cso_context *ctx)
{
   if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
              sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = ctx->stencil_ref_saved;
      ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
   }
}

enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
                                               void *handle)
{
   assert(ctx->has_geometry_shader || !handle);

   if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
      ctx->geometry_shader = handle;
      ctx->pipe->bind_gs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->geometry_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_gs_state(ctx->pipe, NULL);
      ctx->geometry_shader = NULL;
   }
   ctx->pipe->delete_gs_state(ctx->pipe, handle);
}

void cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}

void cso_restore_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   if (ctx->geometry_shader_saved != ctx->geometry_shader) {
      ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
      ctx->geometry_shader = ctx->geometry_shader_saved;
   }
   ctx->geometry_shader_saved = NULL;
}

/* clip state */

static INLINE void
clip_state_cpy(struct pipe_clip_state *dst,
               const struct pipe_clip_state *src)
{
   memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
}

static INLINE int
clip_state_cmp(const struct pipe_clip_state *a,
               const struct pipe_clip_state *b)
{
   return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
}

void
cso_set_clip(struct cso_context *ctx,
             const struct pipe_clip_state *clip)
{
   if (clip_state_cmp(&ctx->clip, clip)) {
      clip_state_cpy(&ctx->clip, clip);
      ctx->pipe->set_clip_state(ctx->pipe, clip);
   }
}

void
cso_save_clip(struct cso_context *ctx)
{
   clip_state_cpy(&ctx->clip_saved, &ctx->clip);
}

void
cso_restore_clip(struct cso_context *ctx)
{
   if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
      clip_state_cpy(&ctx->clip, &ctx->clip_saved);
      ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
   }
}

enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count in the stored state data too.
    * Otherwise the first 'count' pipe_vertex_elements could be identical
    * even if the counts differ, and there's no guarantee the hash would
    * be different in that case either.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_elements(vbuf);
      return;
   }

   assert(!ctx->velements_saved);
   ctx->velements_saved = ctx->velements;
}

void cso_restore_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_elements(vbuf);
      return;
   }

   if (ctx->velements != ctx->velements_saved) {
      ctx->velements = ctx->velements_saved;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
   }
   ctx->velements_saved = NULL;
}

/* vertex buffers */

void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, count, buffers);
      return;
   }

   if (count != ctx->nr_vertex_buffers ||
       memcmp(buffers, ctx->vertex_buffers,
              sizeof(struct pipe_vertex_buffer) * count) != 0) {
      util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
                               buffers, count);
      ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
   }
}

void cso_save_vertex_buffers(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_buffers(vbuf);
      return;
   }

   util_copy_vertex_buffers(ctx->vertex_buffers_saved,
                            &ctx->nr_vertex_buffers_saved,
                            ctx->vertex_buffers,
                            ctx->nr_vertex_buffers);
}

void cso_restore_vertex_buffers(struct cso_context *ctx)
{
   unsigned i;
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_buffers(vbuf);
      return;
   }

   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            ctx->vertex_buffers_saved,
                            ctx->nr_vertex_buffers_saved);

   for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
      pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
   }
   ctx->nr_vertex_buffers_saved = 0;

   ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
                                 ctx->vertex_buffers);
}

/**************** sampler state and sampler views ****************************/
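
/*
 * Illustrative call sequence for binding a single fragment sampler (the
 * 'sampler_templ' variable is hypothetical):
 *
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &sampler_templ);
 *    cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
 */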

static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}

enum pipe_error
cso_single_sampler(struct cso_context *ctx,
                   unsigned shader_stage,
                   unsigned idx,
                   const struct pipe_sampler_state *templ)
{
   return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
}



static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));
      info->hw.nr_samplers = info->nr_samplers;

      switch (shader_stage) {
      case PIPE_SHADER_FRAGMENT:
         ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      case PIPE_SHADER_VERTEX:
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
                                               info->nr_samplers,
                                               info->samplers);
         break;
      case PIPE_SHADER_GEOMETRY:
         ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      default:
         assert(!"bad shader type in single_sampler_done()");
      }
   }
}

void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}


/*
 * If the function encounters any errors it will return the
 * last one.  This is done so that we always try to set as many samplers
 * as possible.
 */
enum pipe_error
cso_set_samplers(struct cso_context *ctx,
                 unsigned shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;
   enum pipe_error temp, error = PIPE_OK;

   /* TODO: fastpath
    */

   for (i = 0; i < nr; i++) {
      temp = single_sampler(ctx, info, i, templates[i]);
      if (temp != PIPE_OK)
         error = temp;
   }

   for ( ; i < info->nr_samplers; i++) {
      temp = single_sampler(ctx, info, i, NULL);
      if (temp != PIPE_OK)
         error = temp;
   }

   single_sampler_done(ctx, shader_stage);

   return error;
}

void
cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers_saved = info->nr_samplers;
   memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
}


void
cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers = info->nr_samplers_saved;
   memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
   single_sampler_done(ctx, shader_stage);
}


void
cso_set_sampler_views(struct cso_context *ctx,
                      unsigned shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* reference new views */
   for (i = 0; i < count; i++) {
      pipe_sampler_view_reference(&info->views[i], views[i]);
   }
   /* unref extra old views, if any */
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   info->nr_views = count;

   /* bind the new sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
      break;
   default:
      assert(!"bad shader type in cso_set_sampler_views()");
   }
}


void
cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   info->nr_views_saved = info->nr_views;

   for (i = 0; i < info->nr_views; i++) {
      assert(!info->views_saved[i]);
      pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
   }
}


void
cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i, nr_saved = info->nr_views_saved;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
      /* move the reference from one pointer to another */
      info->views[i] = info->views_saved[i];
      info->views_saved[i] = NULL;
   }
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   /* bind the old/saved sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   default:
      assert(!"bad shader type in cso_restore_sampler_views()");
   }

   info->nr_views = nr_saved;
   info->nr_views_saved = 0;
}


void
cso_set_stream_outputs(struct cso_context *ctx,
                       unsigned num_targets,
                       struct pipe_stream_output_target **targets,
                       unsigned append_bitmask)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      assert(num_targets == 0);
      return;
   }

   if (ctx->nr_so_targets == 0 && num_targets == 0) {
      /* Nothing to do. */
      return;
   }

   /* reference new targets */
   for (i = 0; i < num_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
   }
   /* unref extra old targets, if any */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, num_targets, targets,
                                   append_bitmask);
   ctx->nr_so_targets = num_targets;
}

void
cso_save_stream_outputs(struct cso_context *ctx)
{
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   ctx->nr_so_targets_saved = ctx->nr_so_targets;

   for (i = 0; i < ctx->nr_so_targets; i++) {
      assert(!ctx->so_targets_saved[i]);
      pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
   }
}

void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
   }
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   /* ~0 means append */
   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, ~0);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}

/* drawing */

void
cso_set_index_buffer(struct cso_context *cso,
                     const struct pipe_index_buffer *ib)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_set_index_buffer(vbuf, ib);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->set_index_buffer(pipe, ib);
   }
}

void
cso_draw_vbo(struct cso_context *cso,
             const struct pipe_draw_info *info)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_draw_vbo(vbuf, info);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->draw_vbo(pipe, info);
   }
}

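/*
 * Convenience wrapper around cso_draw_vbo() for simple non-indexed draws,
 * e.g. (illustrative): cso_draw_arrays(cso, PIPE_PRIM_TRIANGLES, 0, 3);
 */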
void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}