gallium: set sample mask to ~0 for clear, blit and gen_mipmap
[mesa.git] / src / gallium / auxiliary / cso_cache / cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these for fragment samplers and another for vertex samplers.
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
74 unsigned nr_views_saved;
75 };
76
77
78
struct cso_context {
   struct pipe_context *pipe;     /* the wrapped driver context */
   struct cso_cache *cache;       /* hash cache of driver CSO objects */
   struct u_vbuf *vbuf;           /* non-NULL if vertex-buffer translation
                                   * fallback is installed (see cso_init_vbuf) */

   boolean has_geometry_shader;   /* set from PIPE_SHADER_CAP_MAX_INSTRUCTIONS */
   boolean has_streamout;         /* set from PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS */

   /* Per-shader-stage sampler and sampler-view state. */
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   uint nr_vertex_buffers;
   struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];

   uint nr_vertex_buffers_saved;
   struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved, *geometry_shader;
   void *vertex_shader, *vertex_shader_saved, *geometry_shader_saved;
   void *velements, *velements_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
120
121
122 static boolean delete_blend_state(struct cso_context *ctx, void *state)
123 {
124 struct cso_blend *cso = (struct cso_blend *)state;
125
126 if (ctx->blend == cso->data)
127 return FALSE;
128
129 if (cso->delete_state)
130 cso->delete_state(cso->context, cso->data);
131 FREE(state);
132 return TRUE;
133 }
134
135 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
136 {
137 struct cso_depth_stencil_alpha *cso =
138 (struct cso_depth_stencil_alpha *)state;
139
140 if (ctx->depth_stencil == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146
147 return TRUE;
148 }
149
150 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
151 {
152 struct cso_sampler *cso = (struct cso_sampler *)state;
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156 return TRUE;
157 }
158
159 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
160 {
161 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
162
163 if (ctx->rasterizer == cso->data)
164 return FALSE;
165 if (cso->delete_state)
166 cso->delete_state(cso->context, cso->data);
167 FREE(state);
168 return TRUE;
169 }
170
171 static boolean delete_vertex_elements(struct cso_context *ctx,
172 void *state)
173 {
174 struct cso_velements *cso = (struct cso_velements *)state;
175
176 if (ctx->velements == cso->data)
177 return FALSE;
178
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185
186 static INLINE boolean delete_cso(struct cso_context *ctx,
187 void *state, enum cso_cache_type type)
188 {
189 switch (type) {
190 case CSO_BLEND:
191 return delete_blend_state(ctx, state);
192 case CSO_SAMPLER:
193 return delete_sampler_state(ctx, state);
194 case CSO_DEPTH_STENCIL_ALPHA:
195 return delete_depth_stencil_state(ctx, state);
196 case CSO_RASTERIZER:
197 return delete_rasterizer_state(ctx, state);
198 case CSO_VELEMENTS:
199 return delete_vertex_elements(ctx, state);
200 default:
201 assert(0);
202 FREE(state);
203 }
204 return FALSE;
205 }
206
/**
 * Cache-sanitize callback: evict entries from a CSO hash when it grows
 * past \p max_size.
 *
 * \param hash       the hash table to shrink
 * \param type       kind of CSO stored in this hash (selects delete helper)
 * \param max_size   maximum number of entries allowed
 * \param user_data  the cso_context (registered in cso_create_context())
 */
static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the entries;
    * otherwise every subsequent call would go through the same work. */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /*remove elements until we're good */
      /*fixme: currently we pick the nodes to remove at random*/
      void *cso = cso_hash_iter_data(iter);
      /* delete_cso() returns FALSE for currently-bound states; those are
       * skipped and the iterator advances to the next candidate. */
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}
231
232 static void cso_init_vbuf(struct cso_context *cso)
233 {
234 struct u_vbuf_caps caps;
235
236 u_vbuf_get_caps(cso->pipe->screen, &caps);
237
238 /* Install u_vbuf if there is anything unsupported. */
239 if (!caps.buffer_offset_unaligned ||
240 !caps.buffer_stride_unaligned ||
241 !caps.velem_src_offset_unaligned ||
242 !caps.format_fixed32 ||
243 !caps.format_float16 ||
244 !caps.format_float64 ||
245 !caps.format_norm32 ||
246 !caps.format_scaled32 ||
247 !caps.user_vertex_buffers) {
248 cso->vbuf = u_vbuf_create(cso->pipe, &caps);
249 }
250 }
251
/**
 * Create a CSO context wrapping the given pipe context.
 * Returns NULL on allocation failure (partially-constructed state is
 * cleaned up via cso_destroy_context()).
 */
struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (ctx == NULL)
      goto out;

   /* The sampler arrays are shared between stages, so the limits must agree. */
   assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   /* Have the cache call back into us to evict entries when it grows. */
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask_saved = ~0;

   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   /* Probe driver capabilities once up front. */
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}
290
291 /**
292 * Prior to context destruction, this function unbinds all state objects.
293 */
294 void cso_release_all( struct cso_context *ctx )
295 {
296 unsigned i, shader;
297
298 if (ctx->pipe) {
299 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
300 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
301 ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
302 if (ctx->pipe->bind_vertex_sampler_states)
303 ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
304 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
305 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
306 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
307 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
308 ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
309 if (ctx->pipe->set_vertex_sampler_views)
310 ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
311 if (ctx->pipe->set_stream_output_targets)
312 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
313 }
314
315 /* free fragment samplers, views */
316 for (shader = 0; shader < Elements(ctx->samplers); shader++) {
317 struct sampler_info *info = &ctx->samplers[shader];
318 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
319 pipe_sampler_view_reference(&info->views[i], NULL);
320 pipe_sampler_view_reference(&info->views_saved[i], NULL);
321 }
322 }
323
324 util_unreference_framebuffer_state(&ctx->fb);
325 util_unreference_framebuffer_state(&ctx->fb_saved);
326
327 util_copy_vertex_buffers(ctx->vertex_buffers,
328 &ctx->nr_vertex_buffers,
329 NULL, 0);
330 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
331 &ctx->nr_vertex_buffers_saved,
332 NULL, 0);
333
334 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
335 pipe_so_target_reference(&ctx->so_targets[i], NULL);
336 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
337 }
338
339 if (ctx->cache) {
340 cso_cache_delete( ctx->cache );
341 ctx->cache = NULL;
342 }
343 }
344
345
346 /**
347 * Free the CSO context. NOTE: the state tracker should have previously called
348 * cso_release_all().
349 */
350 void cso_destroy_context( struct cso_context *ctx )
351 {
352 if (ctx) {
353 if (ctx->vbuf)
354 u_vbuf_destroy(ctx->vbuf);
355 FREE( ctx );
356 }
357 }
358
359
/* These functions will either find the state matching the given template
 * in the cache, or they will create a new state object from the template,
 * insert it into the cache and return it.
 */
364
365 /*
366 * If the driver returns 0 from the create method then they will assign
367 * the data member of the cso to be the template itself.
368 */
369
/**
 * Bind a blend state matching \p templ, creating and caching a driver
 * CSO if an equivalent one isn't already in the cache.
 * Returns PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* When independent blending is disabled only rt[0] is meaningful, so
    * hash/compare just the prefix of the struct up to rt[1]. */
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Zero first: only key_size bytes of the template are copied, but
       * the full struct is handed to the driver. */
      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
413
/* Push the current blend state onto the 1-deep save stack. */
void cso_save_blend(struct cso_context *ctx)
{
   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}
419
420 void cso_restore_blend(struct cso_context *ctx)
421 {
422 if (ctx->blend != ctx->blend_saved) {
423 ctx->blend = ctx->blend_saved;
424 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
425 }
426 ctx->blend_saved = NULL;
427 }
428
429
430
/**
 * Bind a depth/stencil/alpha state matching \p templ, creating and caching
 * a driver CSO if an equivalent one isn't already in the cache.
 * Returns PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create a new driver CSO and insert it. */
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
476
/* Push the current depth/stencil/alpha state onto the 1-deep save stack. */
void cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}
482
483 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
484 {
485 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
486 ctx->depth_stencil = ctx->depth_stencil_saved;
487 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
488 ctx->depth_stencil_saved);
489 }
490 ctx->depth_stencil_saved = NULL;
491 }
492
493
494
/**
 * Bind a rasterizer state matching \p templ, creating and caching a
 * driver CSO if an equivalent one isn't already in the cache.
 * Returns PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create a new driver CSO and insert it. */
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
535
/* Push the current rasterizer state onto the 1-deep save stack. */
void cso_save_rasterizer(struct cso_context *ctx)
{
   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}
541
542 void cso_restore_rasterizer(struct cso_context *ctx)
543 {
544 if (ctx->rasterizer != ctx->rasterizer_saved) {
545 ctx->rasterizer = ctx->rasterizer_saved;
546 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
547 }
548 ctx->rasterizer_saved = NULL;
549 }
550
551
552
553 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
554 void *handle )
555 {
556 if (ctx->fragment_shader != handle) {
557 ctx->fragment_shader = handle;
558 ctx->pipe->bind_fs_state(ctx->pipe, handle);
559 }
560 return PIPE_OK;
561 }
562
563 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
564 {
565 if (handle == ctx->fragment_shader) {
566 /* unbind before deleting */
567 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
568 ctx->fragment_shader = NULL;
569 }
570 ctx->pipe->delete_fs_state(ctx->pipe, handle);
571 }
572
/* Push the current fragment shader onto the 1-deep save stack. */
void cso_save_fragment_shader(struct cso_context *ctx)
{
   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}
578
579 void cso_restore_fragment_shader(struct cso_context *ctx)
580 {
581 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
582 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
583 ctx->fragment_shader = ctx->fragment_shader_saved;
584 }
585 ctx->fragment_shader_saved = NULL;
586 }
587
588
589 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
590 void *handle )
591 {
592 if (ctx->vertex_shader != handle) {
593 ctx->vertex_shader = handle;
594 ctx->pipe->bind_vs_state(ctx->pipe, handle);
595 }
596 return PIPE_OK;
597 }
598
599 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
600 {
601 if (handle == ctx->vertex_shader) {
602 /* unbind before deleting */
603 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
604 ctx->vertex_shader = NULL;
605 }
606 ctx->pipe->delete_vs_state(ctx->pipe, handle);
607 }
608
/* Push the current vertex shader onto the 1-deep save stack. */
void cso_save_vertex_shader(struct cso_context *ctx)
{
   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}
614
615 void cso_restore_vertex_shader(struct cso_context *ctx)
616 {
617 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
618 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
619 ctx->vertex_shader = ctx->vertex_shader_saved;
620 }
621 ctx->vertex_shader_saved = NULL;
622 }
623
624
625 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
626 const struct pipe_framebuffer_state *fb)
627 {
628 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
629 util_copy_framebuffer_state(&ctx->fb, fb);
630 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
631 }
632 return PIPE_OK;
633 }
634
/* Save the current framebuffer state (takes surface references). */
void cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}
639
640 void cso_restore_framebuffer(struct cso_context *ctx)
641 {
642 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
643 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
644 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
645 util_unreference_framebuffer_state(&ctx->fb_saved);
646 }
647 }
648
649
650 enum pipe_error cso_set_viewport(struct cso_context *ctx,
651 const struct pipe_viewport_state *vp)
652 {
653 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
654 ctx->vp = *vp;
655 ctx->pipe->set_viewport_state(ctx->pipe, vp);
656 }
657 return PIPE_OK;
658 }
659
/* Save the current viewport state (plain struct copy, no references). */
void cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}
664
665
666 void cso_restore_viewport(struct cso_context *ctx)
667 {
668 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
669 ctx->vp = ctx->vp_saved;
670 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
671 }
672 }
673
674
675 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
676 const struct pipe_blend_color *bc)
677 {
678 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
679 ctx->blend_color = *bc;
680 ctx->pipe->set_blend_color(ctx->pipe, bc);
681 }
682 return PIPE_OK;
683 }
684
685 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
686 unsigned sample_mask)
687 {
688 if (ctx->sample_mask != sample_mask) {
689 ctx->sample_mask = sample_mask;
690 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
691 }
692 return PIPE_OK;
693 }
694
/* Save the current sample mask onto the 1-deep stack. */
void cso_save_sample_mask(struct cso_context *ctx)
{
   ctx->sample_mask_saved = ctx->sample_mask;
}
699
/* Restore the sample mask saved by cso_save_sample_mask()
 * (defaults to ~0, set in cso_create_context()). */
void cso_restore_sample_mask(struct cso_context *ctx)
{
   cso_set_sample_mask(ctx, ctx->sample_mask_saved);
}
704
705 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
706 const struct pipe_stencil_ref *sr)
707 {
708 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
709 ctx->stencil_ref = *sr;
710 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
711 }
712 return PIPE_OK;
713 }
714
/* Save the current stencil reference values onto the 1-deep stack. */
void cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}
719
720
721 void cso_restore_stencil_ref(struct cso_context *ctx)
722 {
723 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
724 sizeof(ctx->stencil_ref))) {
725 ctx->stencil_ref = ctx->stencil_ref_saved;
726 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
727 }
728 }
729
730 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
731 void *handle)
732 {
733 assert(ctx->has_geometry_shader || !handle);
734
735 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
736 ctx->geometry_shader = handle;
737 ctx->pipe->bind_gs_state(ctx->pipe, handle);
738 }
739 return PIPE_OK;
740 }
741
742 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
743 {
744 if (handle == ctx->geometry_shader) {
745 /* unbind before deleting */
746 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
747 ctx->geometry_shader = NULL;
748 }
749 ctx->pipe->delete_gs_state(ctx->pipe, handle);
750 }
751
/* Push the current geometry shader onto the 1-deep save stack.
 * Silently does nothing when the driver has no geometry shader support. */
void cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   /* The stack is 1-deep: saving twice without a restore is a bug. */
   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}
761
762 void cso_restore_geometry_shader(struct cso_context *ctx)
763 {
764 if (!ctx->has_geometry_shader) {
765 return;
766 }
767
768 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
769 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
770 ctx->geometry_shader = ctx->geometry_shader_saved;
771 }
772 ctx->geometry_shader_saved = NULL;
773 }
774
775 /* clip state */
776
/* Copy just the user clip planes -- the only part of pipe_clip_state
 * this module compares and propagates. */
static INLINE void
clip_state_cpy(struct pipe_clip_state *dst,
               const struct pipe_clip_state *src)
{
   memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
}
783
/* Compare user clip planes; returns 0 when equal (memcmp semantics). */
static INLINE int
clip_state_cmp(const struct pipe_clip_state *a,
               const struct pipe_clip_state *b)
{
   return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
}
790
791 void
792 cso_set_clip(struct cso_context *ctx,
793 const struct pipe_clip_state *clip)
794 {
795 if (clip_state_cmp(&ctx->clip, clip)) {
796 clip_state_cpy(&ctx->clip, clip);
797 ctx->pipe->set_clip_state(ctx->pipe, clip);
798 }
799 }
800
/* Save the current user clip planes onto the 1-deep stack. */
void
cso_save_clip(struct cso_context *ctx)
{
   clip_state_cpy(&ctx->clip_saved, &ctx->clip);
}
806
807 void
808 cso_restore_clip(struct cso_context *ctx)
809 {
810 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
811 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
812 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
813 }
814 }
815
/**
 * Bind a vertex-elements state for \p count elements, creating and caching
 * a driver CSO on a cache miss.  When the u_vbuf fallback is installed the
 * call is forwarded there instead.
 * Returns PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }

   /* Need to include the count into the stored state data too.
    * Otherwise first few count pipe_vertex_elements could be identical
    * even if count is different, and there's no guarantee the hash would
    * be different in that case neither.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      /* Cache miss: create a new driver CSO and insert it. */
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      /* Only key_size bytes (count + 'count' elements) are meaningful. */
      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                          &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   /* Skip the driver call if this state is already bound. */
   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}
875
876 void cso_save_vertex_elements(struct cso_context *ctx)
877 {
878 struct u_vbuf *vbuf = ctx->vbuf;
879
880 if (vbuf) {
881 u_vbuf_save_vertex_elements(vbuf);
882 return;
883 }
884
885 assert(!ctx->velements_saved);
886 ctx->velements_saved = ctx->velements;
887 }
888
889 void cso_restore_vertex_elements(struct cso_context *ctx)
890 {
891 struct u_vbuf *vbuf = ctx->vbuf;
892
893 if (vbuf) {
894 u_vbuf_restore_vertex_elements(vbuf);
895 return;
896 }
897
898 if (ctx->velements != ctx->velements_saved) {
899 ctx->velements = ctx->velements_saved;
900 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
901 }
902 ctx->velements_saved = NULL;
903 }
904
905 /* vertex buffers */
906
907 void cso_set_vertex_buffers(struct cso_context *ctx,
908 unsigned count,
909 const struct pipe_vertex_buffer *buffers)
910 {
911 struct u_vbuf *vbuf = ctx->vbuf;
912
913 if (vbuf) {
914 u_vbuf_set_vertex_buffers(vbuf, count, buffers);
915 return;
916 }
917
918 if (count != ctx->nr_vertex_buffers ||
919 memcmp(buffers, ctx->vertex_buffers,
920 sizeof(struct pipe_vertex_buffer) * count) != 0) {
921 util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
922 buffers, count);
923 ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
924 }
925 }
926
927 void cso_save_vertex_buffers(struct cso_context *ctx)
928 {
929 struct u_vbuf *vbuf = ctx->vbuf;
930
931 if (vbuf) {
932 u_vbuf_save_vertex_buffers(vbuf);
933 return;
934 }
935
936 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
937 &ctx->nr_vertex_buffers_saved,
938 ctx->vertex_buffers,
939 ctx->nr_vertex_buffers);
940 }
941
/* Restore the vertex buffers saved by cso_save_vertex_buffers() and
 * release the saved references.  Forwarded to u_vbuf when installed. */
void cso_restore_vertex_buffers(struct cso_context *ctx)
{
   unsigned i;
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_buffers(vbuf);
      return;
   }

   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            ctx->vertex_buffers_saved,
                            ctx->nr_vertex_buffers_saved);

   /* Drop the saved references now that they've been copied back. */
   for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
      pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
   }
   ctx->nr_vertex_buffers_saved = 0;

   ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
                                 ctx->vertex_buffers);
}
965
966
967 /**************** fragment/vertex sampler view state *************************/
968
/**
 * Set one sampler in \p info->samplers[idx], looking it up in (or adding
 * it to) the CSO cache.  A NULL \p templ clears the slot.
 * Does NOT bind anything on the pipe -- callers must follow up with
 * single_sampler_done().
 * Returns PIPE_ERROR_OUT_OF_MEMORY on allocation failure.
 */
static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         /* Cache miss: create a new driver sampler CSO. */
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}
1013
/* Public wrapper: set one sampler for the given shader stage.
 * Call cso_single_sampler_done() afterwards to bind the result. */
enum pipe_error
cso_single_sampler(struct cso_context *ctx,
                   unsigned shader_stage,
                   unsigned idx,
                   const struct pipe_sampler_state *templ)
{
   return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
}
1022
1023
1024
/**
 * Recompute the active sampler count for \p shader_stage and bind the
 * samplers on the pipe, skipping the call if nothing changed since the
 * last bind (tracked in info->hw).
 */
static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   /* Only bind when the count or any handle actually changed. */
   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));
      info->hw.nr_samplers = info->nr_samplers;

      switch (shader_stage) {
      case PIPE_SHADER_FRAGMENT:
         ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      case PIPE_SHADER_VERTEX:
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
                                               info->nr_samplers,
                                               info->samplers);
         break;
      case PIPE_SHADER_GEOMETRY:
         ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      default:
         assert(!"bad shader type in single_sampler_done()");
      }
   }
}
1070
/* Public wrapper: bind the samplers accumulated via cso_single_sampler(). */
void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}
1076
1077
1078 /*
1079 * If the function encouters any errors it will return the
1080 * last one. Done to always try to set as many samplers
1081 * as possible.
1082 */
1083 enum pipe_error
1084 cso_set_samplers(struct cso_context *ctx,
1085 unsigned shader_stage,
1086 unsigned nr,
1087 const struct pipe_sampler_state **templates)
1088 {
1089 struct sampler_info *info = &ctx->samplers[shader_stage];
1090 unsigned i;
1091 enum pipe_error temp, error = PIPE_OK;
1092
1093 /* TODO: fastpath
1094 */
1095
1096 for (i = 0; i < nr; i++) {
1097 temp = single_sampler(ctx, info, i, templates[i]);
1098 if (temp != PIPE_OK)
1099 error = temp;
1100 }
1101
1102 for ( ; i < info->nr_samplers; i++) {
1103 temp = single_sampler(ctx, info, i, NULL);
1104 if (temp != PIPE_OK)
1105 error = temp;
1106 }
1107
1108 single_sampler_done(ctx, shader_stage);
1109
1110 return error;
1111 }
1112
/* Save the current samplers for a stage onto the 1-deep stack
 * (copies the whole handle array, not just nr_samplers entries). */
void
cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers_saved = info->nr_samplers;
   memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
}
1120
1121
1122 void
1123 cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
1124 {
1125 struct sampler_info *info = &ctx->samplers[shader_stage];
1126 info->nr_samplers = info->nr_samplers_saved;
1127 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1128 single_sampler_done(ctx, shader_stage);
1129 }
1130
1131
1132 void
1133 cso_set_sampler_views(struct cso_context *ctx,
1134 unsigned shader_stage,
1135 unsigned count,
1136 struct pipe_sampler_view **views)
1137 {
1138 struct sampler_info *info = &ctx->samplers[shader_stage];
1139 unsigned i;
1140
1141 /* reference new views */
1142 for (i = 0; i < count; i++) {
1143 pipe_sampler_view_reference(&info->views[i], views[i]);
1144 }
1145 /* unref extra old views, if any */
1146 for (; i < info->nr_views; i++) {
1147 pipe_sampler_view_reference(&info->views[i], NULL);
1148 }
1149
1150 info->nr_views = count;
1151
1152 /* bind the new sampler views */
1153 switch (shader_stage) {
1154 case PIPE_SHADER_FRAGMENT:
1155 ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
1156 break;
1157 case PIPE_SHADER_VERTEX:
1158 ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
1159 break;
1160 case PIPE_SHADER_GEOMETRY:
1161 ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
1162 break;
1163 default:
1164 assert(!"bad shader type in cso_set_sampler_views()");
1165 }
1166 }
1167
1168
1169 void
1170 cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1171 {
1172 struct sampler_info *info = &ctx->samplers[shader_stage];
1173 unsigned i;
1174
1175 info->nr_views_saved = info->nr_views;
1176
1177 for (i = 0; i < info->nr_views; i++) {
1178 assert(!info->views_saved[i]);
1179 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1180 }
1181 }
1182
1183
1184 void
1185 cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
1186 {
1187 struct sampler_info *info = &ctx->samplers[shader_stage];
1188 unsigned i, nr_saved = info->nr_views_saved;
1189
1190 for (i = 0; i < nr_saved; i++) {
1191 pipe_sampler_view_reference(&info->views[i], NULL);
1192 /* move the reference from one pointer to another */
1193 info->views[i] = info->views_saved[i];
1194 info->views_saved[i] = NULL;
1195 }
1196 for (; i < info->nr_views; i++) {
1197 pipe_sampler_view_reference(&info->views[i], NULL);
1198 }
1199
1200 /* bind the old/saved sampler views */
1201 switch (shader_stage) {
1202 case PIPE_SHADER_FRAGMENT:
1203 ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
1204 break;
1205 case PIPE_SHADER_VERTEX:
1206 ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
1207 break;
1208 case PIPE_SHADER_GEOMETRY:
1209 ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
1210 break;
1211 default:
1212 assert(!"bad shader type in cso_restore_sampler_views()");
1213 }
1214
1215 info->nr_views = nr_saved;
1216 info->nr_views_saved = 0;
1217 }
1218
1219
1220 void
1221 cso_set_stream_outputs(struct cso_context *ctx,
1222 unsigned num_targets,
1223 struct pipe_stream_output_target **targets,
1224 unsigned append_bitmask)
1225 {
1226 struct pipe_context *pipe = ctx->pipe;
1227 uint i;
1228
1229 if (!ctx->has_streamout) {
1230 assert(num_targets == 0);
1231 return;
1232 }
1233
1234 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1235 /* Nothing to do. */
1236 return;
1237 }
1238
1239 /* reference new targets */
1240 for (i = 0; i < num_targets; i++) {
1241 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1242 }
1243 /* unref extra old targets, if any */
1244 for (; i < ctx->nr_so_targets; i++) {
1245 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1246 }
1247
1248 pipe->set_stream_output_targets(pipe, num_targets, targets,
1249 append_bitmask);
1250 ctx->nr_so_targets = num_targets;
1251 }
1252
1253 void
1254 cso_save_stream_outputs(struct cso_context *ctx)
1255 {
1256 uint i;
1257
1258 if (!ctx->has_streamout) {
1259 return;
1260 }
1261
1262 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1263
1264 for (i = 0; i < ctx->nr_so_targets; i++) {
1265 assert(!ctx->so_targets_saved[i]);
1266 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1267 }
1268 }
1269
1270 void
1271 cso_restore_stream_outputs(struct cso_context *ctx)
1272 {
1273 struct pipe_context *pipe = ctx->pipe;
1274 uint i;
1275
1276 if (!ctx->has_streamout) {
1277 return;
1278 }
1279
1280 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1281 /* Nothing to do. */
1282 return;
1283 }
1284
1285 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1286 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1287 /* move the reference from one pointer to another */
1288 ctx->so_targets[i] = ctx->so_targets_saved[i];
1289 ctx->so_targets_saved[i] = NULL;
1290 }
1291 for (; i < ctx->nr_so_targets; i++) {
1292 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1293 }
1294
1295 /* ~0 means append */
1296 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1297 ctx->so_targets, ~0);
1298
1299 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1300 ctx->nr_so_targets_saved = 0;
1301 }
1302
1303 /* drawing */
1304
1305 void
1306 cso_set_index_buffer(struct cso_context *cso,
1307 const struct pipe_index_buffer *ib)
1308 {
1309 struct u_vbuf *vbuf = cso->vbuf;
1310
1311 if (vbuf) {
1312 u_vbuf_set_index_buffer(vbuf, ib);
1313 } else {
1314 struct pipe_context *pipe = cso->pipe;
1315 pipe->set_index_buffer(pipe, ib);
1316 }
1317 }
1318
1319 void
1320 cso_draw_vbo(struct cso_context *cso,
1321 const struct pipe_draw_info *info)
1322 {
1323 struct u_vbuf *vbuf = cso->vbuf;
1324
1325 if (vbuf) {
1326 u_vbuf_draw_vbo(vbuf, info);
1327 } else {
1328 struct pipe_context *pipe = cso->pipe;
1329 pipe->draw_vbo(pipe, info);
1330 }
1331 }
1332
/**
 * Convenience wrapper for drawing a non-indexed range of vertices.
 * Fills in a pipe_draw_info and forwards it to cso_draw_vbo().
 */
void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   /* non-indexed: the referenced vertex range is exactly
    * [start, start + count - 1]
    */
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}