cso: cso_context should install u_vbuf by itself and not st/mesa
[mesa.git] src/gallium/auxiliary/cso_cache/cso_context.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zack@tungstengraphics.com>
35 * @author Keith Whitwell <keith@tungstengraphics.com>
36 */
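
/*
 * Illustrative usage sketch (not part of the original file): a state
 * tracker typically brackets a meta operation with the save/set/restore
 * helpers below, which use the 1-deep saved-state stack kept in
 * cso_context.  The blend/rasterizer templates here are hypothetical.
 *
 *    cso_save_blend(cso);
 *    cso_save_rasterizer(cso);
 *
 *    cso_set_blend(cso, &meta_blend);
 *    cso_set_rasterizer(cso, &meta_rast);
 *    ... issue the meta draw ...
 *
 *    cso_restore_blend(cso);
 *    cso_restore_rasterizer(cso);
 */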
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Info related to samplers and sampler views.
55 * We have one of these for fragment samplers and another for vertex samplers.
56 */
57 struct sampler_info
58 {
59 struct {
60 void *samplers[PIPE_MAX_SAMPLERS];
61 unsigned nr_samplers;
62 } hw;
63
64 void *samplers[PIPE_MAX_SAMPLERS];
65 unsigned nr_samplers;
66
67 void *samplers_saved[PIPE_MAX_SAMPLERS];
68 unsigned nr_samplers_saved;
69
70 struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
71 unsigned nr_views;
72
73 struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
74 unsigned nr_views_saved;
75 };
76
77
78
79 struct cso_context {
80 struct pipe_context *pipe;
81 struct cso_cache *cache;
82 struct u_vbuf *vbuf;
83
84 boolean has_geometry_shader;
85 boolean has_streamout;
86
87 struct sampler_info fragment_samplers;
88 struct sampler_info vertex_samplers;
89
90 uint nr_vertex_buffers;
91 struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];
92
93 uint nr_vertex_buffers_saved;
94 struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved, *geometry_shader;
109 void *vertex_shader, *vertex_shader_saved, *geometry_shader_saved;
110 void *velements, *velements_saved;
111
112 struct pipe_clip_state clip;
113 struct pipe_clip_state clip_saved;
114
115 struct pipe_framebuffer_state fb, fb_saved;
116 struct pipe_viewport_state vp, vp_saved;
117 struct pipe_blend_color blend_color;
118 unsigned sample_mask;
119 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
120 };
121
122
123 static boolean delete_blend_state(struct cso_context *ctx, void *state)
124 {
125 struct cso_blend *cso = (struct cso_blend *)state;
126
127 if (ctx->blend == cso->data)
128 return FALSE;
129
130 if (cso->delete_state)
131 cso->delete_state(cso->context, cso->data);
132 FREE(state);
133 return TRUE;
134 }
135
136 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
137 {
138 struct cso_depth_stencil_alpha *cso = (struct cso_depth_stencil_alpha *)state;
139
140 if (ctx->depth_stencil == cso->data)
141 return FALSE;
142
143 if (cso->delete_state)
144 cso->delete_state(cso->context, cso->data);
145 FREE(state);
146
147 return TRUE;
148 }
149
150 static boolean delete_sampler_state(struct cso_context *ctx, void *state)
151 {
152 struct cso_sampler *cso = (struct cso_sampler *)state;
153 if (cso->delete_state)
154 cso->delete_state(cso->context, cso->data);
155 FREE(state);
156 return TRUE;
157 }
158
159 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
160 {
161 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
162
163 if (ctx->rasterizer == cso->data)
164 return FALSE;
165 if (cso->delete_state)
166 cso->delete_state(cso->context, cso->data);
167 FREE(state);
168 return TRUE;
169 }
170
171 static boolean delete_vertex_elements(struct cso_context *ctx,
172 void *state)
173 {
174 struct cso_velements *cso = (struct cso_velements *)state;
175
176 if (ctx->velements == cso->data)
177 return FALSE;
178
179 if (cso->delete_state)
180 cso->delete_state(cso->context, cso->data);
181 FREE(state);
182 return TRUE;
183 }
184
185
186 static INLINE boolean delete_cso(struct cso_context *ctx,
187 void *state, enum cso_cache_type type)
188 {
189 switch (type) {
190 case CSO_BLEND:
191 return delete_blend_state(ctx, state);
192 break;
193 case CSO_SAMPLER:
194 return delete_sampler_state(ctx, state);
195 break;
196 case CSO_DEPTH_STENCIL_ALPHA:
197 return delete_depth_stencil_state(ctx, state);
198 break;
199 case CSO_RASTERIZER:
200 return delete_rasterizer_state(ctx, state);
201 break;
202 case CSO_VELEMENTS:
203 return delete_vertex_elements(ctx, state);
204 break;
205 default:
206 assert(0);
207 FREE(state);
208 }
209 return FALSE;
210 }
211
212 static INLINE void sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
213 int max_size, void *user_data)
214 {
215 struct cso_context *ctx = (struct cso_context *)user_data;
216 /* if we're approaching the maximum size, remove a fourth of the entries;
217 * otherwise every subsequent call will go through the same thing */
218 int hash_size = cso_hash_size(hash);
219 int max_entries = (max_size > hash_size) ? max_size : hash_size;
220 int to_remove = (max_size < max_entries) * max_entries/4;
221 struct cso_hash_iter iter = cso_hash_first_node(hash);
222 if (hash_size > max_size)
223 to_remove += hash_size - max_size;
224 while (to_remove) {
225 /* remove elements until we're good */
226 /* FIXME: currently we pick the nodes to remove at random */
227 void *cso = cso_hash_iter_data(iter);
228 if (delete_cso(ctx, cso, type)) {
229 iter = cso_hash_erase(hash, iter);
230 --to_remove;
231 } else
232 iter = cso_hash_iter_next(iter);
233 }
234 }
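
/*
 * Worked example of the eviction math above (illustrative): with
 * max_size = 4 (the testing value optionally set in cso_create_context)
 * and hash_size = 5, max_entries = 5, so to_remove starts at 5/4 = 1 and
 * the overflow adds 5 - 4 = 1 more, i.e. two entries get evicted.  When
 * hash_size <= max_size, nothing is removed.
 */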
235
236 static void cso_init_vbuf(struct cso_context *cso)
237 {
238 struct u_vbuf_caps caps;
239
240 u_vbuf_get_caps(cso->pipe->screen, &caps);
241
242 /* Install u_vbuf if there is anything unsupported. */
243 if (!caps.buffer_offset_unaligned ||
244 !caps.buffer_stride_unaligned ||
245 !caps.velem_src_offset_unaligned ||
246 !caps.format_fixed32 ||
247 !caps.format_float16 ||
248 !caps.format_float64 ||
249 !caps.format_norm32 ||
250 !caps.format_scaled32 ||
251 !caps.user_vertex_buffers) {
252 cso->vbuf = u_vbuf_create(cso->pipe, &caps);
253 }
254 }
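
/*
 * Once u_vbuf is installed here, the vertex-element, vertex-buffer,
 * index-buffer and draw entry points below hand off to it instead of the
 * driver, following this pattern (sketch; see e.g. cso_draw_vbo):
 *
 *    if (cso->vbuf)
 *       u_vbuf_draw_vbo(cso->vbuf, info);
 *    else
 *       cso->pipe->draw_vbo(cso->pipe, info);
 */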
255
256 struct cso_context *cso_create_context( struct pipe_context *pipe )
257 {
258 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
259 if (ctx == NULL)
260 goto out;
261
262 assert(PIPE_MAX_SAMPLERS == PIPE_MAX_VERTEX_SAMPLERS);
263
264 ctx->cache = cso_cache_create();
265 if (ctx->cache == NULL)
266 goto out;
267 cso_cache_set_sanitize_callback(ctx->cache,
268 sanitize_hash,
269 ctx);
270
271 ctx->pipe = pipe;
272
273 cso_init_vbuf(ctx);
274
275 /* Enable for testing: */
276 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
277
278 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
279 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
280 ctx->has_geometry_shader = TRUE;
281 }
282 if (pipe->screen->get_param(pipe->screen,
283 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
284 ctx->has_streamout = TRUE;
285 }
286
287 return ctx;
288
289 out:
290 cso_destroy_context( ctx );
291 return NULL;
292 }
293
294 /**
295 * Prior to context destruction, this function unbinds all state objects.
296 */
297 void cso_release_all( struct cso_context *ctx )
298 {
299 unsigned i;
300 struct sampler_info *info;
301
302 if (ctx->pipe) {
303 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
304 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
305 ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
306 if (ctx->pipe->bind_vertex_sampler_states)
307 ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
308 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
309 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
310 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
311 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
312 ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
313 if (ctx->pipe->set_vertex_sampler_views)
314 ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
315 if (ctx->pipe->set_stream_output_targets)
316 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
317 }
318
319 /* free fragment samplers, views */
320 info = &ctx->fragment_samplers;
321 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
322 pipe_sampler_view_reference(&info->views[i], NULL);
323 pipe_sampler_view_reference(&info->views_saved[i], NULL);
324 }
325
326 /* free vertex samplers, views */
327 info = &ctx->vertex_samplers;
328 for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
329 pipe_sampler_view_reference(&info->views[i], NULL);
330 pipe_sampler_view_reference(&info->views_saved[i], NULL);
331 }
332
333 util_unreference_framebuffer_state(&ctx->fb);
334 util_unreference_framebuffer_state(&ctx->fb_saved);
335
336 util_copy_vertex_buffers(ctx->vertex_buffers,
337 &ctx->nr_vertex_buffers,
338 NULL, 0);
339 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
340 &ctx->nr_vertex_buffers_saved,
341 NULL, 0);
342
343 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
344 pipe_so_target_reference(&ctx->so_targets[i], NULL);
345 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
346 }
347
348 if (ctx->cache) {
349 cso_cache_delete( ctx->cache );
350 ctx->cache = NULL;
351 }
352 }
353
354
355 /**
356 * Free the CSO context. NOTE: the state tracker should have previously called
357 * cso_release_all().
358 */
359 void cso_destroy_context( struct cso_context *ctx )
360 {
361 if (ctx) {
362 if (ctx->vbuf)
363 u_vbuf_destroy(ctx->vbuf);
364 FREE( ctx );
365 }
366 }
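
/*
 * Typical teardown sequence (illustrative): unbind everything first, then
 * free the wrapper.
 *
 *    cso_release_all(cso);
 *    cso_destroy_context(cso);
 */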
367
368
369 /* These functions will either find the state matching the given template
370 * in the cache, or they will create a new state from the given
371 * template, insert it in the cache and return it.
372 */
373
374 /*
375 * If the driver returns 0 from the create method, these functions will
376 * assign the template itself to the data member of the cso.
377 */
378
379 enum pipe_error cso_set_blend(struct cso_context *ctx,
380 const struct pipe_blend_state *templ)
381 {
382 unsigned key_size, hash_key;
383 struct cso_hash_iter iter;
384 void *handle;
385
386 key_size = templ->independent_blend_enable ? sizeof(struct pipe_blend_state) :
387 (char *)&(templ->rt[1]) - (char *)templ;
388 hash_key = cso_construct_key((void*)templ, key_size);
389 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND, (void*)templ, key_size);
390
391 if (cso_hash_iter_is_null(iter)) {
392 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
393 if (!cso)
394 return PIPE_ERROR_OUT_OF_MEMORY;
395
396 memset(&cso->state, 0, sizeof cso->state);
397 memcpy(&cso->state, templ, key_size);
398 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
399 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
400 cso->context = ctx->pipe;
401
402 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
403 if (cso_hash_iter_is_null(iter)) {
404 FREE(cso);
405 return PIPE_ERROR_OUT_OF_MEMORY;
406 }
407
408 handle = cso->data;
409 }
410 else {
411 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
412 }
413
414 if (ctx->blend != handle) {
415 ctx->blend = handle;
416 ctx->pipe->bind_blend_state(ctx->pipe, handle);
417 }
418 return PIPE_OK;
419 }
420
421 void cso_save_blend(struct cso_context *ctx)
422 {
423 assert(!ctx->blend_saved);
424 ctx->blend_saved = ctx->blend;
425 }
426
427 void cso_restore_blend(struct cso_context *ctx)
428 {
429 if (ctx->blend != ctx->blend_saved) {
430 ctx->blend = ctx->blend_saved;
431 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
432 }
433 ctx->blend_saved = NULL;
434 }
435
436
437
438 enum pipe_error cso_set_depth_stencil_alpha(struct cso_context *ctx,
439 const struct pipe_depth_stencil_alpha_state *templ)
440 {
441 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
442 unsigned hash_key = cso_construct_key((void*)templ, key_size);
443 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
444 hash_key,
445 CSO_DEPTH_STENCIL_ALPHA,
446 (void*)templ, key_size);
447 void *handle;
448
449 if (cso_hash_iter_is_null(iter)) {
450 struct cso_depth_stencil_alpha *cso = MALLOC(sizeof(struct cso_depth_stencil_alpha));
451 if (!cso)
452 return PIPE_ERROR_OUT_OF_MEMORY;
453
454 memcpy(&cso->state, templ, sizeof(*templ));
455 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe, &cso->state);
456 cso->delete_state = (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
457 cso->context = ctx->pipe;
458
459 iter = cso_insert_state(ctx->cache, hash_key, CSO_DEPTH_STENCIL_ALPHA, cso);
460 if (cso_hash_iter_is_null(iter)) {
461 FREE(cso);
462 return PIPE_ERROR_OUT_OF_MEMORY;
463 }
464
465 handle = cso->data;
466 }
467 else {
468 handle = ((struct cso_depth_stencil_alpha *)cso_hash_iter_data(iter))->data;
469 }
470
471 if (ctx->depth_stencil != handle) {
472 ctx->depth_stencil = handle;
473 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
474 }
475 return PIPE_OK;
476 }
477
478 void cso_save_depth_stencil_alpha(struct cso_context *ctx)
479 {
480 assert(!ctx->depth_stencil_saved);
481 ctx->depth_stencil_saved = ctx->depth_stencil;
482 }
483
484 void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
485 {
486 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
487 ctx->depth_stencil = ctx->depth_stencil_saved;
488 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, ctx->depth_stencil_saved);
489 }
490 ctx->depth_stencil_saved = NULL;
491 }
492
493
494
495 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
496 const struct pipe_rasterizer_state *templ)
497 {
498 unsigned key_size = sizeof(struct pipe_rasterizer_state);
499 unsigned hash_key = cso_construct_key((void*)templ, key_size);
500 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
501 hash_key, CSO_RASTERIZER,
502 (void*)templ, key_size);
503 void *handle = NULL;
504
505 if (cso_hash_iter_is_null(iter)) {
506 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
507 if (!cso)
508 return PIPE_ERROR_OUT_OF_MEMORY;
509
510 memcpy(&cso->state, templ, sizeof(*templ));
511 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
512 cso->delete_state = (cso_state_callback)ctx->pipe->delete_rasterizer_state;
513 cso->context = ctx->pipe;
514
515 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
516 if (cso_hash_iter_is_null(iter)) {
517 FREE(cso);
518 return PIPE_ERROR_OUT_OF_MEMORY;
519 }
520
521 handle = cso->data;
522 }
523 else {
524 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
525 }
526
527 if (ctx->rasterizer != handle) {
528 ctx->rasterizer = handle;
529 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
530 }
531 return PIPE_OK;
532 }
533
534 void cso_save_rasterizer(struct cso_context *ctx)
535 {
536 assert(!ctx->rasterizer_saved);
537 ctx->rasterizer_saved = ctx->rasterizer;
538 }
539
540 void cso_restore_rasterizer(struct cso_context *ctx)
541 {
542 if (ctx->rasterizer != ctx->rasterizer_saved) {
543 ctx->rasterizer = ctx->rasterizer_saved;
544 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
545 }
546 ctx->rasterizer_saved = NULL;
547 }
548
549
550
551 enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
552 void *handle )
553 {
554 if (ctx->fragment_shader != handle) {
555 ctx->fragment_shader = handle;
556 ctx->pipe->bind_fs_state(ctx->pipe, handle);
557 }
558 return PIPE_OK;
559 }
560
561 void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
562 {
563 if (handle == ctx->fragment_shader) {
564 /* unbind before deleting */
565 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
566 ctx->fragment_shader = NULL;
567 }
568 ctx->pipe->delete_fs_state(ctx->pipe, handle);
569 }
570
571 void cso_save_fragment_shader(struct cso_context *ctx)
572 {
573 assert(!ctx->fragment_shader_saved);
574 ctx->fragment_shader_saved = ctx->fragment_shader;
575 }
576
577 void cso_restore_fragment_shader(struct cso_context *ctx)
578 {
579 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
580 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
581 ctx->fragment_shader = ctx->fragment_shader_saved;
582 }
583 ctx->fragment_shader_saved = NULL;
584 }
585
586
587 enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
588 void *handle )
589 {
590 if (ctx->vertex_shader != handle) {
591 ctx->vertex_shader = handle;
592 ctx->pipe->bind_vs_state(ctx->pipe, handle);
593 }
594 return PIPE_OK;
595 }
596
597 void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
598 {
599 if (handle == ctx->vertex_shader) {
600 /* unbind before deleting */
601 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
602 ctx->vertex_shader = NULL;
603 }
604 ctx->pipe->delete_vs_state(ctx->pipe, handle);
605 }
606
607 void cso_save_vertex_shader(struct cso_context *ctx)
608 {
609 assert(!ctx->vertex_shader_saved);
610 ctx->vertex_shader_saved = ctx->vertex_shader;
611 }
612
613 void cso_restore_vertex_shader(struct cso_context *ctx)
614 {
615 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
616 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
617 ctx->vertex_shader = ctx->vertex_shader_saved;
618 }
619 ctx->vertex_shader_saved = NULL;
620 }
621
622
623 enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
624 const struct pipe_framebuffer_state *fb)
625 {
626 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
627 util_copy_framebuffer_state(&ctx->fb, fb);
628 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
629 }
630 return PIPE_OK;
631 }
632
633 void cso_save_framebuffer(struct cso_context *ctx)
634 {
635 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
636 }
637
638 void cso_restore_framebuffer(struct cso_context *ctx)
639 {
640 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
641 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
642 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
643 util_unreference_framebuffer_state(&ctx->fb_saved);
644 }
645 }
646
647
648 enum pipe_error cso_set_viewport(struct cso_context *ctx,
649 const struct pipe_viewport_state *vp)
650 {
651 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
652 ctx->vp = *vp;
653 ctx->pipe->set_viewport_state(ctx->pipe, vp);
654 }
655 return PIPE_OK;
656 }
657
658 void cso_save_viewport(struct cso_context *ctx)
659 {
660 ctx->vp_saved = ctx->vp;
661 }
662
663
664 void cso_restore_viewport(struct cso_context *ctx)
665 {
666 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
667 ctx->vp = ctx->vp_saved;
668 ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
669 }
670 }
671
672
673 enum pipe_error cso_set_blend_color(struct cso_context *ctx,
674 const struct pipe_blend_color *bc)
675 {
676 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
677 ctx->blend_color = *bc;
678 ctx->pipe->set_blend_color(ctx->pipe, bc);
679 }
680 return PIPE_OK;
681 }
682
683 enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
684 unsigned sample_mask)
685 {
686 if (ctx->sample_mask != sample_mask) {
687 ctx->sample_mask = sample_mask;
688 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
689 }
690 return PIPE_OK;
691 }
692
693 enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
694 const struct pipe_stencil_ref *sr)
695 {
696 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
697 ctx->stencil_ref = *sr;
698 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
699 }
700 return PIPE_OK;
701 }
702
703 void cso_save_stencil_ref(struct cso_context *ctx)
704 {
705 ctx->stencil_ref_saved = ctx->stencil_ref;
706 }
707
708
709 void cso_restore_stencil_ref(struct cso_context *ctx)
710 {
711 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved, sizeof(ctx->stencil_ref))) {
712 ctx->stencil_ref = ctx->stencil_ref_saved;
713 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
714 }
715 }
716
717 enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
718 void *handle)
719 {
720 assert(ctx->has_geometry_shader || !handle);
721
722 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
723 ctx->geometry_shader = handle;
724 ctx->pipe->bind_gs_state(ctx->pipe, handle);
725 }
726 return PIPE_OK;
727 }
728
729 void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
730 {
731 if (handle == ctx->geometry_shader) {
732 /* unbind before deleting */
733 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
734 ctx->geometry_shader = NULL;
735 }
736 ctx->pipe->delete_gs_state(ctx->pipe, handle);
737 }
738
739 void cso_save_geometry_shader(struct cso_context *ctx)
740 {
741 if (!ctx->has_geometry_shader) {
742 return;
743 }
744
745 assert(!ctx->geometry_shader_saved);
746 ctx->geometry_shader_saved = ctx->geometry_shader;
747 }
748
749 void cso_restore_geometry_shader(struct cso_context *ctx)
750 {
751 if (!ctx->has_geometry_shader) {
752 return;
753 }
754
755 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
756 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
757 ctx->geometry_shader = ctx->geometry_shader_saved;
758 }
759 ctx->geometry_shader_saved = NULL;
760 }
761
762 /* clip state */
763
764 static INLINE void
765 clip_state_cpy(struct pipe_clip_state *dst,
766 const struct pipe_clip_state *src)
767 {
768 memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
769 }
770
771 static INLINE int
772 clip_state_cmp(const struct pipe_clip_state *a,
773 const struct pipe_clip_state *b)
774 {
775 return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
776 }
777
778 void
779 cso_set_clip(struct cso_context *ctx,
780 const struct pipe_clip_state *clip)
781 {
782 if (clip_state_cmp(&ctx->clip, clip)) {
783 clip_state_cpy(&ctx->clip, clip);
784 ctx->pipe->set_clip_state(ctx->pipe, clip);
785 }
786 }
787
788 void
789 cso_save_clip(struct cso_context *ctx)
790 {
791 clip_state_cpy(&ctx->clip_saved, &ctx->clip);
792 }
793
794 void
795 cso_restore_clip(struct cso_context *ctx)
796 {
797 if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
798 clip_state_cpy(&ctx->clip, &ctx->clip_saved);
799 ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
800 }
801 }
802
803 enum pipe_error cso_set_vertex_elements(struct cso_context *ctx,
804 unsigned count,
805 const struct pipe_vertex_element *states)
806 {
807 struct u_vbuf *vbuf = ctx->vbuf;
808 unsigned key_size, hash_key;
809 struct cso_hash_iter iter;
810 void *handle;
811 struct cso_velems_state velems_state;
812
813 if (vbuf) {
814 u_vbuf_set_vertex_elements(vbuf, count, states);
815 return PIPE_OK;
816 }
817
818 /* We need to include the count in the stored state data too.
819 Otherwise the first few pipe_vertex_elements could be identical even if the
820 counts differ, and there's no guarantee the hash would be different in that
821 case either. */
822 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
823 velems_state.count = count;
824 memcpy(velems_state.velems, states, sizeof(struct pipe_vertex_element) * count);
825 hash_key = cso_construct_key((void*)&velems_state, key_size);
826 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS, (void*)&velems_state, key_size);
827
828 if (cso_hash_iter_is_null(iter)) {
829 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
830 if (!cso)
831 return PIPE_ERROR_OUT_OF_MEMORY;
832
833 memcpy(&cso->state, &velems_state, key_size);
834 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count, &cso->state.velems[0]);
835 cso->delete_state = (cso_state_callback)ctx->pipe->delete_vertex_elements_state;
836 cso->context = ctx->pipe;
837
838 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
839 if (cso_hash_iter_is_null(iter)) {
840 FREE(cso);
841 return PIPE_ERROR_OUT_OF_MEMORY;
842 }
843
844 handle = cso->data;
845 }
846 else {
847 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
848 }
849
850 if (ctx->velements != handle) {
851 ctx->velements = handle;
852 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
853 }
854 return PIPE_OK;
855 }
856
857 void cso_save_vertex_elements(struct cso_context *ctx)
858 {
859 struct u_vbuf *vbuf = ctx->vbuf;
860
861 if (vbuf) {
862 u_vbuf_save_vertex_elements(vbuf);
863 return;
864 }
865
866 assert(!ctx->velements_saved);
867 ctx->velements_saved = ctx->velements;
868 }
869
870 void cso_restore_vertex_elements(struct cso_context *ctx)
871 {
872 struct u_vbuf *vbuf = ctx->vbuf;
873
874 if (vbuf) {
875 u_vbuf_restore_vertex_elements(vbuf);
876 return;
877 }
878
879 if (ctx->velements != ctx->velements_saved) {
880 ctx->velements = ctx->velements_saved;
881 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
882 }
883 ctx->velements_saved = NULL;
884 }
885
886 /* vertex buffers */
887
888 void cso_set_vertex_buffers(struct cso_context *ctx,
889 unsigned count,
890 const struct pipe_vertex_buffer *buffers)
891 {
892 struct u_vbuf *vbuf = ctx->vbuf;
893
894 if (vbuf) {
895 u_vbuf_set_vertex_buffers(vbuf, count, buffers);
896 return;
897 }
898
899 if (count != ctx->nr_vertex_buffers ||
900 memcmp(buffers, ctx->vertex_buffers,
901 sizeof(struct pipe_vertex_buffer) * count) != 0) {
902 util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
903 buffers, count);
904 ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
905 }
906 }
907
908 void cso_save_vertex_buffers(struct cso_context *ctx)
909 {
910 struct u_vbuf *vbuf = ctx->vbuf;
911
912 if (vbuf) {
913 u_vbuf_save_vertex_buffers(vbuf);
914 return;
915 }
916
917 util_copy_vertex_buffers(ctx->vertex_buffers_saved,
918 &ctx->nr_vertex_buffers_saved,
919 ctx->vertex_buffers,
920 ctx->nr_vertex_buffers);
921 }
922
923 void cso_restore_vertex_buffers(struct cso_context *ctx)
924 {
925 unsigned i;
926 struct u_vbuf *vbuf = ctx->vbuf;
927
928 if (vbuf) {
929 u_vbuf_restore_vertex_buffers(vbuf);
930 return;
931 }
932
933 util_copy_vertex_buffers(ctx->vertex_buffers,
934 &ctx->nr_vertex_buffers,
935 ctx->vertex_buffers_saved,
936 ctx->nr_vertex_buffers_saved);
937
938 for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
939 pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
940 }
941 ctx->nr_vertex_buffers_saved = 0;
942
943 ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
944 ctx->vertex_buffers);
945 }
946
947
948 /**************** fragment/vertex sampler view state *************************/
949
950 static enum pipe_error
951 single_sampler(struct cso_context *ctx,
952 struct sampler_info *info,
953 unsigned idx,
954 const struct pipe_sampler_state *templ)
955 {
956 void *handle = NULL;
957
958 if (templ != NULL) {
959 unsigned key_size = sizeof(struct pipe_sampler_state);
960 unsigned hash_key = cso_construct_key((void*)templ, key_size);
961 struct cso_hash_iter iter =
962 cso_find_state_template(ctx->cache,
963 hash_key, CSO_SAMPLER,
964 (void *) templ, key_size);
965
966 if (cso_hash_iter_is_null(iter)) {
967 struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
968 if (!cso)
969 return PIPE_ERROR_OUT_OF_MEMORY;
970
971 memcpy(&cso->state, templ, sizeof(*templ));
972 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
973 cso->delete_state = (cso_state_callback)ctx->pipe->delete_sampler_state;
974 cso->context = ctx->pipe;
975
976 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
977 if (cso_hash_iter_is_null(iter)) {
978 FREE(cso);
979 return PIPE_ERROR_OUT_OF_MEMORY;
980 }
981
982 handle = cso->data;
983 }
984 else {
985 handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
986 }
987 }
988
989 info->samplers[idx] = handle;
990
991 return PIPE_OK;
992 }
993
994 enum pipe_error
995 cso_single_sampler(struct cso_context *ctx,
996 unsigned idx,
997 const struct pipe_sampler_state *templ)
998 {
999 return single_sampler(ctx, &ctx->fragment_samplers, idx, templ);
1000 }
1001
1002 enum pipe_error
1003 cso_single_vertex_sampler(struct cso_context *ctx,
1004 unsigned idx,
1005 const struct pipe_sampler_state *templ)
1006 {
1007 return single_sampler(ctx, &ctx->vertex_samplers, idx, templ);
1008 }
1009
1010
1011
1012 static void
1013 single_sampler_done(struct cso_context *ctx,
1014 struct sampler_info *info)
1015 {
1016 unsigned i;
1017
1018 /* find highest non-null sampler */
1019 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1020 if (info->samplers[i - 1] != NULL)
1021 break;
1022 }
1023
1024 info->nr_samplers = i;
1025
1026 if (info->hw.nr_samplers != info->nr_samplers ||
1027 memcmp(info->hw.samplers,
1028 info->samplers,
1029 info->nr_samplers * sizeof(void *)) != 0)
1030 {
1031 memcpy(info->hw.samplers,
1032 info->samplers,
1033 info->nr_samplers * sizeof(void *));
1034 info->hw.nr_samplers = info->nr_samplers;
1035
1036 if (info == &ctx->fragment_samplers) {
1037 ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
1038 info->nr_samplers,
1039 info->samplers);
1040 }
1041 else if (info == &ctx->vertex_samplers) {
1042 ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
1043 info->nr_samplers,
1044 info->samplers);
1045 }
1046 else {
1047 assert(0);
1048 }
1049 }
1050 }
1051
1052 void
1053 cso_single_sampler_done( struct cso_context *ctx )
1054 {
1055 single_sampler_done(ctx, &ctx->fragment_samplers);
1056 }
1057
1058 void
1059 cso_single_vertex_sampler_done(struct cso_context *ctx)
1060 {
1061 single_sampler_done(ctx, &ctx->vertex_samplers);
1062 }
1063
1064
1065 /*
1066 * If the function encounters any errors, it will return the
1067 * last one. This is done so we always try to set as many samplers
1068 * as possible.
1069 */
1070 static enum pipe_error
1071 set_samplers(struct cso_context *ctx,
1072 struct sampler_info *info,
1073 unsigned nr,
1074 const struct pipe_sampler_state **templates)
1075 {
1076 unsigned i;
1077 enum pipe_error temp, error = PIPE_OK;
1078
1079 /* TODO: fastpath
1080 */
1081
1082 for (i = 0; i < nr; i++) {
1083 temp = single_sampler(ctx, info, i, templates[i]);
1084 if (temp != PIPE_OK)
1085 error = temp;
1086 }
1087
1088 for ( ; i < info->nr_samplers; i++) {
1089 temp = single_sampler(ctx, info, i, NULL);
1090 if (temp != PIPE_OK)
1091 error = temp;
1092 }
1093
1094 single_sampler_done(ctx, info);
1095
1096 return error;
1097 }
1098
1099 enum pipe_error
1100 cso_set_samplers(struct cso_context *ctx,
1101 unsigned nr,
1102 const struct pipe_sampler_state **templates)
1103 {
1104 return set_samplers(ctx, &ctx->fragment_samplers, nr, templates);
1105 }
1106
1107 enum pipe_error
1108 cso_set_vertex_samplers(struct cso_context *ctx,
1109 unsigned nr,
1110 const struct pipe_sampler_state **templates)
1111 {
1112 return set_samplers(ctx, &ctx->vertex_samplers, nr, templates);
1113 }
1114
1115
1116
1117 static void
1118 save_samplers(struct cso_context *ctx, struct sampler_info *info)
1119 {
1120 info->nr_samplers_saved = info->nr_samplers;
1121 memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
1122 }
1123
1124 void
1125 cso_save_samplers(struct cso_context *ctx)
1126 {
1127 save_samplers(ctx, &ctx->fragment_samplers);
1128 }
1129
1130 void
1131 cso_save_vertex_samplers(struct cso_context *ctx)
1132 {
1133 save_samplers(ctx, &ctx->vertex_samplers);
1134 }
1135
1136
1137
1138 static void
1139 restore_samplers(struct cso_context *ctx, struct sampler_info *info)
1140 {
1141 info->nr_samplers = info->nr_samplers_saved;
1142 memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
1143 single_sampler_done(ctx, info);
1144 }
1145
1146 void
1147 cso_restore_samplers(struct cso_context *ctx)
1148 {
1149 restore_samplers(ctx, &ctx->fragment_samplers);
1150 }
1151
1152 void
1153 cso_restore_vertex_samplers(struct cso_context *ctx)
1154 {
1155 restore_samplers(ctx, &ctx->vertex_samplers);
1156 }
1157
1158
1159
1160 static void
1161 set_sampler_views(struct cso_context *ctx,
1162 struct sampler_info *info,
1163 void (*set_views)(struct pipe_context *,
1164 unsigned num_views,
1165 struct pipe_sampler_view **),
1166 uint count,
1167 struct pipe_sampler_view **views)
1168 {
1169 uint i;
1170
1171 /* reference new views */
1172 for (i = 0; i < count; i++) {
1173 pipe_sampler_view_reference(&info->views[i], views[i]);
1174 }
1175 /* unref extra old views, if any */
1176 for (; i < info->nr_views; i++) {
1177 pipe_sampler_view_reference(&info->views[i], NULL);
1178 }
1179
1180 info->nr_views = count;
1181
1182 /* bind the new sampler views */
1183 set_views(ctx->pipe, count, info->views);
1184 }
1185
1186 void
1187 cso_set_fragment_sampler_views(struct cso_context *ctx,
1188 uint count,
1189 struct pipe_sampler_view **views)
1190 {
1191 set_sampler_views(ctx, &ctx->fragment_samplers,
1192 ctx->pipe->set_fragment_sampler_views,
1193 count, views);
1194 }
1195
1196 void
1197 cso_set_vertex_sampler_views(struct cso_context *ctx,
1198 uint count,
1199 struct pipe_sampler_view **views)
1200 {
1201 set_sampler_views(ctx, &ctx->vertex_samplers,
1202 ctx->pipe->set_vertex_sampler_views,
1203 count, views);
1204 }
1205
1206
1207
1208 static void
1209 save_sampler_views(struct cso_context *ctx,
1210 struct sampler_info *info)
1211 {
1212 uint i;
1213
1214 info->nr_views_saved = info->nr_views;
1215
1216 for (i = 0; i < info->nr_views; i++) {
1217 assert(!info->views_saved[i]);
1218 pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
1219 }
1220 }
1221
1222 void
1223 cso_save_fragment_sampler_views(struct cso_context *ctx)
1224 {
1225 save_sampler_views(ctx, &ctx->fragment_samplers);
1226 }
1227
1228 void
1229 cso_save_vertex_sampler_views(struct cso_context *ctx)
1230 {
1231 save_sampler_views(ctx, &ctx->vertex_samplers);
1232 }
1233
1234
1235 static void
1236 restore_sampler_views(struct cso_context *ctx,
1237 struct sampler_info *info,
1238 void (*set_views)(struct pipe_context *,
1239 unsigned num_views,
1240 struct pipe_sampler_view **))
1241 {
1242 uint i;
1243
1244 for (i = 0; i < info->nr_views_saved; i++) {
1245 pipe_sampler_view_reference(&info->views[i], NULL);
1246 /* move the reference from one pointer to another */
1247 info->views[i] = info->views_saved[i];
1248 info->views_saved[i] = NULL;
1249 }
1250 for (; i < info->nr_views; i++) {
1251 pipe_sampler_view_reference(&info->views[i], NULL);
1252 }
1253
1254 /* bind the old/saved sampler views */
1255 set_views(ctx->pipe, info->nr_views_saved, info->views);
1256
1257 info->nr_views = info->nr_views_saved;
1258 info->nr_views_saved = 0;
1259 }
1260
1261 void
1262 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1263 {
1264 restore_sampler_views(ctx, &ctx->fragment_samplers,
1265 ctx->pipe->set_fragment_sampler_views);
1266 }
1267
1268 void
1269 cso_restore_vertex_sampler_views(struct cso_context *ctx)
1270 {
1271 restore_sampler_views(ctx, &ctx->vertex_samplers,
1272 ctx->pipe->set_vertex_sampler_views);
1273 }
1274
1275
1276 void
1277 cso_set_stream_outputs(struct cso_context *ctx,
1278 unsigned num_targets,
1279 struct pipe_stream_output_target **targets,
1280 unsigned append_bitmask)
1281 {
1282 struct pipe_context *pipe = ctx->pipe;
1283 uint i;
1284
1285 if (!ctx->has_streamout) {
1286 assert(num_targets == 0);
1287 return;
1288 }
1289
1290 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1291 /* Nothing to do. */
1292 return;
1293 }
1294
1295 /* reference new targets */
1296 for (i = 0; i < num_targets; i++) {
1297 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1298 }
1299 /* unref extra old targets, if any */
1300 for (; i < ctx->nr_so_targets; i++) {
1301 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1302 }
1303
1304 pipe->set_stream_output_targets(pipe, num_targets, targets,
1305 append_bitmask);
1306 ctx->nr_so_targets = num_targets;
1307 }
1308
1309 void
1310 cso_save_stream_outputs(struct cso_context *ctx)
1311 {
1312 uint i;
1313
1314 if (!ctx->has_streamout) {
1315 return;
1316 }
1317
1318 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1319
1320 for (i = 0; i < ctx->nr_so_targets; i++) {
1321 assert(!ctx->so_targets_saved[i]);
1322 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1323 }
1324 }
1325
1326 void
1327 cso_restore_stream_outputs(struct cso_context *ctx)
1328 {
1329 struct pipe_context *pipe = ctx->pipe;
1330 uint i;
1331
1332 if (!ctx->has_streamout) {
1333 return;
1334 }
1335
1336 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1337 /* Nothing to do. */
1338 return;
1339 }
1340
1341 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1342 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1343 /* move the reference from one pointer to another */
1344 ctx->so_targets[i] = ctx->so_targets_saved[i];
1345 ctx->so_targets_saved[i] = NULL;
1346 }
1347 for (; i < ctx->nr_so_targets; i++) {
1348 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1349 }
1350
1351 /* ~0 means append */
1352 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1353 ctx->so_targets, ~0);
1354
1355 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1356 ctx->nr_so_targets_saved = 0;
1357 }
1358
1359 /* drawing */
1360
1361 void
1362 cso_set_index_buffer(struct cso_context *cso,
1363 const struct pipe_index_buffer *ib)
1364 {
1365 struct u_vbuf *vbuf = cso->vbuf;
1366
1367 if (vbuf) {
1368 u_vbuf_set_index_buffer(vbuf, ib);
1369 } else {
1370 struct pipe_context *pipe = cso->pipe;
1371 pipe->set_index_buffer(pipe, ib);
1372 }
1373 }
1374
1375 void
1376 cso_draw_vbo(struct cso_context *cso,
1377 const struct pipe_draw_info *info)
1378 {
1379 struct u_vbuf *vbuf = cso->vbuf;
1380
1381 if (vbuf) {
1382 u_vbuf_draw_vbo(vbuf, info);
1383 } else {
1384 struct pipe_context *pipe = cso->pipe;
1385 pipe->draw_vbo(pipe, info);
1386 }
1387 }
1388
1389 void
1390 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1391 {
1392 struct pipe_draw_info info;
1393
1394 util_draw_init_info(&info);
1395
1396 info.mode = mode;
1397 info.start = start;
1398 info.count = count;
1399 info.min_index = start;
1400 info.max_index = start + count - 1;
1401
1402 cso_draw_vbo(cso, &info);
1403 }
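
/*
 * Illustrative only: drawing a screen-aligned quad as a 4-vertex triangle
 * fan with the helper above, assuming vertex buffers and elements have
 * already been set through this context:
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4);
 */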