Merge branch 'gallium-msaa'
[mesa.git] / src / gallium / state_trackers / vega / vg_context.c
/**************************************************************************
 *
 * Copyright 2009 VMware, Inc.  All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "vg_context.h"

#include "paint.h"
#include "renderer.h"
#include "shaders_cache.h"
#include "shader.h"
#include "asm_util.h"
#include "st_inlines.h"
#include "vg_manager.h"
#include "api.h"

#include "pipe/p_context.h"
#include "util/u_inlines.h"
#include "pipe/p_shader_tokens.h"

#include "cso_cache/cso_context.h"

#include "util/u_simple_shaders.h"
#include "util/u_memory.h"
#include "util/u_blit.h"
#include "util/u_sampler.h"

struct vg_context *_vg_context = 0;

struct vg_context * vg_current_context(void)
{
   return _vg_context;
}

static void init_clear(struct vg_context *st)
{
   struct pipe_context *pipe = st->pipe;

   /* rasterizer state: bypass clipping */
   memset(&st->clear.raster, 0, sizeof(st->clear.raster));
   st->clear.raster.gl_rasterization_rules = 1;

   /* fragment shader state: color pass-through program */
   st->clear.fs =
      util_make_fragment_passthrough_shader(pipe);
}
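
/**
 * Make the given context the current one and point the API dispatch table
 * at it; passing NULL unbinds the dispatch.
 */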
void vg_set_current_context(struct vg_context *ctx)
{
   _vg_context = ctx;
   api_make_dispatch_current((ctx) ? ctx->dispatch : NULL);
}

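/**
 * Create an OpenVG context on top of the given gallium pipe context and
 * initialize its default state: paint, mask/blend samplers, vertex
 * elements, per-type object hashes, renderer, shader cache and blitter.
 * The visual and share arguments are not used here.
 */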
struct vg_context * vg_create_context(struct pipe_context *pipe,
                                      const void *visual,
                                      struct vg_context *share)
{
   struct vg_context *ctx;
   unsigned i;

   ctx = CALLOC_STRUCT(vg_context);

   ctx->pipe = pipe;

   ctx->dispatch = api_create_dispatch();

   vg_init_state(&ctx->state.vg);
   ctx->state.dirty = ALL_DIRTY;

   ctx->cso_context = cso_create_context(pipe);

   init_clear(ctx);

   ctx->default_paint = paint_create(ctx);
   ctx->state.vg.stroke_paint = ctx->default_paint;
   ctx->state.vg.fill_paint = ctx->default_paint;

   ctx->mask.sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   ctx->mask.sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   ctx->mask.sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   ctx->mask.sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   ctx->mask.sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   ctx->mask.sampler.normalized_coords = 0;

   ctx->blend_sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   ctx->blend_sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   ctx->blend_sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   ctx->blend_sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   ctx->blend_sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   ctx->blend_sampler.normalized_coords = 0;

   for (i = 0; i < 2; i++) {
      ctx->velems[i].src_offset = i * 4 * sizeof(float);
      ctx->velems[i].instance_divisor = 0;
      ctx->velems[i].vertex_buffer_index = 0;
      ctx->velems[i].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   }

   vg_set_error(ctx, VG_NO_ERROR);

   ctx->owned_objects[VG_OBJECT_PAINT] = cso_hash_create();
   ctx->owned_objects[VG_OBJECT_IMAGE] = cso_hash_create();
   ctx->owned_objects[VG_OBJECT_MASK] = cso_hash_create();
   ctx->owned_objects[VG_OBJECT_FONT] = cso_hash_create();
   ctx->owned_objects[VG_OBJECT_PATH] = cso_hash_create();

   ctx->renderer = renderer_create(ctx);
   ctx->sc = shaders_cache_create(ctx);
   ctx->shader = shader_create(ctx);

   ctx->blit = util_create_blit(ctx->pipe, ctx->cso_context);

   return ctx;
}

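/**
 * Destroy a context: release the blitter, renderer, shader cache, default
 * paint, constant buffers and cached shaders, tear down the CSO context
 * and the per-type object hashes, then free the context itself.
 */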
void vg_destroy_context(struct vg_context *ctx)
{
   struct pipe_resource **cbuf = &ctx->mask.cbuf;
   struct pipe_resource **vsbuf = &ctx->vs_const_buffer;

   util_destroy_blit(ctx->blit);
   renderer_destroy(ctx->renderer);
   shaders_cache_destroy(ctx->sc);
   shader_destroy(ctx->shader);
   paint_destroy(ctx->default_paint);

   if (*cbuf)
      pipe_resource_reference(cbuf, NULL);

   if (*vsbuf)
      pipe_resource_reference(vsbuf, NULL);

   if (ctx->clear.fs) {
      cso_delete_fragment_shader(ctx->cso_context, ctx->clear.fs);
      ctx->clear.fs = NULL;
   }

   if (ctx->plain_vs) {
      vg_shader_destroy(ctx, ctx->plain_vs);
      ctx->plain_vs = NULL;
   }
   if (ctx->clear_vs) {
      vg_shader_destroy(ctx, ctx->clear_vs);
      ctx->clear_vs = NULL;
   }
   if (ctx->texture_vs) {
      vg_shader_destroy(ctx, ctx->texture_vs);
      ctx->texture_vs = NULL;
   }

   if (ctx->pass_through_depth_fs)
      vg_shader_destroy(ctx, ctx->pass_through_depth_fs);
   if (ctx->mask.union_fs)
      vg_shader_destroy(ctx, ctx->mask.union_fs);
   if (ctx->mask.intersect_fs)
      vg_shader_destroy(ctx, ctx->mask.intersect_fs);
   if (ctx->mask.subtract_fs)
      vg_shader_destroy(ctx, ctx->mask.subtract_fs);
   if (ctx->mask.set_fs)
      vg_shader_destroy(ctx, ctx->mask.set_fs);

   cso_release_all(ctx->cso_context);
   cso_destroy_context(ctx->cso_context);

   cso_hash_delete(ctx->owned_objects[VG_OBJECT_PAINT]);
   cso_hash_delete(ctx->owned_objects[VG_OBJECT_IMAGE]);
   cso_hash_delete(ctx->owned_objects[VG_OBJECT_MASK]);
   cso_hash_delete(ctx->owned_objects[VG_OBJECT_FONT]);
   cso_hash_delete(ctx->owned_objects[VG_OBJECT_PATH]);

   api_destroy_dispatch(ctx->dispatch);

   free(ctx);
}

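/**
 * Initialize the header common to all OpenVG objects: record the object's
 * type and the context that owns it.
 */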
void vg_init_object(struct vg_object *obj, struct vg_context *ctx, enum vg_object_type type)
{
   obj->type = type;
   obj->ctx = ctx;
}

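/*
 * Handle tracking.  The context keeps one cso_hash per object type, keyed
 * by the object's pointer value; these helpers validate, register and
 * unregister handles passed in through the public API.
 */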
VGboolean vg_context_is_object_valid(struct vg_context *ctx,
                                     enum vg_object_type type,
                                     void *ptr)
{
   if (ctx) {
      struct cso_hash *hash = ctx->owned_objects[type];
      if (!hash)
         return VG_FALSE;
      return cso_hash_contains(hash, (unsigned)(long)ptr);
   }
   return VG_FALSE;
}

void vg_context_add_object(struct vg_context *ctx,
                           enum vg_object_type type,
                           void *ptr)
{
   if (ctx) {
      struct cso_hash *hash = ctx->owned_objects[type];
      if (!hash)
         return;
      cso_hash_insert(hash, (unsigned)(long)ptr, ptr);
   }
}

void vg_context_remove_object(struct vg_context *ctx,
                              enum vg_object_type type,
                              void *ptr)
{
   if (ctx) {
      struct cso_hash *hash = ctx->owned_objects[type];
      if (!hash)
         return;
      cso_hash_take(hash, (unsigned)(long)ptr);
   }
}

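/*
 * OpenVG scissoring is implemented via the depth buffer instead of the
 * hardware scissor (an arbitrary number of scissor rectangles may be set).
 * When scissoring is enabled the depth buffer is cleared and every scissor
 * rectangle is drawn into it with color writes disabled; the depth test is
 * then left enabled with PIPE_FUNC_GEQUAL and depth writes off, so later
 * rendering is clipped to the union of the rectangles.
 */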
static void update_clip_state(struct vg_context *ctx)
{
   struct pipe_depth_stencil_alpha_state *dsa = &ctx->state.g3d.dsa;
   struct vg_state *state = &ctx->state.vg;

   memset(dsa, 0, sizeof(struct pipe_depth_stencil_alpha_state));

   if (state->scissoring) {
      struct pipe_blend_state *blend = &ctx->state.g3d.blend;
      struct pipe_framebuffer_state *fb = &ctx->state.g3d.fb;
      int i;

      dsa->depth.writemask = 1; /* glDepthMask(TRUE) */
      dsa->depth.func = PIPE_FUNC_ALWAYS;
      dsa->depth.enabled = 1;

      cso_save_blend(ctx->cso_context);
      cso_save_fragment_shader(ctx->cso_context);
      /* set a passthrough shader */
      if (!ctx->pass_through_depth_fs)
         ctx->pass_through_depth_fs = shader_create_from_text(ctx->pipe,
                                                              pass_through_depth_asm,
                                                              40,
                                                              PIPE_SHADER_FRAGMENT);
      cso_set_fragment_shader_handle(ctx->cso_context,
                                     ctx->pass_through_depth_fs->driver);
      cso_set_depth_stencil_alpha(ctx->cso_context, dsa);

      ctx->pipe->clear(ctx->pipe, PIPE_CLEAR_DEPTHSTENCIL, NULL, 1.0, 0);

      /* disable color writes */
      blend->rt[0].colormask = 0;
      cso_set_blend(ctx->cso_context, blend);

      /* enable scissoring */
      for (i = 0; i < state->scissor_rects_num; ++i) {
         const float x = state->scissor_rects[i * 4 + 0].f;
         const float y = state->scissor_rects[i * 4 + 1].f;
         const float width = state->scissor_rects[i * 4 + 2].f;
         const float height = state->scissor_rects[i * 4 + 3].f;
         VGfloat minx, miny, maxx, maxy;

         minx = 0;
         miny = 0;
         maxx = fb->width;
         maxy = fb->height;

         if (x > minx)
            minx = x;
         if (y > miny)
            miny = y;

         if (x + width < maxx)
            maxx = x + width;
         if (y + height < maxy)
            maxy = y + height;

         /* check for null space */
         if (minx >= maxx || miny >= maxy)
            minx = miny = maxx = maxy = 0;

         /* glClear(GL_DEPTH_BUFFER_BIT) */
         renderer_draw_quad(ctx->renderer, minx, miny, maxx, maxy, 0.0f);
      }

      cso_restore_blend(ctx->cso_context);
      cso_restore_fragment_shader(ctx->cso_context);

      dsa->depth.enabled = 1;   /* glEnable(GL_DEPTH_TEST) */
      dsa->depth.writemask = 0; /* glDepthMask(FALSE) */
      dsa->depth.func = PIPE_FUNC_GEQUAL;
   }
}

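/**
 * Flush dirty OpenVG state to the gallium CSO context.  Blend, rasterizer,
 * viewport/vertex-shader constants, vertex shader and depth/stencil state
 * are only re-emitted when the corresponding dirty bit is set; the
 * depth/stencil update runs last because update_clip_state() renders the
 * scissor rectangles into the depth buffer.
 */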
void vg_validate_state(struct vg_context *ctx)
{
   vg_manager_validate_framebuffer(ctx);

   if ((ctx->state.dirty & BLEND_DIRTY)) {
      struct pipe_blend_state *blend = &ctx->state.g3d.blend;
      memset(blend, 0, sizeof(struct pipe_blend_state));
      blend->rt[0].blend_enable = 1;
      blend->rt[0].colormask = PIPE_MASK_RGBA;

      switch (ctx->state.vg.blend_mode) {
      case VG_BLEND_SRC:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].blend_enable = 0;
         break;
      case VG_BLEND_SRC_OVER:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
         break;
      case VG_BLEND_DST_OVER:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_INV_DST_ALPHA;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_INV_DST_ALPHA;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_DST_ALPHA;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_DST_ALPHA;
         break;
      case VG_BLEND_SRC_IN:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_DST_ALPHA;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_DST_ALPHA;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;
         break;
      case VG_BLEND_DST_IN:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
         break;
      case VG_BLEND_MULTIPLY:
      case VG_BLEND_SCREEN:
      case VG_BLEND_DARKEN:
      case VG_BLEND_LIGHTEN:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;
         blend->rt[0].blend_enable = 0;
         break;
      case VG_BLEND_ADDITIVE:
         blend->rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ONE;
         blend->rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
         break;
      default:
         assert(!"not implemented blend mode");
      }
      cso_set_blend(ctx->cso_context, &ctx->state.g3d.blend);
   }
   if ((ctx->state.dirty & RASTERIZER_DIRTY)) {
      struct pipe_rasterizer_state *raster = &ctx->state.g3d.rasterizer;
      memset(raster, 0, sizeof(struct pipe_rasterizer_state));
      raster->gl_rasterization_rules = 1;
      cso_set_rasterizer(ctx->cso_context, &ctx->state.g3d.rasterizer);
   }
   if ((ctx->state.dirty & VIEWPORT_DIRTY)) {
      struct pipe_framebuffer_state *fb = &ctx->state.g3d.fb;
      const VGint param_bytes = 8 * sizeof(VGfloat);
      VGfloat vs_consts[8] = {
         2.f/fb->width, 2.f/fb->height, 1, 1,
         -1, -1, 0, 0
      };
      struct pipe_resource **cbuf = &ctx->vs_const_buffer;

      vg_set_viewport(ctx, VEGA_Y0_BOTTOM);

      pipe_resource_reference(cbuf, NULL);
      *cbuf = pipe_buffer_create(ctx->pipe->screen,
                                 PIPE_BIND_CONSTANT_BUFFER,
                                 param_bytes);

      if (*cbuf) {
         st_no_flush_pipe_buffer_write(ctx, *cbuf,
                                       0, param_bytes, vs_consts);
      }
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, *cbuf);
   }
   if ((ctx->state.dirty & VS_DIRTY)) {
      cso_set_vertex_shader_handle(ctx->cso_context,
                                   vg_plain_vs(ctx));
   }

   /* must be last because it renders to the depth buffer */
   if ((ctx->state.dirty & DEPTH_STENCIL_DIRTY)) {
      update_clip_state(ctx);
      cso_set_depth_stencil_alpha(ctx->cso_context, &ctx->state.g3d.dsa);
   }

   shader_set_masking(ctx->shader, ctx->state.vg.masking);
   shader_set_image_mode(ctx->shader, ctx->state.vg.image_mode);

   ctx->state.dirty = NONE_DIRTY;
}

VGboolean vg_object_is_valid(void *ptr, enum vg_object_type type)
{
   struct vg_object *obj = ptr;
   if (ptr && is_aligned(obj) && obj->type == type)
      return VG_TRUE;
   else
      return VG_FALSE;
}

void vg_set_error(struct vg_context *ctx,
                  VGErrorCode code)
{
   /* vgGetError returns the oldest error code provided by
    * an API call on the current context since the previous
    * call to vgGetError on that context (or since the creation
    * of the context). */
   if (ctx->_error == VG_NO_ERROR)
      ctx->_error = code;
}

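/**
 * Copy the currently rendered color buffer into the framebuffer's blend
 * texture.  The blit flips the image vertically because the copy is later
 * bound as a sampler source, and vgFinish() brackets the copy so all
 * pending rendering is complete.
 */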
void vg_prepare_blend_surface(struct vg_context *ctx)
{
   struct pipe_surface *dest_surface = NULL;
   struct pipe_context *pipe = ctx->pipe;
   struct pipe_sampler_view *view;
   struct pipe_sampler_view view_templ;
   struct st_framebuffer *stfb = ctx->draw_buffer;
   struct st_renderbuffer *strb = stfb->strb;

   /* first finish all pending rendering */
   vgFinish();

   u_sampler_view_default_template(&view_templ, strb->texture, strb->texture->format);
   view = pipe->create_sampler_view(pipe, strb->texture, &view_templ);

   dest_surface = pipe->screen->get_tex_surface(pipe->screen,
                                                stfb->blend_texture_view->texture,
                                                0, 0, 0,
                                                PIPE_BIND_RENDER_TARGET);
   /* flip it, because we want to use it as a sampler */
   util_blit_pixels_tex(ctx->blit,
                        view,
                        0, strb->height,
                        strb->width, 0,
                        dest_surface,
                        0, 0,
                        strb->width, strb->height,
                        0.0, PIPE_TEX_MIPFILTER_NEAREST);

   if (dest_surface)
      pipe_surface_reference(&dest_surface, NULL);

   /* make sure it's complete */
   vgFinish();

   pipe_sampler_view_reference(&view, NULL);
}

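/**
 * Like vg_prepare_blend_surface(), but copies the framebuffer's alpha mask
 * view into the blend texture instead of the color buffer.
 */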
void vg_prepare_blend_surface_from_mask(struct vg_context *ctx)
{
   struct pipe_surface *dest_surface = NULL;
   struct pipe_context *pipe = ctx->pipe;
   struct st_framebuffer *stfb = ctx->draw_buffer;
   struct st_renderbuffer *strb = stfb->strb;

   vg_validate_state(ctx);

   /* first finish all pending rendering */
   vgFinish();

   dest_surface = pipe->screen->get_tex_surface(pipe->screen,
                                                stfb->blend_texture_view->texture,
                                                0, 0, 0,
                                                PIPE_BIND_RENDER_TARGET);

   /* flip it, because we want to use it as a sampler */
   util_blit_pixels_tex(ctx->blit,
                        stfb->alpha_mask_view,
                        0, strb->height,
                        strb->width, 0,
                        dest_surface,
                        0, 0,
                        strb->width, strb->height,
                        0.0, PIPE_TEX_MIPFILTER_NEAREST);

   /* make sure it's complete */
   vgFinish();

   if (dest_surface)
      pipe_surface_reference(&dest_surface, NULL);
}

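/*
 * Lazily-created vertex shaders: each getter assembles its shader from the
 * corresponding TGSI text the first time it is called and returns the
 * cached driver handle afterwards.
 */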
void * vg_plain_vs(struct vg_context *ctx)
{
   if (!ctx->plain_vs) {
      ctx->plain_vs = shader_create_from_text(ctx->pipe,
                                              vs_plain_asm,
                                              200,
                                              PIPE_SHADER_VERTEX);
   }

   return ctx->plain_vs->driver;
}

void * vg_clear_vs(struct vg_context *ctx)
{
   if (!ctx->clear_vs) {
      ctx->clear_vs = shader_create_from_text(ctx->pipe,
                                              vs_clear_asm,
                                              200,
                                              PIPE_SHADER_VERTEX);
   }

   return ctx->clear_vs->driver;
}

void * vg_texture_vs(struct vg_context *ctx)
{
   if (!ctx->texture_vs) {
      ctx->texture_vs = shader_create_from_text(ctx->pipe,
                                                vs_texture_asm,
                                                200,
                                                PIPE_SHADER_VERTEX);
   }

   return ctx->texture_vs->driver;
}

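/**
 * Program the viewport transform for the current framebuffer size.  The
 * scale/translate pair maps clip-space [-1, 1] to the full framebuffer;
 * with VEGA_Y0_BOTTOM the Y scale is negated to flip the vertical axis,
 * matching OpenVG's bottom-left origin.
 */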
void vg_set_viewport(struct vg_context *ctx, VegaOrientation orientation)
{
   struct pipe_viewport_state viewport;
   struct pipe_framebuffer_state *fb = &ctx->state.g3d.fb;
   VGfloat y_scale = (orientation == VEGA_Y0_BOTTOM) ? -2.f : 2.f;

   viewport.scale[0] = fb->width / 2.f;
   viewport.scale[1] = fb->height / y_scale;
   viewport.scale[2] = 1.0;
   viewport.scale[3] = 1.0;
   viewport.translate[0] = fb->width / 2.f;
   viewport.translate[1] = fb->height / 2.f;
   viewport.translate[2] = 0.0;
   viewport.translate[3] = 0.0;

   cso_set_viewport(ctx->cso_context, &viewport);
}