[mesa.git] / src / gallium / auxiliary / vl / vl_compositor.c
/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include <pipe/p_compiler.h>
#include <pipe/p_context.h>

#include <util/u_memory.h>
#include <util/u_draw.h>
#include <util/u_surface.h>

#include <tgsi/tgsi_ureg.h>

#include "vl_csc.h"
#include "vl_types.h"
#include "vl_compositor.h"

typedef float csc_matrix[16];

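/* Pass-through vertex shader: copies the incoming position and texture
 * coordinate straight through to the outputs. */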
static void *
create_vert_shader(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src vpos, vtex;
   struct ureg_dst o_vpos, o_vtex;

   shader = ureg_create(TGSI_PROCESSOR_VERTEX);
   if (!shader)
      return NULL;

   vpos = ureg_DECL_vs_input(shader, 0);
   vtex = ureg_DECL_vs_input(shader, 1);
   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, 0);
   o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, 1);

   /*
    * o_vpos = vpos
    * o_vtex = vtex
    */
   ureg_MOV(shader, o_vpos, vpos);
   ureg_MOV(shader, o_vtex, vtex);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

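/* Fragment shader for planar YCbCr video buffers: fetches Y, Cb and Cr from
 * three separate sampler views and converts them to RGB with the CSC matrix
 * rows held in the constant buffer; alpha is forced to 1.0. */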
static void *
create_frag_shader_video_buffer(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src csc[3];
   struct ureg_src sampler[3];
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   for (i = 0; i < 3; ++i) {
      csc[i] = ureg_DECL_constant(shader, i);
      sampler[i] = ureg_DECL_sampler(shader, i);
   }
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel.xyz = tex(tc, sampler[i])
    * fragment = csc * texel
    */
   for (i = 0; i < 3; ++i)
      ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D, tc, sampler[i]);

   ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

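/* Fragment shader for indexed (paletted) subpictures: the index is fetched
 * from the first sampler, rescaled and used to look up the color in a 1D
 * palette texture; the palette entry is run through the CSC matrix while
 * alpha is taken from the index texel. */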
static void *
create_frag_shader_palette(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   palette = ureg_DECL_sampler(shader, 1);
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MUL(shader, ureg_writemask(texel, TGSI_WRITEMASK_X), ureg_src(texel), ureg_imm1f(shader, 15.0f / 16.0f));
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

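/* Fragment shader for RGBA layers: a plain texture fetch, no color conversion. */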
static void *
create_frag_shader_rgba(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_dst fragment;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * fragment = tex(tc, sampler)
    */
   ureg_TEX(shader, fragment, TGSI_TEXTURE_2D, tc, sampler);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   c->vs = create_vert_shader(c);
   if (!c->vs) {
      debug_printf("Unable to create vertex shader.\n");
      return false;
   }

   c->fs_video_buffer = create_frag_shader_video_buffer(c);
   if (!c->fs_video_buffer) {
      debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette = create_frag_shader_palette(c);
   if (!c->fs_palette) {
      debug_printf("Unable to create Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgba = create_frag_shader_rgba(c);
   if (!c->fs_rgba) {
      debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
      return false;
   }

   return true;
}

static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vs_state(c->pipe, c->vs);
   c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
}

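/* Creates the fixed pipe state used by the compositor: linear and nearest
 * samplers, src-alpha/inv-src-alpha blending, a scissored rasterizer and a
 * pass-through depth/stencil/alpha state. */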
static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_depth_stencil_alpha_state dsa;
   unsigned i;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   c->viewport.scale[2] = 1;
   c->viewport.scale[3] = 1;
   c->viewport.translate[2] = 0;
   c->viewport.translate[3] = 0;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;

   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

   memset(&blend, 0, sizeof blend);
   blend.independent_blend_enable = 0;
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.logicop_enable = 0;
   blend.logicop_func = PIPE_LOGICOP_CLEAR;
   blend.rt[0].colormask = PIPE_MASK_RGBA;
   blend.dither = 0;
   c->blend = c->pipe->create_blend_state(c->pipe, &blend);

   memset(&rast, 0, sizeof rast);
   rast.flatshade = 1;
   rast.front_ccw = 1;
   rast.cull_face = PIPE_FACE_NONE;
   rast.fill_back = PIPE_POLYGON_MODE_FILL;
   rast.fill_front = PIPE_POLYGON_MODE_FILL;
   rast.scissor = 1;
   rast.line_width = 1;
   rast.point_size_per_vertex = 1;
   rast.offset_units = 1;
   rast.offset_scale = 1;
   rast.gl_rasterization_rules = 1;

   c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

   memset(&dsa, 0, sizeof dsa);
   dsa.depth.enabled = 0;
   dsa.depth.writemask = 0;
   dsa.depth.func = PIPE_FUNC_ALWAYS;
   for (i = 0; i < 2; ++i) {
      dsa.stencil[i].enabled = 0;
      dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
      dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].valuemask = 0;
      dsa.stencil[i].writemask = 0;
   }
   dsa.alpha.enabled = 0;
   dsa.alpha.func = PIPE_FUNC_ALWAYS;
   dsa.alpha.ref_value = 0;
   c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
   c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
   return true;
}

static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   /* Asserted in softpipe_delete_fs_state() for some reason */
   c->pipe->bind_vs_state(c->pipe, NULL);
   c->pipe->bind_fs_state(c->pipe, NULL);

   c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
   c->pipe->delete_blend_state(c->pipe, c->blend);
   c->pipe->delete_rasterizer_state(c->pipe, c->rast);
}

static bool
create_vertex_buffer(struct vl_compositor *c)
{
   assert(c);

   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   c->vertex_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_VERTEX_BUFFER,
      PIPE_USAGE_STREAM,
      sizeof(struct vertex4f) * VL_COMPOSITOR_MAX_LAYERS * 4
   );
   return c->vertex_buf.buffer != NULL;
}

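/* Sets up the streaming vertex buffer (one quad per layer, with the
 * destination position in the first element and the source texcoord in the
 * second), the matching vertex element layout and the constant buffer that
 * holds the CSC matrix. */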
static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[2];

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.stride = sizeof(struct vertex4f);
   c->vertex_buf.buffer_offset = 0;
   create_vertex_buffer(c);

   vertex_elems[0].src_offset = 0;
   vertex_elems[0].instance_divisor = 0;
   vertex_elems[0].vertex_buffer_index = 0;
   vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
   vertex_elems[1].src_offset = sizeof(struct vertex2f);
   vertex_elems[1].instance_divisor = 0;
   vertex_elems[1].vertex_buffer_index = 0;
   vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
   c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 2, vertex_elems);

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   c->csc_matrix = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_STATIC,
      sizeof(csc_matrix)
   );

   return true;
}

static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   pipe_resource_reference(&c->csc_matrix, NULL);
}

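/* Helpers that turn a pipe_video_rect into top-left/bottom-right coordinates
 * normalized to the given width/height; default_rect() covers the whole
 * texture of the layer's first sampler view. */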
static INLINE struct pipe_video_rect
default_rect(struct vl_compositor_layer *layer)
{
   struct pipe_resource *res = layer->sampler_views[0]->texture;
   struct pipe_video_rect rect = { 0, 0, res->width0, res->height0 };
   return rect;
}

static INLINE struct vertex2f
calc_topleft(struct vertex2f size, struct pipe_video_rect rect)
{
   struct vertex2f res = { rect.x / size.x, rect.y / size.y };
   return res;
}

static INLINE struct vertex2f
calc_bottomright(struct vertex2f size, struct pipe_video_rect rect)
{
   struct vertex2f res = { (rect.x + rect.w) / size.x, (rect.y + rect.h) / size.y };
   return res;
}

static INLINE void
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
                 struct pipe_video_rect src, struct pipe_video_rect dst)
{
   struct vertex2f size = { width, height };

   layer->src.tl = calc_topleft(size, src);
   layer->src.br = calc_bottomright(size, src);
   layer->dst.tl = calc_topleft(size, dst);
   layer->dst.br = calc_bottomright(size, dst);
}

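/* Emits one quad per active layer into the mapped vertex buffer. Each vertex
 * packs the destination position in xy and the source texture coordinate in
 * zw; a clearing layer that fully covers the current dirty area lets us skip
 * the explicit clear in vl_compositor_render(). */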
static void
gen_rect_verts(struct vertex4f *vb, struct vl_compositor_layer *layer)
{
   assert(vb && layer);

   vb[0].x = layer->dst.tl.x;
   vb[0].y = layer->dst.tl.y;
   vb[0].z = layer->src.tl.x;
   vb[0].w = layer->src.tl.y;

   vb[1].x = layer->dst.br.x;
   vb[1].y = layer->dst.tl.y;
   vb[1].z = layer->src.br.x;
   vb[1].w = layer->src.tl.y;

   vb[2].x = layer->dst.br.x;
   vb[2].y = layer->dst.br.y;
   vb[2].z = layer->src.br.x;
   vb[2].w = layer->src.br.y;

   vb[3].x = layer->dst.tl.x;
   vb[3].y = layer->dst.br.y;
   vb[3].z = layer->src.tl.x;
   vb[3].w = layer->src.br.y;
}

static void
gen_vertex_data(struct vl_compositor *c)
{
   struct vertex4f *vb;
   struct pipe_transfer *buf_transfer;
   unsigned i;

   assert(c);

   vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                        PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD | PIPE_TRANSFER_DONTBLOCK,
                        &buf_transfer);

   if (!vb) {
      // If the buffer is still locked from the last draw, create a new one
      create_vertex_buffer(c);
      vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                           PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                           &buf_transfer);
   }

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         gen_rect_verts(vb, layer);
         vb += 4;

         if (layer->clearing &&
             c->dirty_tl.x >= layer->dst.tl.x &&
             c->dirty_tl.y >= layer->dst.tl.y &&
             c->dirty_br.x <= layer->dst.br.x &&
             c->dirty_br.y <= layer->dst.br.y) {

            // We clear the dirty area anyway, no need for clear_render_target
            c->dirty_tl.x = c->dirty_tl.y = 1.0f;
            c->dirty_br.x = c->dirty_br.y = 0.0f;
         }
      }
   }

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

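/* Binds each active layer's fragment shader and sampler views and draws its
 * quad, growing the dirty area to cover everything that has been drawn. */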
static void
draw_layers(struct vl_compositor *c)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         struct pipe_sampler_view **samplers = &layer->sampler_views[0];
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;

         c->pipe->bind_fs_state(c->pipe, layer->fs);
         c->pipe->bind_fragment_sampler_states(c->pipe, num_sampler_views, layer->samplers);
         c->pipe->set_fragment_sampler_views(c->pipe, num_sampler_views, samplers);
         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;

         // Remember the currently drawn area as dirty for the next draw command
         c->dirty_tl.x = MIN2(layer->dst.tl.x, c->dirty_tl.x);
         c->dirty_tl.y = MIN2(layer->dst.tl.y, c->dirty_tl.y);
         c->dirty_br.x = MAX2(layer->dst.br.x, c->dirty_br.x);
         c->dirty_br.y = MAX2(layer->dst.br.y, c->dirty_br.y);
      }
   }
}

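/* The dirty area is tracked in normalized render-target coordinates; an empty
 * area is encoded as tl > br, which makes vl_compositor_render() skip the
 * clear. */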
void
vl_compositor_reset_dirty_area(struct vl_compositor *c)
{
   assert(c);

   c->dirty_tl.x = c->dirty_tl.y = 0.0f;
   c->dirty_br.x = c->dirty_br.y = 1.0f;
}

void
vl_compositor_set_clear_color(struct vl_compositor *c, float color[4])
{
   unsigned i;

   assert(c);

   for (i = 0; i < 4; ++i)
      c->clear_color[i] = color[i];
}

void
vl_compositor_clear_layers(struct vl_compositor *c)
{
   unsigned i, j;

   assert(c);

   c->used_layers = 0;
   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      c->layers[i].fs = NULL;
      for (j = 0; j < 3; j++)
         pipe_sampler_view_reference(&c->layers[i].sampler_views[j], NULL);
   }
}

void
vl_compositor_cleanup(struct vl_compositor *c)
{
   assert(c);

   vl_compositor_clear_layers(c);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);
}

void
vl_compositor_set_csc_matrix(struct vl_compositor *c, const float matrix[16])
{
   struct pipe_transfer *buf_transfer;

   assert(c);

   memcpy
   (
      pipe_buffer_map(c->pipe, c->csc_matrix,
                      PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                      &buf_transfer),
      matrix,
      sizeof(csc_matrix)
   );

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

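/* The vl_compositor_set_*_layer() functions only record the layer's fragment
 * shader, samplers, sampler views and normalized src/dst rectangles; nothing
 * is drawn until vl_compositor_render() is called. */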
void
vl_compositor_set_buffer_layer(struct vl_compositor *c,
                               unsigned layer,
                               struct pipe_video_buffer *buffer,
                               struct pipe_video_rect *src_rect,
                               struct pipe_video_rect *dst_rect)
{
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   assert(c && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = true;
   c->layers[layer].fs = c->fs_video_buffer;

   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      c->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&c->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&c->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

void
vl_compositor_set_palette_layer(struct vl_compositor *c,
                                unsigned layer,
                                struct pipe_sampler_view *indexes,
                                struct pipe_sampler_view *palette,
                                struct pipe_video_rect *src_rect,
                                struct pipe_video_rect *dst_rect)
{
   assert(c && indexes && palette);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = false;
   c->layers[layer].fs = c->fs_palette;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = c->sampler_nearest;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], indexes);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], palette);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], indexes->texture->width0, indexes->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

void
vl_compositor_set_rgba_layer(struct vl_compositor *c,
                             unsigned layer,
                             struct pipe_sampler_view *rgba,
                             struct pipe_video_rect *src_rect,
                             struct pipe_video_rect *dst_rect)
{
   assert(c && rgba);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = rgba->swizzle_a == PIPE_SWIZZLE_ONE;
   c->layers[layer].fs = c->fs_rgba;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = NULL;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], rgba);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], rgba->texture->width0, rgba->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

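/* Renders all layers currently set to dst_surface: dst_area selects the
 * viewport, dst_clip the scissor; the render target is cleared to clear_color
 * only when the tracked dirty area is non-empty. */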
void
vl_compositor_render(struct vl_compositor *c,
                     enum pipe_mpeg12_picture_type picture_type,
                     struct pipe_surface *dst_surface,
                     struct pipe_video_rect *dst_area,
                     struct pipe_video_rect *dst_clip)
{
   struct pipe_scissor_state scissor;

   assert(c);
   assert(dst_surface);

   c->fb_state.width = dst_surface->width;
   c->fb_state.height = dst_surface->height;
   c->fb_state.cbufs[0] = dst_surface;

   if (dst_area) {
      c->viewport.scale[0] = dst_area->w;
      c->viewport.scale[1] = dst_area->h;
      c->viewport.translate[0] = dst_area->x;
      c->viewport.translate[1] = dst_area->y;
   } else {
      c->viewport.scale[0] = dst_surface->width;
      c->viewport.scale[1] = dst_surface->height;
      c->viewport.translate[0] = 0;
      c->viewport.translate[1] = 0;
   }

   if (dst_clip) {
      scissor.minx = dst_clip->x;
      scissor.miny = dst_clip->y;
      scissor.maxx = dst_clip->x + dst_clip->w;
      scissor.maxy = dst_clip->y + dst_clip->h;
   } else {
      scissor.minx = 0;
      scissor.miny = 0;
      scissor.maxx = dst_surface->width;
      scissor.maxy = dst_surface->height;
   }

   gen_vertex_data(c);

   if (c->dirty_tl.x < c->dirty_br.x || c->dirty_tl.y < c->dirty_br.y) {
      util_clear_render_target(c->pipe, dst_surface, c->clear_color, 0, 0, dst_surface->width, dst_surface->height);
      c->dirty_tl.x = c->dirty_tl.y = 1.0f;
      c->dirty_br.x = c->dirty_br.y = 0.0f;
   }

   c->pipe->set_scissor_state(c->pipe, &scissor);
   c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
   c->pipe->set_viewport_state(c->pipe, &c->viewport);
   c->pipe->bind_vs_state(c->pipe, c->vs);
   c->pipe->set_vertex_buffers(c->pipe, 1, &c->vertex_buf);
   c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
   c->pipe->set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, c->csc_matrix);
   c->pipe->bind_blend_state(c->pipe, c->blend);
   c->pipe->bind_rasterizer_state(c->pipe, c->rast);

   draw_layers(c);
}

bool
vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe)
{
   csc_matrix csc_matrix;

   c->pipe = pipe;

   if (!init_pipe_state(c))
      return false;

   if (!init_shaders(c)) {
      cleanup_pipe_state(c);
      return false;
   }

   if (!init_buffers(c)) {
      cleanup_shaders(c);
      cleanup_pipe_state(c);
      return false;
   }

   vl_compositor_clear_layers(c);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, csc_matrix);
   vl_compositor_set_csc_matrix(c, csc_matrix);

   c->clear_color[0] = c->clear_color[1] = 0.0f;
   c->clear_color[2] = c->clear_color[3] = 0.0f;
   vl_compositor_reset_dirty_area(c);

   return true;
}
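
/*
 * Minimal usage sketch (not part of the original file; "pipe", "buf", "surf"
 * and "pic_type" stand for an already created pipe_context, pipe_video_buffer,
 * pipe_surface and pipe_mpeg12_picture_type value):
 *
 *    struct vl_compositor comp;
 *
 *    if (vl_compositor_init(&comp, pipe)) {
 *       vl_compositor_set_buffer_layer(&comp, 0, buf, NULL, NULL);
 *       vl_compositor_render(&comp, pic_type, surf, NULL, NULL);
 *       vl_compositor_cleanup(&comp);
 *    }
 */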