/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include <pipe/p_context.h>

#include <util/u_memory.h>
#include <util/u_draw.h>
#include <util/u_surface.h>

#include <tgsi/tgsi_ureg.h>

#include "vl_csc.h"
#include "vl_types.h"
#include "vl_compositor.h"

typedef float csc_matrix[16];
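/*
 * The 16 floats hold the 4x4 color conversion matrix. The fragment shaders
 * below read its first three rows as constants csc[0..2] and apply them with
 * DP4; the fourth row is uploaded to the constant buffer but not read.
 */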

static void *
create_vert_shader(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src vpos, vtex;
   struct ureg_dst o_vpos, o_vtex;

   shader = ureg_create(TGSI_PROCESSOR_VERTEX);
   if (!shader)
      return NULL;

   vpos = ureg_DECL_vs_input(shader, 0);
   vtex = ureg_DECL_vs_input(shader, 1);
   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, 0);
   o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, 1);

   /*
    * o_vpos = vpos
    * o_vtex = vtex
    */
   ureg_MOV(shader, o_vpos, vpos);
   ureg_MOV(shader, o_vtex, vtex);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static void *
create_frag_shader_video_buffer(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src csc[3];
   struct ureg_src sampler[3];
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   for (i = 0; i < 3; ++i) {
      csc[i] = ureg_DECL_constant(shader, i);
      sampler[i] = ureg_DECL_sampler(shader, i);
   }
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel.xyz = tex(tc, sampler[i])
    * fragment = csc * texel
    */
   for (i = 0; i < 3; ++i)
      ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D, tc, sampler[i]);

   ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}
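
/*
 * In scalar form the shader above computes, per pixel (a restatement of the
 * DP4s it emits, with texel = (Y, Cb, Cr, 1) sampled from the three planes):
 *
 *    fragment.r = dot(csc[0], texel)
 *    fragment.g = dot(csc[1], texel)
 *    fragment.b = dot(csc[2], texel)
 *    fragment.a = 1.0
 */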

static void *
create_frag_shader_palette(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   palette = ureg_DECL_sampler(shader, 1);
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   /* rescale the index before the palette lookup */
   ureg_MUL(shader, ureg_writemask(texel, TGSI_WRITEMASK_X), ureg_src(texel), ureg_imm1f(shader, 15.0f / 16.0f));
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static void *
create_frag_shader_rgba(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_dst fragment;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * fragment = tex(tc, sampler)
    */
   ureg_TEX(shader, fragment, TGSI_TEXTURE_2D, tc, sampler);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   c->vs = create_vert_shader(c);
   if (!c->vs) {
      debug_printf("Unable to create vertex shader.\n");
      return false;
   }

   c->fs_video_buffer = create_frag_shader_video_buffer(c);
   if (!c->fs_video_buffer) {
      debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette = create_frag_shader_palette(c);
   if (!c->fs_palette) {
      debug_printf("Unable to create Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgba = create_frag_shader_rgba(c);
   if (!c->fs_rgba) {
      debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
      return false;
   }

   return true;
}

static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vs_state(c->pipe, c->vs);
   c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
}

static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   c->viewport.scale[2] = 1;
   c->viewport.scale[3] = 1;
   c->viewport.translate[0] = 0;
   c->viewport.translate[1] = 0;
   c->viewport.translate[2] = 0;
   c->viewport.translate[3] = 0;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;

   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

   memset(&blend, 0, sizeof blend);
   blend.independent_blend_enable = 0;
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.logicop_enable = 0;
   blend.logicop_func = PIPE_LOGICOP_CLEAR;
   blend.rt[0].colormask = PIPE_MASK_RGBA;
   blend.dither = 0;
   c->blend = c->pipe->create_blend_state(c->pipe, &blend);
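
   /*
    * The blend state above gives plain source-over alpha blending, i.e. for
    * every render target pixel:
    *
    *    rgb   = src.rgb * src.a + dst.rgb * (1 - src.a)
    *    alpha = src.a + dst.a
    *
    * so layers drawn later composite over the ones drawn before them.
    */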

   memset(&rast, 0, sizeof rast);
   rast.flatshade = 1;
   rast.front_ccw = 1;
   rast.cull_face = PIPE_FACE_NONE;
   rast.fill_back = PIPE_POLYGON_MODE_FILL;
   rast.fill_front = PIPE_POLYGON_MODE_FILL;
   rast.scissor = 1;
   rast.line_width = 1;
   rast.point_size_per_vertex = 1;
   rast.offset_units = 1;
   rast.offset_scale = 1;
   rast.gl_rasterization_rules = 1;

   c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

   return true;
}

static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
   c->pipe->delete_blend_state(c->pipe, c->blend);
   c->pipe->delete_rasterizer_state(c->pipe, c->rast);
}

static bool
create_vertex_buffer(struct vl_compositor *c)
{
   assert(c);

   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   c->vertex_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_VERTEX_BUFFER,
      PIPE_USAGE_STREAM,
      sizeof(struct vertex4f) * VL_COMPOSITOR_MAX_LAYERS * 4
   );
   return c->vertex_buf.buffer != NULL;
}

static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[2];

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.stride = sizeof(struct vertex4f);
   c->vertex_buf.buffer_offset = 0;
   if (!create_vertex_buffer(c))
      return false;

   vertex_elems[0].src_offset = 0;
   vertex_elems[0].instance_divisor = 0;
   vertex_elems[0].vertex_buffer_index = 0;
   vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
   vertex_elems[1].src_offset = sizeof(struct vertex2f);
   vertex_elems[1].instance_divisor = 0;
   vertex_elems[1].vertex_buffer_index = 0;
   vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
   c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 2, vertex_elems);

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   c->csc_matrix = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_STATIC,
      sizeof(csc_matrix)
   );

   return c->csc_matrix != NULL;
}
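
/*
 * Layout of the interleaved vertex data set up above: every vertex is a
 * struct vertex4f whose xy half (element 0) carries the destination position
 * and whose zw half (element 1, at offset sizeof(struct vertex2f)) carries
 * the source texture coordinate; gen_rect_verts() below fills four such
 * vertices per layer quad.
 */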

static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   pipe_resource_reference(&c->csc_matrix, NULL);
}

static inline struct pipe_video_rect
default_rect(struct vl_compositor_layer *layer)
{
   struct pipe_resource *res = layer->sampler_views[0]->texture;
   struct pipe_video_rect rect = { 0, 0, res->width0, res->height0 };
   return rect;
}

static inline struct vertex2f
calc_topleft(struct vertex2f inv_size, struct pipe_video_rect rect)
{
   struct vertex2f res = { rect.x * inv_size.x, rect.y * inv_size.y };
   return res;
}

static inline struct vertex2f
calc_bottomright(struct vertex2f inv_size, struct pipe_video_rect rect)
{
   struct vertex2f res = { (rect.x + rect.w) * inv_size.x, (rect.y + rect.h) * inv_size.y };
   return res;
}

static inline void
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
                 struct pipe_video_rect src, struct pipe_video_rect dst)
{
   struct vertex2f inv_size = { 1.0f / width, 1.0f / height };

   layer->src.tl = calc_topleft(inv_size, src);
   layer->src.br = calc_bottomright(inv_size, src);
   layer->dst.tl = calc_topleft(inv_size, dst);
   layer->dst.br = calc_bottomright(inv_size, dst);
}
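
/*
 * Worked example with illustrative numbers (not taken from the code): for a
 * 640x480 layer texture, src = {0, 0, 640, 480} and dst = {0, 0, 320, 240},
 * inv_size is (1/640, 1/480), so src.tl = (0, 0), src.br = (1, 1),
 * dst.tl = (0, 0) and dst.br = (0.5, 0.5); both rectangles end up in
 * coordinates normalized to the layer's own size.
 */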

static void
gen_rect_verts(struct vertex4f *vb, struct vl_compositor_layer *layer)
{
   assert(vb && layer);

   vb[0].x = layer->dst.tl.x;
   vb[0].y = layer->dst.tl.y;
   vb[0].z = layer->src.tl.x;
   vb[0].w = layer->src.tl.y;

   vb[1].x = layer->dst.br.x;
   vb[1].y = layer->dst.tl.y;
   vb[1].z = layer->src.br.x;
   vb[1].w = layer->src.tl.y;

   vb[2].x = layer->dst.br.x;
   vb[2].y = layer->dst.br.y;
   vb[2].z = layer->src.br.x;
   vb[2].w = layer->src.br.y;

   vb[3].x = layer->dst.tl.x;
   vb[3].y = layer->dst.br.y;
   vb[3].z = layer->src.tl.x;
   vb[3].w = layer->src.br.y;
}

static void
gen_vertex_data(struct vl_compositor *c)
{
   struct vertex4f *vb;
   struct pipe_transfer *buf_transfer;
   unsigned i;

   assert(c);

   vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                        PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD | PIPE_TRANSFER_DONTBLOCK,
                        &buf_transfer);

   if (!vb) {
      // If buffer is still locked from last draw create a new one
      create_vertex_buffer(c);
      vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                           PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                           &buf_transfer);
      // Give up if even the fresh buffer could not be mapped
      if (!vb)
         return;
   }

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         gen_rect_verts(vb, layer);
         vb += 4;

         if (layer->clearing &&
             c->dirty_tl.x >= layer->dst.tl.x &&
             c->dirty_tl.y >= layer->dst.tl.y &&
             c->dirty_br.x <= layer->dst.br.x &&
             c->dirty_br.y <= layer->dst.br.y) {

            // We clear the dirty area anyway, no need for clear_render_target
            c->dirty_tl.x = c->dirty_tl.y = 1.0f;
            c->dirty_br.x = c->dirty_br.y = 0.0f;
         }
      }
   }

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

static void
draw_layers(struct vl_compositor *c)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         struct pipe_sampler_view **samplers = &layer->sampler_views[0];
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;

         c->pipe->bind_fs_state(c->pipe, layer->fs);
         c->pipe->bind_fragment_sampler_states(c->pipe, num_sampler_views, layer->samplers);
         c->pipe->set_fragment_sampler_views(c->pipe, num_sampler_views, samplers);
         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;

         // Remember the currently drawn area as dirty for the next draw command
         c->dirty_tl.x = MIN2(layer->dst.tl.x, c->dirty_tl.x);
         c->dirty_tl.y = MIN2(layer->dst.tl.y, c->dirty_tl.y);
         c->dirty_br.x = MAX2(layer->dst.br.x, c->dirty_br.x);
         c->dirty_br.y = MAX2(layer->dst.br.y, c->dirty_br.y);
      }
   }
}
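
/*
 * Summary of the dirty-area bookkeeping used above (derived from the code in
 * this file): vl_compositor_reset_dirty_area() marks the whole surface dirty
 * (tl = 0, br = 1), draw_layers() grows the rectangle to cover everything it
 * draws, gen_vertex_data() collapses it to an empty rectangle (tl = 1,
 * br = 0) when a "clearing" layer already covers it, and
 * vl_compositor_render() only calls util_clear_render_target() while the
 * rectangle is non-empty.
 */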

static void
vl_compositor_reset_dirty_area(struct pipe_video_compositor *compositor)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;

   assert(compositor);

   c->dirty_tl.x = c->dirty_tl.y = 0.0f;
   c->dirty_br.x = c->dirty_br.y = 1.0f;
}

static void
vl_compositor_set_clear_color(struct pipe_video_compositor *compositor, float color[4])
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   unsigned i;

   assert(compositor);

   for (i = 0; i < 4; ++i)
      c->clear_color[i] = color[i];
}

static void
vl_compositor_clear_layers(struct pipe_video_compositor *compositor)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   unsigned i, j;

   assert(compositor);

   c->used_layers = 0;
   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      c->layers[i].fs = NULL;
      for (j = 0; j < 3; j++)
         pipe_sampler_view_reference(&c->layers[i].sampler_views[j], NULL);
   }
}

static void
vl_compositor_destroy(struct pipe_video_compositor *compositor)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor);

   vl_compositor_clear_layers(compositor);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);

   FREE(compositor);
}

static void
vl_compositor_set_csc_matrix(struct pipe_video_compositor *compositor, const float matrix[16])
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_transfer *buf_transfer;
   float *map;

   assert(compositor);

   map = pipe_buffer_map(c->pipe, c->csc_matrix,
                         PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                         &buf_transfer);
   if (!map)
      return;

   memcpy(map, matrix, sizeof(csc_matrix));

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

static void
vl_compositor_set_buffer_layer(struct pipe_video_compositor *compositor,
                               unsigned layer,
                               struct pipe_video_buffer *buffer,
                               struct pipe_video_rect *src_rect,
                               struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   assert(compositor && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = true;
   c->layers[layer].fs = c->fs_video_buffer;

   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      c->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&c->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&c->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

static void
vl_compositor_set_palette_layer(struct pipe_video_compositor *compositor,
                                unsigned layer,
                                struct pipe_sampler_view *indexes,
                                struct pipe_sampler_view *palette,
                                struct pipe_video_rect *src_rect,
                                struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor && indexes && palette);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = false;
   c->layers[layer].fs = c->fs_palette;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = c->sampler_nearest;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], indexes);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], palette);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], indexes->texture->width0, indexes->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

static void
vl_compositor_set_rgba_layer(struct pipe_video_compositor *compositor,
                             unsigned layer,
                             struct pipe_sampler_view *rgba,
                             struct pipe_video_rect *src_rect,
                             struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor && rgba);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].clearing = false;
   c->layers[layer].fs = c->fs_rgba;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = NULL;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], rgba);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], rgba->texture->width0, rgba->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

static void
vl_compositor_render(struct pipe_video_compositor *compositor,
                     enum pipe_mpeg12_picture_type picture_type,
                     struct pipe_surface *dst_surface,
                     struct pipe_video_rect *dst_area,
                     struct pipe_fence_handle **fence)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_scissor_state scissor;

   assert(compositor);
   assert(dst_surface);

   c->fb_state.width = dst_surface->width;
   c->fb_state.height = dst_surface->height;
   c->fb_state.cbufs[0] = dst_surface;

   c->viewport.scale[0] = dst_surface->width;
   c->viewport.scale[1] = dst_surface->height;

   if (dst_area) {
      scissor.minx = dst_area->x;
      scissor.miny = dst_area->y;
      scissor.maxx = dst_area->x + dst_area->w;
      scissor.maxy = dst_area->y + dst_area->h;
   } else {
      scissor.minx = 0;
      scissor.miny = 0;
      scissor.maxx = dst_surface->width;
      scissor.maxy = dst_surface->height;
   }

   gen_vertex_data(c);

   if (c->dirty_tl.x < c->dirty_br.x || c->dirty_tl.y < c->dirty_br.y) {
      util_clear_render_target(c->pipe, dst_surface, c->clear_color, 0, 0, dst_surface->width, dst_surface->height);
      c->dirty_tl.x = c->dirty_tl.y = 1.0f;
      c->dirty_br.x = c->dirty_br.y = 0.0f;
   }

   c->pipe->set_scissor_state(c->pipe, &scissor);
   c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
   c->pipe->set_viewport_state(c->pipe, &c->viewport);
   c->pipe->bind_vs_state(c->pipe, c->vs);
   c->pipe->set_vertex_buffers(c->pipe, 1, &c->vertex_buf);
   c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
   c->pipe->set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, c->csc_matrix);
   c->pipe->bind_blend_state(c->pipe, c->blend);
   c->pipe->bind_rasterizer_state(c->pipe, c->rast);

   draw_layers(c);

   c->pipe->flush(c->pipe, fence);
}

struct pipe_video_compositor *
vl_compositor_init(struct pipe_video_context *vpipe, struct pipe_context *pipe)
{
   csc_matrix csc;
   struct vl_compositor *compositor;

   compositor = CALLOC_STRUCT(vl_compositor);
   if (!compositor)
      return NULL;

   compositor->base.context = vpipe;
   compositor->base.destroy = vl_compositor_destroy;
   compositor->base.set_csc_matrix = vl_compositor_set_csc_matrix;
   compositor->base.reset_dirty_area = vl_compositor_reset_dirty_area;
   compositor->base.set_clear_color = vl_compositor_set_clear_color;
   compositor->base.clear_layers = vl_compositor_clear_layers;
   compositor->base.set_buffer_layer = vl_compositor_set_buffer_layer;
   compositor->base.set_palette_layer = vl_compositor_set_palette_layer;
   compositor->base.set_rgba_layer = vl_compositor_set_rgba_layer;
   compositor->base.render_picture = vl_compositor_render;

   compositor->pipe = pipe;

   if (!init_pipe_state(compositor)) {
      FREE(compositor);
      return NULL;
   }

   if (!init_shaders(compositor)) {
      cleanup_pipe_state(compositor);
      FREE(compositor);
      return NULL;
   }

   if (!init_buffers(compositor)) {
      cleanup_shaders(compositor);
      cleanup_pipe_state(compositor);
      FREE(compositor);
      return NULL;
   }

   vl_compositor_clear_layers(&compositor->base);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, csc);
   vl_compositor_set_csc_matrix(&compositor->base, csc);

   compositor->clear_color[0] = compositor->clear_color[1] = 0.0f;
   compositor->clear_color[2] = compositor->clear_color[3] = 0.0f;
   vl_compositor_reset_dirty_area(&compositor->base);

   return &compositor->base;
}
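
/*
 * A minimal usage sketch under assumptions (this block is illustrative and
 * not part of the compositor itself): it presumes a caller that already owns
 * a pipe_video_context, a pipe_context, a decoded pipe_video_buffer and a
 * displayable pipe_surface; the names prefixed with "my_" and the
 * PIPE_MPEG12_PICTURE_TYPE_FRAME value are placeholders for whatever the
 * state tracker actually has at hand.
 */
#if 0
static void
example_present_frame(struct pipe_video_context *my_vpipe, struct pipe_context *my_pipe,
                      struct pipe_video_buffer *my_buf, struct pipe_surface *my_surface)
{
   struct pipe_video_compositor *comp = vl_compositor_init(my_vpipe, my_pipe);

   if (!comp)
      return;

   /* composite the whole video buffer onto the whole surface */
   comp->clear_layers(comp);
   comp->set_buffer_layer(comp, 0, my_buf, NULL, NULL);
   comp->render_picture(comp, PIPE_MPEG12_PICTURE_TYPE_FRAME, my_surface, NULL, NULL);

   comp->destroy(comp);
}
#endif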