[g3dvl] move compositor src and dst normalisation into layer setting
[mesa.git] src/gallium/auxiliary/vl/vl_compositor.c
/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include <pipe/p_context.h>

#include <util/u_memory.h>
#include <util/u_draw.h>

#include <tgsi/tgsi_ureg.h>

#include "vl_csc.h"
#include "vl_types.h"
#include "vl_compositor.h"

typedef float csc_matrix[16];

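/* Trivial pass-through vertex shader: the composition quads are generated on
 * the CPU, so position and texture coordinates are forwarded unchanged. */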
static void *
create_vert_shader(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src vpos, vtex;
   struct ureg_dst o_vpos, o_vtex;

   shader = ureg_create(TGSI_PROCESSOR_VERTEX);
   if (!shader)
      return NULL;

   vpos = ureg_DECL_vs_input(shader, 0);
   vtex = ureg_DECL_vs_input(shader, 1);
   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, 0);
   o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, 1);

   /*
    * o_vpos = vpos
    * o_vtex = vtex
    */
   ureg_MOV(shader, o_vpos, vpos);
   ureg_MOV(shader, o_vtex, vtex);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

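/* Fragment shader for video buffer layers: sample the Y, Cb and Cr component
 * planes and convert the result to RGB with the CSC matrix from the constant
 * buffer. */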
static void *
create_frag_shader_video_buffer(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src csc[3];
   struct ureg_src sampler[3];
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   for (i = 0; i < 3; ++i) {
      csc[i] = ureg_DECL_constant(shader, i);
      sampler[i] = ureg_DECL_sampler(shader, i);
   }
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel.xyz = tex(tc, sampler[i])
    * fragment = csc * texel
    */
   for (i = 0; i < 3; ++i)
      ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D, tc, sampler[i]);

   ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

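/* Fragment shader for palettized subpictures: fetch the index, look it up in
 * the 1D palette texture, convert the palette color with the CSC matrix and
 * keep the alpha of the fetched texel. */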
static void *
create_frag_shader_palette(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   palette = ureg_DECL_sampler(shader, 1);
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MUL(shader, ureg_writemask(texel, TGSI_WRITEMASK_X), ureg_src(texel), ureg_imm1f(shader, 15.0f / 16.0f));
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

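/* Fragment shader for RGBA layers: a plain textured quad, no conversion. */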
static void *
create_frag_shader_rgba(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_dst fragment;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * fragment = tex(tc, sampler)
    */
   ureg_TEX(shader, fragment, TGSI_TEXTURE_2D, tc, sampler);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   c->vs = create_vert_shader(c);
   if (!c->vs) {
      debug_printf("Unable to create vertex shader.\n");
      return false;
   }

   c->fs_video_buffer = create_frag_shader_video_buffer(c);
   if (!c->fs_video_buffer) {
      debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette = create_frag_shader_palette(c);
   if (!c->fs_palette) {
      debug_printf("Unable to create Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgba = create_frag_shader_rgba(c);
   if (!c->fs_rgba) {
      debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
      return false;
   }

   return true;
}

static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vs_state(c->pipe, c->vs);
   c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
}

static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   c->viewport.scale[2] = 1;
   c->viewport.scale[3] = 1;
   c->viewport.translate[0] = 0;
   c->viewport.translate[1] = 0;
   c->viewport.translate[2] = 0;
   c->viewport.translate[3] = 0;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;

   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

   memset(&blend, 0, sizeof blend);
   blend.independent_blend_enable = 0;
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   blend.logicop_enable = 0;
   blend.logicop_func = PIPE_LOGICOP_CLEAR;
   blend.rt[0].colormask = PIPE_MASK_RGBA;
   blend.dither = 0;
   c->blend = c->pipe->create_blend_state(c->pipe, &blend);

   memset(&rast, 0, sizeof rast);
   rast.flatshade = 1;
   rast.front_ccw = 1;
   rast.cull_face = PIPE_FACE_NONE;
   rast.fill_back = PIPE_POLYGON_MODE_FILL;
   rast.fill_front = PIPE_POLYGON_MODE_FILL;
   rast.scissor = 1;
   rast.line_width = 1;
   rast.point_size_per_vertex = 1;
   rast.offset_units = 1;
   rast.offset_scale = 1;
   rast.gl_rasterization_rules = 1;

   c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

   return true;
}

static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
   c->pipe->delete_blend_state(c->pipe, c->blend);
   c->pipe->delete_rasterizer_state(c->pipe, c->rast);
}

static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[2];

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.stride = sizeof(struct vertex4f);
   c->vertex_buf.buffer_offset = 0;
   c->vertex_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_VERTEX_BUFFER,
      PIPE_USAGE_STREAM,
      sizeof(struct vertex4f) * (VL_COMPOSITOR_MAX_LAYERS + 1) * 4
   );

   vertex_elems[0].src_offset = 0;
   vertex_elems[0].instance_divisor = 0;
   vertex_elems[0].vertex_buffer_index = 0;
   vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
   vertex_elems[1].src_offset = sizeof(struct vertex2f);
   vertex_elems[1].instance_divisor = 0;
   vertex_elems[1].vertex_buffer_index = 0;
   vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
   c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 2, vertex_elems);

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   c->csc_matrix = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_STATIC,
      sizeof(csc_matrix)
   );

   return true;
}

static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   pipe_resource_reference(&c->csc_matrix, NULL);
}

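/* Fall back to the full extent of the layer's first texture when the caller
 * doesn't provide a rectangle. */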
static inline struct pipe_video_rect
default_rect(struct vl_compositor_layer *layer)
{
   struct pipe_resource *res = layer->sampler_views[0]->texture;
   struct pipe_video_rect rect = { 0, 0, res->width0, res->height0 };
   return rect;
}

static inline struct vertex2f
calc_topleft(struct vertex2f inv_size, struct pipe_video_rect rect)
{
   struct vertex2f res = { rect.x * inv_size.x, rect.y * inv_size.y };
   return res;
}

static inline struct vertex2f
calc_bottomright(struct vertex2f inv_size, struct pipe_video_rect rect)
{
   struct vertex2f res = { (rect.x + rect.w) * inv_size.x, (rect.y + rect.h) * inv_size.y };
   return res;
}

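/* Normalise a layer's source and destination rectangles once, when the layer
 * is set, so that gen_rect_verts() can emit the quad without further math. */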
static inline void
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
                 struct pipe_video_rect src, struct pipe_video_rect dst)
{
   struct vertex2f inv_size = { 1.0f / width, 1.0f / height };

   layer->src.tl = calc_topleft(inv_size, src);
   layer->src.br = calc_bottomright(inv_size, src);
   layer->dst.tl = calc_topleft(inv_size, dst);
   layer->dst.br = calc_bottomright(inv_size, dst);
}

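/* Emit one quad for a layer: .xy holds the destination position, .zw the
 * source texture coordinate. */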
static void
gen_rect_verts(struct vertex4f *vb, struct vl_compositor_layer *layer)
{
   assert(vb && layer);

   vb[0].x = layer->dst.tl.x;
   vb[0].y = layer->dst.tl.y;
   vb[0].z = layer->src.tl.x;
   vb[0].w = layer->src.tl.y;

   vb[1].x = layer->dst.br.x;
   vb[1].y = layer->dst.tl.y;
   vb[1].z = layer->src.br.x;
   vb[1].w = layer->src.tl.y;

   vb[2].x = layer->dst.br.x;
   vb[2].y = layer->dst.br.y;
   vb[2].z = layer->src.br.x;
   vb[2].w = layer->src.br.y;

   vb[3].x = layer->dst.tl.x;
   vb[3].y = layer->dst.br.y;
   vb[3].z = layer->src.tl.x;
   vb[3].w = layer->src.br.y;
}

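/* Map the vertex buffer and write a quad for every layer currently in use. */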
static void
gen_vertex_data(struct vl_compositor *c)
{
   struct vertex4f *vb;
   struct pipe_transfer *buf_transfer;
   unsigned i;

   assert(c);

   vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                        PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD | PIPE_TRANSFER_DONTBLOCK,
                        &buf_transfer);

   if (!vb)
      return;

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (c->used_layers & (1 << i)) {
         gen_rect_verts(vb, &c->layers[i]);
         vb += 4;
      }
   }

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

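/* Bind each active layer's fragment shader and samplers and draw its quad. */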
static void
draw_layers(struct vl_compositor *c)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (c->used_layers & (1 << i)) {
         struct pipe_sampler_view **samplers = &c->layers[i].sampler_views[0];
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;

         c->pipe->bind_fs_state(c->pipe, c->layers[i].fs);
         c->pipe->bind_fragment_sampler_states(c->pipe, num_sampler_views, c->layers[i].samplers);
         c->pipe->set_fragment_sampler_views(c->pipe, num_sampler_views, samplers);
         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;
      }
   }
}

static void
vl_compositor_clear_layers(struct pipe_video_compositor *compositor)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   unsigned i, j;

   assert(compositor);

   c->used_layers = 0;
   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      c->layers[i].fs = NULL;
      for (j = 0; j < 3; j++)
         pipe_sampler_view_reference(&c->layers[i].sampler_views[j], NULL);
   }
}

static void
vl_compositor_destroy(struct pipe_video_compositor *compositor)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor);

   vl_compositor_clear_layers(compositor);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);

   FREE(compositor);
}

static void
vl_compositor_set_csc_matrix(struct pipe_video_compositor *compositor, const float matrix[16])
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_transfer *buf_transfer;

   assert(compositor);

   memcpy
   (
      pipe_buffer_map(c->pipe, c->csc_matrix,
                      PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                      &buf_transfer),
      matrix,
      sizeof(csc_matrix)
   );

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

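/* Set up a layer that samples the components of a video buffer and converts
 * them to RGB. */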
static void
vl_compositor_set_buffer_layer(struct pipe_video_compositor *compositor,
                               unsigned layer,
                               struct pipe_video_buffer *buffer,
                               struct pipe_video_rect *src_rect,
                               struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   assert(compositor && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].fs = c->fs_video_buffer;

   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      c->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&c->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&c->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

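/* Set up a layer that maps an index texture through a 1D palette. */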
static void
vl_compositor_set_palette_layer(struct pipe_video_compositor *compositor,
                                unsigned layer,
                                struct pipe_sampler_view *indexes,
                                struct pipe_sampler_view *palette,
                                struct pipe_video_rect *src_rect,
                                struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor && indexes && palette);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].fs = c->fs_palette;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = c->sampler_nearest;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], indexes);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], palette);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], indexes->texture->width0, indexes->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

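/* Set up a layer that draws an RGBA texture directly. */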
static void
vl_compositor_set_rgba_layer(struct pipe_video_compositor *compositor,
                             unsigned layer,
                             struct pipe_sampler_view *rgba,
                             struct pipe_video_rect *src_rect,
                             struct pipe_video_rect *dst_rect)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   assert(compositor && rgba);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].fs = c->fs_rgba;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = NULL;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], rgba);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], rgba->texture->width0, rgba->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

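/* Render all layers set since the last clear_layers() call to dst_surface;
 * dst_area, when given, limits the drawn region via the scissor. */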
static void
vl_compositor_render(struct pipe_video_compositor *compositor,
                     enum pipe_mpeg12_picture_type picture_type,
                     struct pipe_surface *dst_surface,
                     struct pipe_video_rect *dst_area,
                     struct pipe_fence_handle **fence)
{
   struct vl_compositor *c = (struct vl_compositor *)compositor;
   struct pipe_scissor_state scissor;

   assert(compositor);
   assert(dst_surface);

   c->fb_state.width = dst_surface->width;
   c->fb_state.height = dst_surface->height;
   c->fb_state.cbufs[0] = dst_surface;

   c->viewport.scale[0] = dst_surface->width;
   c->viewport.scale[1] = dst_surface->height;

   if (dst_area) {
      scissor.minx = dst_area->x;
      scissor.miny = dst_area->y;
      scissor.maxx = dst_area->x + dst_area->w;
      scissor.maxy = dst_area->y + dst_area->h;
   } else {
      scissor.minx = 0;
      scissor.miny = 0;
      scissor.maxx = dst_surface->width;
      scissor.maxy = dst_surface->height;
   }

   gen_vertex_data(c);

   c->pipe->set_scissor_state(c->pipe, &scissor);
   c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
   c->pipe->set_viewport_state(c->pipe, &c->viewport);
   c->pipe->bind_vs_state(c->pipe, c->vs);
   c->pipe->set_vertex_buffers(c->pipe, 1, &c->vertex_buf);
   c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
   c->pipe->set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, c->csc_matrix);
   c->pipe->bind_blend_state(c->pipe, c->blend);
   c->pipe->bind_rasterizer_state(c->pipe, c->rast);

   draw_layers(c);

   c->pipe->flush(c->pipe, fence);
}

struct pipe_video_compositor *
vl_compositor_init(struct pipe_video_context *vpipe, struct pipe_context *pipe)
{
   csc_matrix csc_matrix;
   struct vl_compositor *compositor;

   compositor = CALLOC_STRUCT(vl_compositor);
   if (!compositor)
      return NULL;

   compositor->base.context = vpipe;
   compositor->base.destroy = vl_compositor_destroy;
   compositor->base.set_csc_matrix = vl_compositor_set_csc_matrix;
   compositor->base.clear_layers = vl_compositor_clear_layers;
   compositor->base.set_buffer_layer = vl_compositor_set_buffer_layer;
   compositor->base.set_palette_layer = vl_compositor_set_palette_layer;
   compositor->base.set_rgba_layer = vl_compositor_set_rgba_layer;
   compositor->base.render_picture = vl_compositor_render;

   compositor->pipe = pipe;

   if (!init_pipe_state(compositor)) {
      FREE(compositor);
      return NULL;
   }

   if (!init_shaders(compositor)) {
      cleanup_pipe_state(compositor);
      FREE(compositor);
      return NULL;
   }

   if (!init_buffers(compositor)) {
      cleanup_shaders(compositor);
      cleanup_pipe_state(compositor);
      FREE(compositor);
      return NULL;
   }

   vl_compositor_clear_layers(&compositor->base);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, csc_matrix);
   vl_compositor_set_csc_matrix(&compositor->base, csc_matrix);

   return &compositor->base;
}