1 /**************************************************************************
3 * Copyright 2009 Younes Manton.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
30 #include "pipe/p_compiler.h"
31 #include "pipe/p_context.h"
33 #include "util/u_memory.h"
34 #include "util/u_draw.h"
35 #include "util/u_surface.h"
36 #include "util/u_upload_mgr.h"
37 #include "util/u_sampler.h"
39 #include "tgsi/tgsi_ureg.h"
43 #include "vl_compositor.h"
55 create_vert_shader(struct vl_compositor
*c
)
57 struct ureg_program
*shader
;
58 struct ureg_src vpos
, vtex
, color
;
60 struct ureg_dst o_vpos
, o_vtex
, o_color
;
61 struct ureg_dst o_vtop
, o_vbottom
;
63 shader
= ureg_create(PIPE_SHADER_VERTEX
);
67 vpos
= ureg_DECL_vs_input(shader
, 0);
68 vtex
= ureg_DECL_vs_input(shader
, 1);
69 color
= ureg_DECL_vs_input(shader
, 2);
70 tmp
= ureg_DECL_temporary(shader
);
71 o_vpos
= ureg_DECL_output(shader
, TGSI_SEMANTIC_POSITION
, VS_O_VPOS
);
72 o_color
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, VS_O_COLOR
);
73 o_vtex
= ureg_DECL_output(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTEX
);
74 o_vtop
= ureg_DECL_output(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTOP
);
75 o_vbottom
= ureg_DECL_output(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VBOTTOM
);
82 ureg_MOV(shader
, o_vpos
, vpos
);
83 ureg_MOV(shader
, o_vtex
, vtex
);
84 ureg_MOV(shader
, o_color
, color
);
91 * o_vtop.y = vtex.y * tmp.x + 0.25f
92 * o_vtop.z = vtex.y * tmp.y + 0.25f
93 * o_vtop.w = 1 / tmp.x
95 * o_vbottom.x = vtex.x
96 * o_vbottom.y = vtex.y * tmp.x - 0.25f
97 * o_vbottom.z = vtex.y * tmp.y - 0.25f
98 * o_vbottom.w = 1 / tmp.y
100 ureg_MUL(shader
, ureg_writemask(tmp
, TGSI_WRITEMASK_X
),
101 ureg_scalar(vtex
, TGSI_SWIZZLE_W
), ureg_imm1f(shader
, 0.5f
));
102 ureg_MUL(shader
, ureg_writemask(tmp
, TGSI_WRITEMASK_Y
),
103 ureg_scalar(vtex
, TGSI_SWIZZLE_W
), ureg_imm1f(shader
, 0.25f
));
105 ureg_MOV(shader
, ureg_writemask(o_vtop
, TGSI_WRITEMASK_X
), vtex
);
106 ureg_MAD(shader
, ureg_writemask(o_vtop
, TGSI_WRITEMASK_Y
), ureg_scalar(vtex
, TGSI_SWIZZLE_Y
),
107 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_X
), ureg_imm1f(shader
, 0.25f
));
108 ureg_MAD(shader
, ureg_writemask(o_vtop
, TGSI_WRITEMASK_Z
), ureg_scalar(vtex
, TGSI_SWIZZLE_Y
),
109 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_Y
), ureg_imm1f(shader
, 0.25f
));
110 ureg_RCP(shader
, ureg_writemask(o_vtop
, TGSI_WRITEMASK_W
),
111 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_X
));
113 ureg_MOV(shader
, ureg_writemask(o_vbottom
, TGSI_WRITEMASK_X
), vtex
);
114 ureg_MAD(shader
, ureg_writemask(o_vbottom
, TGSI_WRITEMASK_Y
), ureg_scalar(vtex
, TGSI_SWIZZLE_Y
),
115 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_X
), ureg_imm1f(shader
, -0.25f
));
116 ureg_MAD(shader
, ureg_writemask(o_vbottom
, TGSI_WRITEMASK_Z
), ureg_scalar(vtex
, TGSI_SWIZZLE_Y
),
117 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_Y
), ureg_imm1f(shader
, -0.25f
));
118 ureg_RCP(shader
, ureg_writemask(o_vbottom
, TGSI_WRITEMASK_W
),
119 ureg_scalar(ureg_src(tmp
), TGSI_SWIZZLE_Y
));
123 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
127 create_frag_shader_weave(struct ureg_program
*shader
, struct ureg_dst fragment
)
129 struct ureg_src i_tc
[2];
130 struct ureg_src sampler
[3];
131 struct ureg_dst t_tc
[2];
132 struct ureg_dst t_texel
[2];
135 i_tc
[0] = ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTOP
, TGSI_INTERPOLATE_LINEAR
);
136 i_tc
[1] = ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VBOTTOM
, TGSI_INTERPOLATE_LINEAR
);
138 for (i
= 0; i
< 3; ++i
) {
139 sampler
[i
] = ureg_DECL_sampler(shader
, i
);
140 ureg_DECL_sampler_view(shader
, i
, TGSI_TEXTURE_2D_ARRAY
,
141 TGSI_RETURN_TYPE_FLOAT
,
142 TGSI_RETURN_TYPE_FLOAT
,
143 TGSI_RETURN_TYPE_FLOAT
,
144 TGSI_RETURN_TYPE_FLOAT
);
147 for (i
= 0; i
< 2; ++i
) {
148 t_tc
[i
] = ureg_DECL_temporary(shader
);
149 t_texel
[i
] = ureg_DECL_temporary(shader
);
152 /* calculate the texture offsets
154 * t_tc.y = (round(i_tc.y - 0.5) + 0.5) / height * 2
156 for (i
= 0; i
< 2; ++i
) {
157 ureg_MOV(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_X
), i_tc
[i
]);
158 ureg_ADD(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_YZ
),
159 i_tc
[i
], ureg_imm1f(shader
, -0.5f
));
160 ureg_ROUND(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_YZ
), ureg_src(t_tc
[i
]));
161 ureg_MOV(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_W
),
162 ureg_imm1f(shader
, i
? 1.0f
: 0.0f
));
163 ureg_ADD(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_YZ
),
164 ureg_src(t_tc
[i
]), ureg_imm1f(shader
, 0.5f
));
165 ureg_MUL(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_Y
),
166 ureg_src(t_tc
[i
]), ureg_scalar(i_tc
[0], TGSI_SWIZZLE_W
));
167 ureg_MUL(shader
, ureg_writemask(t_tc
[i
], TGSI_WRITEMASK_Z
),
168 ureg_src(t_tc
[i
]), ureg_scalar(i_tc
[1], TGSI_SWIZZLE_W
));
172 * texel[0..1].x = tex(t_tc[0..1][0])
173 * texel[0..1].y = tex(t_tc[0..1][1])
174 * texel[0..1].z = tex(t_tc[0..1][2])
176 for (i
= 0; i
< 2; ++i
)
177 for (j
= 0; j
< 3; ++j
) {
178 struct ureg_src src
= ureg_swizzle(ureg_src(t_tc
[i
]),
179 TGSI_SWIZZLE_X
, j
? TGSI_SWIZZLE_Z
: TGSI_SWIZZLE_Y
, TGSI_SWIZZLE_W
, TGSI_SWIZZLE_W
);
181 ureg_TEX(shader
, ureg_writemask(t_texel
[i
], TGSI_WRITEMASK_X
<< j
),
182 TGSI_TEXTURE_2D_ARRAY
, src
, sampler
[j
]);
185 /* calculate linear interpolation factor
186 * factor = |round(i_tc.y) - i_tc.y| * 2
188 ureg_ROUND(shader
, ureg_writemask(t_tc
[0], TGSI_WRITEMASK_YZ
), i_tc
[0]);
189 ureg_ADD(shader
, ureg_writemask(t_tc
[0], TGSI_WRITEMASK_YZ
),
190 ureg_src(t_tc
[0]), ureg_negate(i_tc
[0]));
191 ureg_MUL(shader
, ureg_writemask(t_tc
[0], TGSI_WRITEMASK_YZ
),
192 ureg_abs(ureg_src(t_tc
[0])), ureg_imm1f(shader
, 2.0f
));
193 ureg_LRP(shader
, fragment
, ureg_swizzle(ureg_src(t_tc
[0]),
194 TGSI_SWIZZLE_Y
, TGSI_SWIZZLE_Z
, TGSI_SWIZZLE_Z
, TGSI_SWIZZLE_Z
),
195 ureg_src(t_texel
[0]), ureg_src(t_texel
[1]));
197 for (i
= 0; i
< 2; ++i
) {
198 ureg_release_temporary(shader
, t_texel
[i
]);
199 ureg_release_temporary(shader
, t_tc
[i
]);
204 create_frag_shader_csc(struct ureg_program
*shader
, struct ureg_dst texel
,
205 struct ureg_dst fragment
)
207 struct ureg_src csc
[3];
208 struct ureg_src lumakey
;
209 struct ureg_dst temp
[2];
212 for (i
= 0; i
< 3; ++i
)
213 csc
[i
] = ureg_DECL_constant(shader
, i
);
215 lumakey
= ureg_DECL_constant(shader
, 3);
217 for (i
= 0; i
< 2; ++i
)
218 temp
[i
] = ureg_DECL_temporary(shader
);
220 ureg_MOV(shader
, ureg_writemask(texel
, TGSI_WRITEMASK_W
),
221 ureg_imm1f(shader
, 1.0f
));
223 for (i
= 0; i
< 3; ++i
)
224 ureg_DP4(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_X
<< i
), csc
[i
],
227 ureg_MOV(shader
, ureg_writemask(temp
[0], TGSI_WRITEMASK_W
),
228 ureg_scalar(ureg_src(texel
), TGSI_SWIZZLE_Z
));
229 ureg_SLE(shader
, ureg_writemask(temp
[1],TGSI_WRITEMASK_W
),
230 ureg_src(temp
[0]), ureg_scalar(lumakey
, TGSI_SWIZZLE_X
));
231 ureg_SGT(shader
, ureg_writemask(temp
[0],TGSI_WRITEMASK_W
),
232 ureg_src(temp
[0]), ureg_scalar(lumakey
, TGSI_SWIZZLE_Y
));
233 ureg_MAX(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_W
),
234 ureg_src(temp
[0]), ureg_src(temp
[1]));
236 for (i
= 0; i
< 2; ++i
)
237 ureg_release_temporary(shader
, temp
[i
]);
241 create_frag_shader_yuv(struct ureg_program
*shader
, struct ureg_dst texel
)
244 struct ureg_src sampler
[3];
247 tc
= ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTEX
, TGSI_INTERPOLATE_LINEAR
);
248 for (i
= 0; i
< 3; ++i
) {
249 sampler
[i
] = ureg_DECL_sampler(shader
, i
);
250 ureg_DECL_sampler_view(shader
, i
, TGSI_TEXTURE_2D_ARRAY
,
251 TGSI_RETURN_TYPE_FLOAT
,
252 TGSI_RETURN_TYPE_FLOAT
,
253 TGSI_RETURN_TYPE_FLOAT
,
254 TGSI_RETURN_TYPE_FLOAT
);
258 * texel.xyz = tex(tc, sampler[i])
260 for (i
= 0; i
< 3; ++i
)
261 ureg_TEX(shader
, ureg_writemask(texel
, TGSI_WRITEMASK_X
<< i
), TGSI_TEXTURE_2D_ARRAY
, tc
, sampler
[i
]);
265 create_frag_shader_video_buffer(struct vl_compositor
*c
)
267 struct ureg_program
*shader
;
268 struct ureg_dst texel
;
269 struct ureg_dst fragment
;
271 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
275 texel
= ureg_DECL_temporary(shader
);
276 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
278 create_frag_shader_yuv(shader
, texel
);
279 create_frag_shader_csc(shader
, texel
, fragment
);
281 ureg_release_temporary(shader
, texel
);
284 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
288 create_frag_shader_weave_rgb(struct vl_compositor
*c
)
290 struct ureg_program
*shader
;
291 struct ureg_dst texel
, fragment
;
293 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
297 texel
= ureg_DECL_temporary(shader
);
298 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
300 create_frag_shader_weave(shader
, texel
);
301 create_frag_shader_csc(shader
, texel
, fragment
);
303 ureg_release_temporary(shader
, texel
);
307 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
311 create_frag_shader_deint_yuv(struct vl_compositor
*c
, bool y
, bool w
)
313 struct ureg_program
*shader
;
314 struct ureg_dst texel
, fragment
;
316 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
320 texel
= ureg_DECL_temporary(shader
);
321 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
324 create_frag_shader_weave(shader
, texel
);
326 create_frag_shader_yuv(shader
, texel
);
329 ureg_MOV(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_X
), ureg_src(texel
));
331 ureg_MOV(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_XY
),
332 ureg_swizzle(ureg_src(texel
), TGSI_SWIZZLE_Y
,
333 TGSI_SWIZZLE_Z
, TGSI_SWIZZLE_W
, TGSI_SWIZZLE_W
));
335 ureg_release_temporary(shader
, texel
);
339 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
343 create_frag_shader_palette(struct vl_compositor
*c
, bool include_cc
)
345 struct ureg_program
*shader
;
346 struct ureg_src csc
[3];
348 struct ureg_src sampler
;
349 struct ureg_src palette
;
350 struct ureg_dst texel
;
351 struct ureg_dst fragment
;
354 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
358 for (i
= 0; include_cc
&& i
< 3; ++i
)
359 csc
[i
] = ureg_DECL_constant(shader
, i
);
361 tc
= ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTEX
, TGSI_INTERPOLATE_LINEAR
);
362 sampler
= ureg_DECL_sampler(shader
, 0);
363 ureg_DECL_sampler_view(shader
, 0, TGSI_TEXTURE_2D
,
364 TGSI_RETURN_TYPE_FLOAT
,
365 TGSI_RETURN_TYPE_FLOAT
,
366 TGSI_RETURN_TYPE_FLOAT
,
367 TGSI_RETURN_TYPE_FLOAT
);
368 palette
= ureg_DECL_sampler(shader
, 1);
369 ureg_DECL_sampler_view(shader
, 1, TGSI_TEXTURE_1D
,
370 TGSI_RETURN_TYPE_FLOAT
,
371 TGSI_RETURN_TYPE_FLOAT
,
372 TGSI_RETURN_TYPE_FLOAT
,
373 TGSI_RETURN_TYPE_FLOAT
);
375 texel
= ureg_DECL_temporary(shader
);
376 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
379 * texel = tex(tc, sampler)
380 * fragment.xyz = tex(texel, palette) * csc
381 * fragment.a = texel.a
383 ureg_TEX(shader
, texel
, TGSI_TEXTURE_2D
, tc
, sampler
);
384 ureg_MOV(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_W
), ureg_src(texel
));
387 ureg_TEX(shader
, texel
, TGSI_TEXTURE_1D
, ureg_src(texel
), palette
);
388 for (i
= 0; i
< 3; ++i
)
389 ureg_DP4(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_X
<< i
), csc
[i
], ureg_src(texel
));
391 ureg_TEX(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_XYZ
),
392 TGSI_TEXTURE_1D
, ureg_src(texel
), palette
);
395 ureg_release_temporary(shader
, texel
);
398 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
402 create_frag_shader_rgba(struct vl_compositor
*c
)
404 struct ureg_program
*shader
;
405 struct ureg_src tc
, color
, sampler
;
406 struct ureg_dst texel
, fragment
;
408 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
412 tc
= ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTEX
, TGSI_INTERPOLATE_LINEAR
);
413 color
= ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_COLOR
, VS_O_COLOR
, TGSI_INTERPOLATE_LINEAR
);
414 sampler
= ureg_DECL_sampler(shader
, 0);
415 ureg_DECL_sampler_view(shader
, 0, TGSI_TEXTURE_2D
,
416 TGSI_RETURN_TYPE_FLOAT
,
417 TGSI_RETURN_TYPE_FLOAT
,
418 TGSI_RETURN_TYPE_FLOAT
,
419 TGSI_RETURN_TYPE_FLOAT
);
420 texel
= ureg_DECL_temporary(shader
);
421 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
424 * fragment = tex(tc, sampler)
426 ureg_TEX(shader
, texel
, TGSI_TEXTURE_2D
, tc
, sampler
);
427 ureg_MUL(shader
, fragment
, ureg_src(texel
), color
);
430 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
434 create_frag_shader_rgb_yuv(struct vl_compositor
*c
, bool y
)
436 struct ureg_program
*shader
;
437 struct ureg_src tc
, sampler
;
438 struct ureg_dst texel
, fragment
;
440 struct ureg_src csc
[3];
443 shader
= ureg_create(PIPE_SHADER_FRAGMENT
);
447 for (i
= 0; i
< 3; ++i
)
448 csc
[i
] = ureg_DECL_constant(shader
, i
);
450 sampler
= ureg_DECL_sampler(shader
, 0);
451 tc
= ureg_DECL_fs_input(shader
, TGSI_SEMANTIC_GENERIC
, VS_O_VTEX
, TGSI_INTERPOLATE_LINEAR
);
452 texel
= ureg_DECL_temporary(shader
);
453 fragment
= ureg_DECL_output(shader
, TGSI_SEMANTIC_COLOR
, 0);
455 ureg_TEX(shader
, texel
, TGSI_TEXTURE_2D
, tc
, sampler
);
458 ureg_DP4(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_X
), csc
[0], ureg_src(texel
));
460 for (i
= 0; i
< 2; ++i
)
461 ureg_DP4(shader
, ureg_writemask(fragment
, TGSI_WRITEMASK_X
<< i
), csc
[i
+ 1], ureg_src(texel
));
464 ureg_release_temporary(shader
, texel
);
467 return ureg_create_shader_and_destroy(shader
, c
->pipe
);
/**
 * Create all vertex/fragment shader CSOs used by the compositor.
 * Logs and returns false on the first failure (caller cleans up); true on
 * success.
 */
static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   c->vs = create_vert_shader(c);
   if (!c->vs) {
      debug_printf("Unable to create vertex shader.\n");
      return false;
   }

   c->fs_video_buffer = create_frag_shader_video_buffer(c);
   if (!c->fs_video_buffer) {
      debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_weave_rgb = create_frag_shader_weave_rgb(c);
   if (!c->fs_weave_rgb) {
      debug_printf("Unable to create YCbCr-to-RGB weave fragment shader.\n");
      return false;
   }

   c->fs_yuv.weave.y = create_frag_shader_deint_yuv(c, true, true);
   c->fs_yuv.weave.uv = create_frag_shader_deint_yuv(c, false, true);
   c->fs_yuv.bob.y = create_frag_shader_deint_yuv(c, true, false);
   c->fs_yuv.bob.uv = create_frag_shader_deint_yuv(c, false, false);
   if (!c->fs_yuv.weave.y || !c->fs_yuv.weave.uv ||
       !c->fs_yuv.bob.y || !c->fs_yuv.bob.uv) {
      debug_printf("Unable to create YCbCr i-to-YCbCr p deint fragment shader.\n");
      return false;
   }

   c->fs_palette.yuv = create_frag_shader_palette(c, true);
   if (!c->fs_palette.yuv) {
      debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette.rgb = create_frag_shader_palette(c, false);
   if (!c->fs_palette.rgb) {
      debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgba = create_frag_shader_rgba(c);
   if (!c->fs_rgba) {
      debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgb_yuv.y = create_frag_shader_rgb_yuv(c, true);
   c->fs_rgb_yuv.uv = create_frag_shader_rgb_yuv(c, false);
   if (!c->fs_rgb_yuv.y || !c->fs_rgb_yuv.uv) {
      debug_printf("Unable to create RGB-to-YUV fragment shader.\n");
      return false;
   }

   return true;
}
/**
 * Destroy every shader CSO created by init_shaders().
 * Must be called before the pipe context is destroyed.
 */
static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vs_state(c->pipe, c->vs);
   c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
   c->pipe->delete_fs_state(c->pipe, c->fs_weave_rgb);
   c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.y);
   c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.uv);
   c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.y);
   c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.uv);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.y);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.uv);
}
/**
 * Create the fixed-function CSOs (samplers, blend, rasterizer, DSA) used by
 * every draw. Depth and stencil are fully disabled; two blend states are
 * kept: an opaque "clear" one and a src-alpha "add" one for overlays.
 * Returns true on success.
 *
 * NOTE(review): some rasterizer-state initializers (flatshade/front_ccw/
 * scissor/line_width) were lost in extraction and are reconstructed from the
 * upstream implementation — verify against the project tree.
 */
static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_depth_stencil_alpha_state dsa;
   unsigned i;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_REPEAT;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;

   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

   memset(&blend, 0, sizeof blend);
   blend.independent_blend_enable = 0;
   blend.rt[0].blend_enable = 0;
   blend.logicop_enable = 0;
   blend.logicop_func = PIPE_LOGICOP_CLEAR;
   blend.rt[0].colormask = PIPE_MASK_RGBA;

   c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);

   /* classic src-alpha / inv-src-alpha compositing for overlay layers */
   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);

   memset(&rast, 0, sizeof rast);
   rast.flatshade = 0;
   rast.front_ccw = 1;
   rast.cull_face = PIPE_FACE_NONE;
   rast.fill_back = PIPE_POLYGON_MODE_FILL;
   rast.fill_front = PIPE_POLYGON_MODE_FILL;
   rast.scissor = 1;
   rast.line_width = 1;
   rast.point_size_per_vertex = 1;
   rast.offset_units = 1;
   rast.offset_scale = 1;
   rast.half_pixel_center = 1;
   rast.bottom_edge_rule = 1;
   rast.depth_clip_near = 1;
   rast.depth_clip_far = 1;

   c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

   memset(&dsa, 0, sizeof dsa);
   dsa.depth.enabled = 0;
   dsa.depth.writemask = 0;
   dsa.depth.func = PIPE_FUNC_ALWAYS;
   for (i = 0; i < 2; ++i) {
      dsa.stencil[i].enabled = 0;
      dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
      dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].valuemask = 0;
      dsa.stencil[i].writemask = 0;
   }
   dsa.alpha.enabled = 0;
   dsa.alpha.func = PIPE_FUNC_ALWAYS;
   dsa.alpha.ref_value = 0;
   c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
   c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);

   return true;
}
/**
 * Destroy the fixed-function CSOs created by init_pipe_state().
 * Shaders are unbound first to keep softpipe's delete-state asserts happy.
 */
static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   /* Asserted in softpipe_delete_fs_state() for some reason */
   c->pipe->bind_vs_state(c->pipe, NULL);
   c->pipe->bind_fs_state(c->pipe, NULL);

   c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
   c->pipe->delete_blend_state(c->pipe, c->blend_clear);
   c->pipe->delete_blend_state(c->pipe, c->blend_add);
   c->pipe->delete_rasterizer_state(c->pipe, c->rast);
}
/**
 * Set up the vertex buffer description and vertex-element layout:
 * one vertex = vec2 position + vec4 texcoord + vec4 color, all interleaved
 * in buffer 0. The buffer itself is allocated later by u_upload_alloc().
 * Returns true on success.
 */
static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[3];

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.stride = sizeof(struct vertex2f) + sizeof(struct vertex4f) * 2;
   c->vertex_buf.buffer_offset = 0;
   c->vertex_buf.buffer.resource = NULL;
   c->vertex_buf.is_user_buffer = false;

   vertex_elems[0].src_offset = 0;
   vertex_elems[0].instance_divisor = 0;
   vertex_elems[0].vertex_buffer_index = 0;
   vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
   vertex_elems[1].src_offset = sizeof(struct vertex2f);
   vertex_elems[1].instance_divisor = 0;
   vertex_elems[1].vertex_buffer_index = 0;
   vertex_elems[1].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   vertex_elems[2].src_offset = sizeof(struct vertex2f) + sizeof(struct vertex4f);
   vertex_elems[2].instance_divisor = 0;
   vertex_elems[2].vertex_buffer_index = 0;
   vertex_elems[2].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 3, vertex_elems);

   return true;
}
/**
 * Release the vertex-element state and drop the reference on the upload
 * buffer resource.
 */
static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_resource_reference(&c->vertex_buf.buffer.resource, NULL);
}
695 static inline struct u_rect
696 default_rect(struct vl_compositor_layer
*layer
)
698 struct pipe_resource
*res
= layer
->sampler_views
[0]->texture
;
699 struct u_rect rect
= { 0, res
->width0
, 0, res
->height0
* res
->array_size
};
703 static inline struct vertex2f
704 calc_topleft(struct vertex2f size
, struct u_rect rect
)
706 struct vertex2f res
= { rect
.x0
/ size
.x
, rect
.y0
/ size
.y
};
710 static inline struct vertex2f
711 calc_bottomright(struct vertex2f size
, struct u_rect rect
)
713 struct vertex2f res
= { rect
.x1
/ size
.x
, rect
.y1
/ size
.y
};
718 calc_src_and_dst(struct vl_compositor_layer
*layer
, unsigned width
, unsigned height
,
719 struct u_rect src
, struct u_rect dst
)
721 struct vertex2f size
= { width
, height
};
723 layer
->src
.tl
= calc_topleft(size
, src
);
724 layer
->src
.br
= calc_bottomright(size
, src
);
725 layer
->dst
.tl
= calc_topleft(size
, dst
);
726 layer
->dst
.br
= calc_bottomright(size
, dst
);
728 layer
->zw
.y
= size
.y
;
/**
 * Emit the 4-vertex quad for @layer into @vb.
 *
 * Each vertex occupies 5 vec2 slots: position, texcoord.xy, zw (field
 * select + height), and two slots of vec4 color — 20 vec2 per quad total.
 * The destination corners are permuted according to layer->rotate.
 */
static void
gen_rect_verts(struct vertex2f *vb, struct vl_compositor_layer *layer)
{
   struct vertex2f tl, tr, br, bl;

   assert(vb && layer);

   switch (layer->rotate) {
   default:
   case VL_COMPOSITOR_ROTATE_0:
      tl = layer->dst.tl;
      tr.x = layer->dst.br.x;
      tr.y = layer->dst.tl.y;
      br = layer->dst.br;
      bl.x = layer->dst.tl.x;
      bl.y = layer->dst.br.y;
      break;
   case VL_COMPOSITOR_ROTATE_90:
      tl.x = layer->dst.br.x;
      tl.y = layer->dst.tl.y;
      tr = layer->dst.br;
      br.x = layer->dst.tl.x;
      br.y = layer->dst.br.y;
      bl = layer->dst.tl;
      break;
   case VL_COMPOSITOR_ROTATE_180:
      tl = layer->dst.br;
      tr.x = layer->dst.tl.x;
      tr.y = layer->dst.br.y;
      br = layer->dst.tl;
      bl.x = layer->dst.br.x;
      bl.y = layer->dst.tl.y;
      break;
   case VL_COMPOSITOR_ROTATE_270:
      tl.x = layer->dst.tl.x;
      tl.y = layer->dst.br.y;
      tr = layer->dst.tl;
      br.x = layer->dst.br.x;
      br.y = layer->dst.tl.y;
      bl = layer->dst.br;
      break;
   }

   vb[ 0].x = tl.x;
   vb[ 0].y = tl.y;
   vb[ 1].x = layer->src.tl.x;
   vb[ 1].y = layer->src.tl.y;
   vb[ 2] = layer->zw;
   vb[ 3].x = layer->colors[0].x;
   vb[ 3].y = layer->colors[0].y;
   vb[ 4].x = layer->colors[0].z;
   vb[ 4].y = layer->colors[0].w;

   vb[ 5].x = tr.x;
   vb[ 5].y = tr.y;
   vb[ 6].x = layer->src.br.x;
   vb[ 6].y = layer->src.tl.y;
   vb[ 7] = layer->zw;
   vb[ 8].x = layer->colors[1].x;
   vb[ 8].y = layer->colors[1].y;
   vb[ 9].x = layer->colors[1].z;
   vb[ 9].y = layer->colors[1].w;

   vb[10].x = br.x;
   vb[10].y = br.y;
   vb[11].x = layer->src.br.x;
   vb[11].y = layer->src.br.y;
   vb[12] = layer->zw;
   vb[13].x = layer->colors[2].x;
   vb[13].y = layer->colors[2].y;
   vb[14].x = layer->colors[2].z;
   vb[14].y = layer->colors[2].w;

   vb[15].x = bl.x;
   vb[15].y = bl.y;
   vb[16].x = layer->src.tl.x;
   vb[16].y = layer->src.br.y;
   vb[17] = layer->zw;
   vb[18].x = layer->colors[3].x;
   vb[18].y = layer->colors[3].y;
   vb[19].x = layer->colors[3].z;
   vb[19].y = layer->colors[3].w;
}
/**
 * Compute the screen-space rectangle @layer will cover: rotate the
 * normalized dst corners, scale/translate by the layer viewport, then clip
 * to the state's scissor.
 */
static inline struct u_rect
calc_drawn_area(struct vl_compositor_state *s, struct vl_compositor_layer *layer)
{
   struct vertex2f tl, br;
   struct u_rect result;

   assert(s && layer);

   /* remap corners so tl/br stay the screen-space extremes under rotation */
   switch (layer->rotate) {
   default:
   case VL_COMPOSITOR_ROTATE_0:
   case VL_COMPOSITOR_ROTATE_180:
      tl = layer->dst.tl;
      br = layer->dst.br;
      break;
   case VL_COMPOSITOR_ROTATE_90:
      tl.x = layer->dst.br.x;
      tl.y = layer->dst.tl.y;
      br.x = layer->dst.tl.x;
      br.y = layer->dst.br.y;
      break;
   case VL_COMPOSITOR_ROTATE_270:
      tl.x = layer->dst.tl.x;
      tl.y = layer->dst.br.y;
      br.x = layer->dst.br.x;
      br.y = layer->dst.tl.y;
      break;
   }

   /* scale to screen coordinates */
   result.x0 = tl.x * layer->viewport.scale[0] + layer->viewport.translate[0];
   result.y0 = tl.y * layer->viewport.scale[1] + layer->viewport.translate[1];
   result.x1 = br.x * layer->viewport.scale[0] + layer->viewport.translate[0];
   result.y1 = br.y * layer->viewport.scale[1] + layer->viewport.translate[1];

   /* and clip to the scissor */
   result.x0 = MAX2(result.x0, s->scissor.minx);
   result.y0 = MAX2(result.y0, s->scissor.miny);
   result.x1 = MIN2(result.x1, s->scissor.maxx);
   result.y1 = MIN2(result.y1, s->scissor.maxy);

   return result;
}
/**
 * Upload quad vertices for every used layer into the stream uploader.
 *
 * Also fixes up invalid layer viewports to cover the whole framebuffer, and
 * shrinks @dirty to empty when a clearing layer fully covers it (so the
 * caller can skip an explicit clear_render_target).
 */
static void
gen_vertex_data(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
{
   struct vertex2f *vb;
   unsigned i;

   assert(c);

   /* Allocate new memory for vertices. */
   u_upload_alloc(c->pipe->stream_uploader, 0,
                  c->vertex_buf.stride * VL_COMPOSITOR_MAX_LAYERS * 4, /* size */
                  4, /* alignment */
                  &c->vertex_buf.buffer_offset, &c->vertex_buf.buffer.resource,
                  (void **)&vb);

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (s->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &s->layers[i];
         gen_rect_verts(vb, layer);
         vb += 20; /* 20 vec2 slots per quad, see gen_rect_verts() */

         if (!layer->viewport_valid) {
            layer->viewport.scale[0] = c->fb_state.width;
            layer->viewport.scale[1] = c->fb_state.height;
            layer->viewport.translate[0] = 0;
            layer->viewport.translate[1] = 0;
         }

         if (dirty && layer->clearing) {
            struct u_rect drawn = calc_drawn_area(s, layer);
            if (dirty->x0 >= drawn.x0 &&
                dirty->y0 >= drawn.y0 &&
                dirty->x1 <= drawn.x1 &&
                dirty->y1 <= drawn.y1) {

               // We clear the dirty area anyway, no need for clear_render_target
               dirty->x0 = dirty->y0 = VL_COMPOSITOR_MAX_DIRTY;
               dirty->x1 = dirty->y1 = VL_COMPOSITOR_MIN_DIRTY;
            }
         }
      }
   }

   u_upload_unmap(c->pipe->stream_uploader);
}
/**
 * Issue one quad draw per used layer with that layer's blend, viewport,
 * fragment shader and sampler views bound. Layer 0 uses the opaque "clear"
 * blend unless the layer supplies its own; later layers default to additive
 * alpha blending. Each drawn area is accumulated into @dirty.
 */
static void
draw_layers(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (s->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &s->layers[i];
         struct pipe_sampler_view **samplers = &layer->sampler_views[0];
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;
         void *blend = layer->blend ? layer->blend : i ? c->blend_add : c->blend_clear;

         c->pipe->bind_blend_state(c->pipe, blend);
         c->pipe->set_viewport_states(c->pipe, 0, 1, &layer->viewport);
         c->pipe->bind_fs_state(c->pipe, layer->fs);
         c->pipe->bind_sampler_states(c->pipe, PIPE_SHADER_FRAGMENT, 0,
                                      num_sampler_views, layer->samplers);
         c->pipe->set_sampler_views(c->pipe, PIPE_SHADER_FRAGMENT, 0,
                                    num_sampler_views, samplers);

         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;

         if (dirty) {
            // Remember the currently drawn area as dirty for the next draw command
            struct u_rect drawn = calc_drawn_area(s, layer);
            dirty->x0 = MIN2(drawn.x0, dirty->x0);
            dirty->y0 = MIN2(drawn.y0, dirty->y0);
            dirty->x1 = MAX2(drawn.x1, dirty->x1);
            dirty->y1 = MAX2(drawn.y1, dirty->y1);
         }
      }
   }
}
/**
 * Configure @layer to output one YCbCr plane of an interlaced @buffer.
 *
 * Picks bob (single field, shifted by half a line toward the missing field)
 * or weave deinterlacing, and the luma vs. chroma variant of the shader.
 */
static void
set_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
              unsigned layer, struct pipe_video_buffer *buffer,
              struct u_rect *src_rect, struct u_rect *dst_rect,
              bool y, enum vl_compositor_deinterlace deinterlace)
{
   struct pipe_sampler_view **sampler_views;
   float half_a_line;
   unsigned i;

   assert(s && c && buffer);
   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   s->used_layers |= 1 << layer;
   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      s->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));

   half_a_line = 0.5f / s->layers[layer].zw.y;

   switch (deinterlace) {
   case VL_COMPOSITOR_BOB_TOP:
      s->layers[layer].zw.x = 0.0f; /* sample the top field layer */
      s->layers[layer].src.tl.y += half_a_line;
      s->layers[layer].src.br.y += half_a_line;
      s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
      break;

   case VL_COMPOSITOR_BOB_BOTTOM:
      s->layers[layer].zw.x = 1.0f; /* sample the bottom field layer */
      s->layers[layer].src.tl.y -= half_a_line;
      s->layers[layer].src.br.y -= half_a_line;
      s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
      break;

   default:
      s->layers[layer].fs = (y) ? c->fs_yuv.weave.y : c->fs_yuv.weave.uv;
      break;
   }
}
/**
 * Configure @layer to convert an RGB sampler view @v into one YCbCr plane
 * (@y selects luma vs. chroma pass) using a reverse BT.709 matrix.
 */
static void
set_rgb_to_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
                     unsigned layer, struct pipe_sampler_view *v,
                     struct u_rect *src_rect, struct u_rect *dst_rect, bool y)
{
   vl_csc_matrix csc_matrix;

   assert(s && c && v);
   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   s->used_layers |= 1 << layer;

   s->layers[layer].fs = y ? c->fs_rgb_yuv.y : c->fs_rgb_yuv.uv;

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_709_REV, NULL, false, &csc_matrix);
   vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix, 1.0f, 0.0f);

   s->layers[layer].samplers[0] = c->sampler_linear;
   s->layers[layer].samplers[1] = NULL;
   s->layers[layer].samplers[2] = NULL;

   pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], v);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);

   calc_src_and_dst(&s->layers[layer], v->texture->width0, v->texture->height0,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
}
/**
 * Mark the whole target as dirty (MIN..MAX sentinel rectangle).
 */
void
vl_compositor_reset_dirty_area(struct u_rect *dirty)
{
   assert(dirty);

   dirty->x0 = dirty->y0 = VL_COMPOSITOR_MIN_DIRTY;
   dirty->x1 = dirty->y1 = VL_COMPOSITOR_MAX_DIRTY;
}
/**
 * Set the color used when clearing the render target.
 */
void
vl_compositor_set_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
{
   assert(s);
   assert(color);

   s->clear_color = *color;
}
/**
 * Read back the current clear color into @color.
 */
void
vl_compositor_get_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
{
   assert(s);
   assert(color);

   *color = s->clear_color;
}
/**
 * Reset all layers to their defaults and drop their sampler-view
 * references. Layer 0 keeps its "clearing" role; colors reset to opaque
 * white.
 */
void
vl_compositor_clear_layers(struct vl_compositor_state *s)
{
   unsigned i, j;

   assert(s);

   s->used_layers = 0;
   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      struct vertex4f v_one = { 1.0f, 1.0f, 1.0f, 1.0f };
      s->layers[i].clearing = i ? false : true;
      s->layers[i].blend = NULL;
      s->layers[i].fs = NULL;
      s->layers[i].viewport.scale[2] = 1;
      s->layers[i].viewport.translate[2] = 0;
      s->layers[i].rotate = VL_COMPOSITOR_ROTATE_0;

      for (j = 0; j < 3; j++)
         pipe_sampler_view_reference(&s->layers[i].sampler_views[j], NULL);
      for (j = 0; j < 4; ++j)
         s->layers[i].colors[j] = v_one;
   }
}
/**
 * Tear down everything vl_compositor_init() created.
 *
 * NOTE(review): the buffer/shader cleanup calls were lost in extraction and
 * are reconstructed from the matching init path — verify against the
 * project tree.
 */
void
vl_compositor_cleanup(struct vl_compositor *c)
{
   assert(c);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);
}
/**
 * Upload the color-space-conversion matrix plus the luma-key bounds into the
 * constant buffer consumed by create_frag_shader_csc().
 * Returns false if mapping the buffer fails.
 */
bool
vl_compositor_set_csc_matrix(struct vl_compositor_state *s,
                             vl_csc_matrix const *matrix,
                             float luma_min, float luma_max)
{
   struct pipe_transfer *buf_transfer;

   assert(s);

   float *ptr = pipe_buffer_map(s->pipe, s->csc_matrix,
                                PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE,
                                &buf_transfer);

   if (!ptr)
      return false;

   memcpy(ptr, matrix, sizeof(vl_csc_matrix));

   /* luma-key min/max live right after the matrix (constant 3 in the FS) */
   ptr += sizeof(vl_csc_matrix) / sizeof(float);
   ptr[0] = luma_min;
   ptr[1] = luma_max;

   pipe_buffer_unmap(s->pipe, buf_transfer);

   return true;
}
1114 vl_compositor_set_dst_clip(struct vl_compositor_state
*s
, struct u_rect
*dst_clip
)
1118 s
->scissor_valid
= dst_clip
!= NULL
;
1120 s
->scissor
.minx
= dst_clip
->x0
;
1121 s
->scissor
.miny
= dst_clip
->y0
;
1122 s
->scissor
.maxx
= dst_clip
->x1
;
1123 s
->scissor
.maxy
= dst_clip
->y1
;
1128 vl_compositor_set_layer_blend(struct vl_compositor_state
*s
,
1129 unsigned layer
, void *blend
,
1134 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1136 s
->layers
[layer
].clearing
= is_clearing
;
1137 s
->layers
[layer
].blend
= blend
;
1141 vl_compositor_set_layer_dst_area(struct vl_compositor_state
*s
,
1142 unsigned layer
, struct u_rect
*dst_area
)
1146 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1148 s
->layers
[layer
].viewport_valid
= dst_area
!= NULL
;
1150 s
->layers
[layer
].viewport
.scale
[0] = dst_area
->x1
- dst_area
->x0
;
1151 s
->layers
[layer
].viewport
.scale
[1] = dst_area
->y1
- dst_area
->y0
;
1152 s
->layers
[layer
].viewport
.translate
[0] = dst_area
->x0
;
1153 s
->layers
[layer
].viewport
.translate
[1] = dst_area
->y0
;
1158 vl_compositor_set_buffer_layer(struct vl_compositor_state
*s
,
1159 struct vl_compositor
*c
,
1161 struct pipe_video_buffer
*buffer
,
1162 struct u_rect
*src_rect
,
1163 struct u_rect
*dst_rect
,
1164 enum vl_compositor_deinterlace deinterlace
)
1166 struct pipe_sampler_view
**sampler_views
;
1169 assert(s
&& c
&& buffer
);
1171 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1173 s
->used_layers
|= 1 << layer
;
1174 sampler_views
= buffer
->get_sampler_view_components(buffer
);
1175 for (i
= 0; i
< 3; ++i
) {
1176 s
->layers
[layer
].samplers
[i
] = c
->sampler_linear
;
1177 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[i
], sampler_views
[i
]);
1180 calc_src_and_dst(&s
->layers
[layer
], buffer
->width
, buffer
->height
,
1181 src_rect
? *src_rect
: default_rect(&s
->layers
[layer
]),
1182 dst_rect
? *dst_rect
: default_rect(&s
->layers
[layer
]));
1184 if (buffer
->interlaced
) {
1185 float half_a_line
= 0.5f
/ s
->layers
[layer
].zw
.y
;
1186 switch(deinterlace
) {
1187 case VL_COMPOSITOR_WEAVE
:
1188 s
->layers
[layer
].fs
= c
->fs_weave_rgb
;
1191 case VL_COMPOSITOR_BOB_TOP
:
1192 s
->layers
[layer
].zw
.x
= 0.0f
;
1193 s
->layers
[layer
].src
.tl
.y
+= half_a_line
;
1194 s
->layers
[layer
].src
.br
.y
+= half_a_line
;
1195 s
->layers
[layer
].fs
= c
->fs_video_buffer
;
1198 case VL_COMPOSITOR_BOB_BOTTOM
:
1199 s
->layers
[layer
].zw
.x
= 1.0f
;
1200 s
->layers
[layer
].src
.tl
.y
-= half_a_line
;
1201 s
->layers
[layer
].src
.br
.y
-= half_a_line
;
1202 s
->layers
[layer
].fs
= c
->fs_video_buffer
;
1207 s
->layers
[layer
].fs
= c
->fs_video_buffer
;
1211 vl_compositor_set_palette_layer(struct vl_compositor_state
*s
,
1212 struct vl_compositor
*c
,
1214 struct pipe_sampler_view
*indexes
,
1215 struct pipe_sampler_view
*palette
,
1216 struct u_rect
*src_rect
,
1217 struct u_rect
*dst_rect
,
1218 bool include_color_conversion
)
1220 assert(s
&& c
&& indexes
&& palette
);
1222 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1224 s
->used_layers
|= 1 << layer
;
1226 s
->layers
[layer
].fs
= include_color_conversion
?
1227 c
->fs_palette
.yuv
: c
->fs_palette
.rgb
;
1229 s
->layers
[layer
].samplers
[0] = c
->sampler_linear
;
1230 s
->layers
[layer
].samplers
[1] = c
->sampler_nearest
;
1231 s
->layers
[layer
].samplers
[2] = NULL
;
1232 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[0], indexes
);
1233 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[1], palette
);
1234 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[2], NULL
);
1235 calc_src_and_dst(&s
->layers
[layer
], indexes
->texture
->width0
, indexes
->texture
->height0
,
1236 src_rect
? *src_rect
: default_rect(&s
->layers
[layer
]),
1237 dst_rect
? *dst_rect
: default_rect(&s
->layers
[layer
]));
1241 vl_compositor_set_rgba_layer(struct vl_compositor_state
*s
,
1242 struct vl_compositor
*c
,
1244 struct pipe_sampler_view
*rgba
,
1245 struct u_rect
*src_rect
,
1246 struct u_rect
*dst_rect
,
1247 struct vertex4f
*colors
)
1251 assert(s
&& c
&& rgba
);
1253 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1255 s
->used_layers
|= 1 << layer
;
1256 s
->layers
[layer
].fs
= c
->fs_rgba
;
1257 s
->layers
[layer
].samplers
[0] = c
->sampler_linear
;
1258 s
->layers
[layer
].samplers
[1] = NULL
;
1259 s
->layers
[layer
].samplers
[2] = NULL
;
1260 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[0], rgba
);
1261 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[1], NULL
);
1262 pipe_sampler_view_reference(&s
->layers
[layer
].sampler_views
[2], NULL
);
1263 calc_src_and_dst(&s
->layers
[layer
], rgba
->texture
->width0
, rgba
->texture
->height0
,
1264 src_rect
? *src_rect
: default_rect(&s
->layers
[layer
]),
1265 dst_rect
? *dst_rect
: default_rect(&s
->layers
[layer
]));
1268 for (i
= 0; i
< 4; ++i
)
1269 s
->layers
[layer
].colors
[i
] = colors
[i
];
1273 vl_compositor_set_layer_rotation(struct vl_compositor_state
*s
,
1275 enum vl_compositor_rotation rotate
)
1278 assert(layer
< VL_COMPOSITOR_MAX_LAYERS
);
1279 s
->layers
[layer
].rotate
= rotate
;
1283 vl_compositor_yuv_deint_full(struct vl_compositor_state
*s
,
1284 struct vl_compositor
*c
,
1285 struct pipe_video_buffer
*src
,
1286 struct pipe_video_buffer
*dst
,
1287 struct u_rect
*src_rect
,
1288 struct u_rect
*dst_rect
,
1289 enum vl_compositor_deinterlace deinterlace
)
1291 struct pipe_surface
**dst_surfaces
;
1293 dst_surfaces
= dst
->get_surfaces(dst
);
1294 vl_compositor_clear_layers(s
);
1296 set_yuv_layer(s
, c
, 0, src
, src_rect
, NULL
, true, deinterlace
);
1297 vl_compositor_set_layer_dst_area(s
, 0, dst_rect
);
1298 vl_compositor_render(s
, c
, dst_surfaces
[0], NULL
, false);
1305 set_yuv_layer(s
, c
, 0, src
, src_rect
, NULL
, false, deinterlace
);
1306 vl_compositor_set_layer_dst_area(s
, 0, dst_rect
);
1307 vl_compositor_render(s
, c
, dst_surfaces
[1], NULL
, false);
1309 s
->pipe
->flush(s
->pipe
, NULL
, 0);
1313 vl_compositor_convert_rgb_to_yuv(struct vl_compositor_state
*s
,
1314 struct vl_compositor
*c
,
1316 struct pipe_resource
*src_res
,
1317 struct pipe_video_buffer
*dst
,
1318 struct u_rect
*src_rect
,
1319 struct u_rect
*dst_rect
)
1321 struct pipe_sampler_view
*sv
, sv_templ
;
1322 struct pipe_surface
**dst_surfaces
;
1324 dst_surfaces
= dst
->get_surfaces(dst
);
1326 memset(&sv_templ
, 0, sizeof(sv_templ
));
1327 u_sampler_view_default_template(&sv_templ
, src_res
, src_res
->format
);
1328 sv
= s
->pipe
->create_sampler_view(s
->pipe
, src_res
, &sv_templ
);
1330 vl_compositor_clear_layers(s
);
1332 set_rgb_to_yuv_layer(s
, c
, 0, sv
, src_rect
, NULL
, true);
1333 vl_compositor_set_layer_dst_area(s
, 0, dst_rect
);
1334 vl_compositor_render(s
, c
, dst_surfaces
[0], NULL
, false);
1341 set_rgb_to_yuv_layer(s
, c
, 0, sv
, src_rect
, NULL
, false);
1342 vl_compositor_set_layer_dst_area(s
, 0, dst_rect
);
1343 vl_compositor_render(s
, c
, dst_surfaces
[1], NULL
, false);
1344 pipe_sampler_view_reference(&sv
, NULL
);
1346 s
->pipe
->flush(s
->pipe
, NULL
, 0);
1350 vl_compositor_render(struct vl_compositor_state
*s
,
1351 struct vl_compositor
*c
,
1352 struct pipe_surface
*dst_surface
,
1353 struct u_rect
*dirty_area
,
1357 assert(dst_surface
);
1359 c
->fb_state
.width
= dst_surface
->width
;
1360 c
->fb_state
.height
= dst_surface
->height
;
1361 c
->fb_state
.cbufs
[0] = dst_surface
;
1363 if (!s
->scissor_valid
) {
1364 s
->scissor
.minx
= 0;
1365 s
->scissor
.miny
= 0;
1366 s
->scissor
.maxx
= dst_surface
->width
;
1367 s
->scissor
.maxy
= dst_surface
->height
;
1369 c
->pipe
->set_scissor_states(c
->pipe
, 0, 1, &s
->scissor
);
1371 gen_vertex_data(c
, s
, dirty_area
);
1373 if (clear_dirty
&& dirty_area
&&
1374 (dirty_area
->x0
< dirty_area
->x1
|| dirty_area
->y0
< dirty_area
->y1
)) {
1376 c
->pipe
->clear_render_target(c
->pipe
, dst_surface
, &s
->clear_color
,
1377 0, 0, dst_surface
->width
, dst_surface
->height
, false);
1378 dirty_area
->x0
= dirty_area
->y0
= VL_COMPOSITOR_MAX_DIRTY
;
1379 dirty_area
->x1
= dirty_area
->y1
= VL_COMPOSITOR_MIN_DIRTY
;
1382 c
->pipe
->set_framebuffer_state(c
->pipe
, &c
->fb_state
);
1383 c
->pipe
->bind_vs_state(c
->pipe
, c
->vs
);
1384 c
->pipe
->set_vertex_buffers(c
->pipe
, 0, 1, &c
->vertex_buf
);
1385 c
->pipe
->bind_vertex_elements_state(c
->pipe
, c
->vertex_elems_state
);
1386 pipe_set_constant_buffer(c
->pipe
, PIPE_SHADER_FRAGMENT
, 0, s
->csc_matrix
);
1387 c
->pipe
->bind_rasterizer_state(c
->pipe
, c
->rast
);
1389 draw_layers(c
, s
, dirty_area
);
1393 vl_compositor_init(struct vl_compositor
*c
, struct pipe_context
*pipe
)
1397 memset(c
, 0, sizeof(*c
));
1401 if (!init_pipe_state(c
)) {
1405 if (!init_shaders(c
)) {
1406 cleanup_pipe_state(c
);
1410 if (!init_buffers(c
)) {
1412 cleanup_pipe_state(c
);
1420 vl_compositor_init_state(struct vl_compositor_state
*s
, struct pipe_context
*pipe
)
1422 vl_csc_matrix csc_matrix
;
1426 memset(s
, 0, sizeof(*s
));
1430 s
->clear_color
.f
[0] = s
->clear_color
.f
[1] = 0.0f
;
1431 s
->clear_color
.f
[2] = s
->clear_color
.f
[3] = 0.0f
;
1434 * Create our fragment shader's constant buffer
1435 * Const buffer contains the color conversion matrix and bias vectors
1437 /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
1438 s
->csc_matrix
= pipe_buffer_create_const0
1441 PIPE_BIND_CONSTANT_BUFFER
,
1443 sizeof(csc_matrix
) + 2*sizeof(float)
1449 vl_compositor_clear_layers(s
);
1451 vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY
, NULL
, true, &csc_matrix
);
1452 if (!vl_compositor_set_csc_matrix(s
, (const vl_csc_matrix
*)&csc_matrix
, 1.0f
, 0.0f
))
1459 vl_compositor_cleanup_state(struct vl_compositor_state
*s
)
1463 vl_compositor_clear_layers(s
);
1464 pipe_resource_reference(&s
->csc_matrix
, NULL
);