turnip: don't require src image to be set for clear blits
[mesa.git] / src/freedreno/vulkan/tu_blit.c
/*
 * Copyright © 2019 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jonathan Marek <jonathan@marek.ca>
 *
 */

#include "tu_blit.h"

#include "a6xx.xml.h"
#include "adreno_common.xml.h"
#include "adreno_pm4.xml.h"

#include "vk_format.h"

#include "tu_cs.h"

/* TODO:
 *   - Avoid disabling tiling for swapped formats
 *     (image_to_image copy doesn't deal with it)
 *   - Fix d24_unorm_s8_uint support & aspects
 *   - UBWC
 */

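/* Map a format to a same-size UINT format, so copies of compressed and
 * otherwise unsupported formats can be emitted as raw data moves.
 */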
static VkFormat
blit_copy_format(VkFormat format)
{
   switch (vk_format_get_blocksizebits(format)) {
   case 8:  return VK_FORMAT_R8_UINT;
   case 16: return VK_FORMAT_R16_UINT;
   case 32: return VK_FORMAT_R32_UINT;
   case 64: return VK_FORMAT_R32G32_UINT;
   case 96: return VK_FORMAT_R32G32B32_UINT;
   case 128:return VK_FORMAT_R32G32B32A32_UINT;
   default:
      unreachable("unhandled format size");
   }
}

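/* Build the format/layout info dword used for both the 2D source
 * (SP_PS_2D_SRC_INFO) and destination (RB_2D_DST_INFO) descriptors:
 * color format, tile mode, component swap, sRGB and UBWC flags.
 * Z24_UNORM_S8_UINT is handled as an equivalent R8G8B8A8 format, and
 * stencil-only reads override the component swap.
 */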
static uint32_t
blit_image_info(const struct tu_blit_surf *img, bool src, bool stencil_read)
{
   const struct tu_native_format *fmt = tu6_get_native_format(img->fmt);
   enum a6xx_color_fmt rb = fmt->rb;
   enum a3xx_color_swap swap = img->tiled ? WZYX : fmt->swap;
   if (rb == RB6_R10G10B10A2_UNORM && src)
      rb = RB6_R10G10B10A2_FLOAT16;
   if (rb == RB6_Z24_UNORM_S8_UINT)
      rb = RB6_Z24_UNORM_S8_UINT_AS_R8G8B8A8;

   if (stencil_read)
      swap = XYZW;

   return A6XX_SP_PS_2D_SRC_INFO_COLOR_FORMAT(rb) |
          A6XX_SP_PS_2D_SRC_INFO_TILE_MODE(img->tile_mode) |
          A6XX_SP_PS_2D_SRC_INFO_COLOR_SWAP(swap) |
          COND(vk_format_is_srgb(img->fmt), A6XX_SP_PS_2D_SRC_INFO_SRGB) |
          COND(img->ubwc_size, A6XX_SP_PS_2D_SRC_INFO_FLAGS);
}

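/* Emit one 2D engine blit (CP_BLIT with BLIT_OP_SCALE) for a single
 * rectangle: blit control, source state (or the solid clear color),
 * destination state, coordinates, and the format overrides needed for
 * Z24S8 and RGB10A2.
 */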
static void
emit_blit_step(struct tu_cmd_buffer *cmdbuf, const struct tu_blit *blt)
{
   struct tu_cs *cs = &cmdbuf->cs;

   tu_cs_reserve_space(cmdbuf->device, cs, 66);

   enum a6xx_color_fmt fmt = tu6_get_native_format(blt->dst.fmt)->rb;
   if (fmt == RB6_Z24_UNORM_S8_UINT)
      fmt = RB6_Z24_UNORM_S8_UINT_AS_R8G8B8A8;

   enum a6xx_2d_ifmt ifmt = tu6_rb_fmt_to_ifmt(fmt);

   if (vk_format_is_srgb(blt->dst.fmt)) {
      assert(ifmt == R2D_UNORM8);
      ifmt = R2D_UNORM8_SRGB;
   }

   uint32_t blit_cntl = A6XX_RB_2D_BLIT_CNTL_ROTATE(blt->rotation) |
                        COND(blt->type == TU_BLIT_CLEAR, A6XX_RB_2D_BLIT_CNTL_SOLID_COLOR) |
                        A6XX_RB_2D_BLIT_CNTL_COLOR_FORMAT(fmt) | /* not required? */
                        COND(fmt == RB6_Z24_UNORM_S8_UINT_AS_R8G8B8A8, A6XX_RB_2D_BLIT_CNTL_D24S8) |
                        A6XX_RB_2D_BLIT_CNTL_MASK(0xf) |
                        A6XX_RB_2D_BLIT_CNTL_IFMT(ifmt);

   tu_cs_emit_pkt4(&cmdbuf->cs, REG_A6XX_RB_2D_BLIT_CNTL, 1);
   tu_cs_emit(&cmdbuf->cs, blit_cntl);

   tu_cs_emit_pkt4(&cmdbuf->cs, REG_A6XX_GRAS_2D_BLIT_CNTL, 1);
   tu_cs_emit(&cmdbuf->cs, blit_cntl);

   /*
    * Emit source:
    */
   if (blt->type == TU_BLIT_CLEAR) {
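      /* a clear needs no source surface, only the packed clear value */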
      tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_SRC_SOLID_C0, 4);
      tu_cs_emit(cs, blt->clear_value[0]);
      tu_cs_emit(cs, blt->clear_value[1]);
      tu_cs_emit(cs, blt->clear_value[2]);
      tu_cs_emit(cs, blt->clear_value[3]);
   } else {
      tu_cs_emit_pkt4(cs, REG_A6XX_SP_PS_2D_SRC_INFO, 10);
      tu_cs_emit(cs, blit_image_info(&blt->src, true, blt->stencil_read) |
                     A6XX_SP_PS_2D_SRC_INFO_SAMPLES(tu_msaa_samples(blt->src.samples)) |
                     /* TODO: should disable this bit for integer formats ? */
                     COND(blt->src.samples > 1, A6XX_SP_PS_2D_SRC_INFO_SAMPLES_AVERAGE) |
                     COND(blt->filter, A6XX_SP_PS_2D_SRC_INFO_FILTER) |
                     0x500000);
      tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_SIZE_WIDTH(blt->src.x + blt->src.width) |
                     A6XX_SP_PS_2D_SRC_SIZE_HEIGHT(blt->src.y + blt->src.height));
      tu_cs_emit_qw(cs, blt->src.va);
      tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_PITCH_PITCH(blt->src.pitch));

      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);

      if (blt->src.ubwc_size) {
         tu_cs_emit_pkt4(cs, REG_A6XX_SP_PS_2D_SRC_FLAGS_LO, 6);
         tu_cs_emit_qw(cs, blt->src.ubwc_va);
         tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_FLAGS_PITCH_PITCH(blt->src.ubwc_pitch) |
                        A6XX_SP_PS_2D_SRC_FLAGS_PITCH_ARRAY_PITCH(blt->src.ubwc_size >> 2));
         tu_cs_emit(cs, 0x00000000);
         tu_cs_emit(cs, 0x00000000);
         tu_cs_emit(cs, 0x00000000);
      }
   }

   /*
    * Emit destination:
    */
   tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_DST_INFO, 9);
   tu_cs_emit(cs, blit_image_info(&blt->dst, false, false));
   tu_cs_emit_qw(cs, blt->dst.va);
   tu_cs_emit(cs, A6XX_RB_2D_DST_SIZE_PITCH(blt->dst.pitch));
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);

   if (blt->dst.ubwc_size) {
      tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_DST_FLAGS_LO, 6);
      tu_cs_emit_qw(cs, blt->dst.ubwc_va);
      tu_cs_emit(cs, A6XX_RB_2D_DST_FLAGS_PITCH_PITCH(blt->dst.ubwc_pitch) |
                     A6XX_RB_2D_DST_FLAGS_PITCH_ARRAY_PITCH(blt->dst.ubwc_size >> 2));
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
   }

   tu_cs_emit_pkt4(cs, REG_A6XX_GRAS_2D_SRC_TL_X, 4);
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_TL_X_X(blt->src.x));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_BR_X_X(blt->src.x + blt->src.width - 1));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_TL_Y_Y(blt->src.y));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_BR_Y_Y(blt->src.y + blt->src.height - 1));

   tu_cs_emit_pkt4(cs, REG_A6XX_GRAS_2D_DST_TL, 2);
   tu_cs_emit(cs, A6XX_GRAS_2D_DST_TL_X(blt->dst.x) |
                  A6XX_GRAS_2D_DST_TL_Y(blt->dst.y));
   tu_cs_emit(cs, A6XX_GRAS_2D_DST_BR_X(blt->dst.x + blt->dst.width - 1) |
                  A6XX_GRAS_2D_DST_BR_Y(blt->dst.y + blt->dst.height - 1));

   tu_cs_emit_pkt7(cs, CP_EVENT_WRITE, 1);
   tu_cs_emit(cs, 0x3f);
   tu_cs_emit_wfi(cs);

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8C01, 1);
   tu_cs_emit(cs, 0);

   if (fmt == RB6_R10G10B10A2_UNORM)
      fmt = RB6_R16G16B16A16_FLOAT;

   tu_cs_emit_pkt4(cs, REG_A6XX_SP_2D_SRC_FORMAT, 1);
   tu_cs_emit(cs, COND(vk_format_is_sint(blt->src.fmt), A6XX_SP_2D_SRC_FORMAT_SINT) |
                  COND(vk_format_is_uint(blt->src.fmt), A6XX_SP_2D_SRC_FORMAT_UINT) |
                  A6XX_SP_2D_SRC_FORMAT_COLOR_FORMAT(fmt) |
                  COND(ifmt == R2D_UNORM8_SRGB, A6XX_SP_2D_SRC_FORMAT_SRGB) |
                  A6XX_SP_2D_SRC_FORMAT_MASK(0xf));

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8E04, 1);
   tu_cs_emit(cs, 0x01000000);

   tu_cs_emit_pkt7(cs, CP_BLIT, 1);
   tu_cs_emit(cs, CP_BLIT_0_OP(BLIT_OP_SCALE));

   tu_cs_emit_wfi(cs);

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8E04, 1);
   tu_cs_emit(cs, 0);
}

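/* Normalize the blit parameters (compressed, packed and multisampled
 * formats are rewritten as plain copy formats), then emit one or more 2D
 * blit steps per layer, splitting copies the 2D engine cannot express
 * directly into per-line or per-chunk blits.
 */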
void tu_blit(struct tu_cmd_buffer *cmdbuf, struct tu_blit *blt)
{
   switch (blt->type) {
   case TU_BLIT_COPY:
      blt->stencil_read =
         blt->dst.fmt == VK_FORMAT_R8_UNORM &&
         blt->src.fmt == VK_FORMAT_D24_UNORM_S8_UINT;

      assert(vk_format_get_blocksize(blt->dst.fmt) ==
             vk_format_get_blocksize(blt->src.fmt) || blt->stencil_read);
      assert(blt->src.samples == blt->dst.samples);

      if (vk_format_is_compressed(blt->src.fmt)) {
         unsigned block_width = vk_format_get_blockwidth(blt->src.fmt);
         unsigned block_height = vk_format_get_blockheight(blt->src.fmt);

         blt->src.pitch /= block_width;
         blt->src.x /= block_width;
         blt->src.y /= block_height;
         blt->src.fmt = blit_copy_format(blt->src.fmt);

         /* for image_to_image copy, width/height is on the src format */
         blt->dst.width = blt->src.width = DIV_ROUND_UP(blt->src.width, block_width);
         blt->dst.height = blt->src.height = DIV_ROUND_UP(blt->src.height, block_height);
      }

      if (vk_format_is_compressed(blt->dst.fmt)) {
         unsigned block_width = vk_format_get_blockwidth(blt->dst.fmt);
         unsigned block_height = vk_format_get_blockheight(blt->dst.fmt);

         blt->dst.pitch /= block_width;
         blt->dst.x /= block_width;
         blt->dst.y /= block_height;
         blt->dst.fmt = blit_copy_format(blt->dst.fmt);
      }

      if (blt->dst.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->dst.fmt = blit_copy_format(blt->dst.fmt);

      if (blt->src.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->src.fmt = blit_copy_format(blt->src.fmt);

      /* TODO: multisample image copy does not work correctly with tiling/UBWC */
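      /* copy as single-sampled, treating each sample as an extra texel in x */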
      blt->src.x *= blt->src.samples;
      blt->dst.x *= blt->dst.samples;
      blt->src.width *= blt->src.samples;
      blt->dst.width *= blt->dst.samples;
      blt->src.samples = 1;
      blt->dst.samples = 1;
      break;
   case TU_BLIT_CLEAR:
      /* unsupported format cleared as UINT32 */
      if (blt->dst.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->dst.fmt = VK_FORMAT_R32_UINT;
      assert(blt->dst.samples == 1); /* TODO */
      blt->src = blt->dst;
      break;
   default:
      assert(blt->dst.samples == 1);
   }

   tu_cs_reserve_space(cmdbuf->device, &cmdbuf->cs, 18);

   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, LRZ_FLUSH, false);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, 0x1d, true);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, FACENESS_FLUSH, true);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, PC_CCU_INVALIDATE_COLOR, false);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, PC_CCU_INVALIDATE_DEPTH, false);

   /* buffer copy setup */
   tu_cs_emit_pkt7(&cmdbuf->cs, CP_SET_MARKER, 1);
   tu_cs_emit(&cmdbuf->cs, A6XX_CP_SET_MARKER_0_MODE(RM6_BLIT2DSCALE));

   for (unsigned layer = 0; layer < blt->layers; layer++) {
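      /* Buffer copies are emitted as a single row of texels, split into
       * chunks so that each chunk's base address is 64-byte aligned (the
       * low bits are folded into the x offset) and its width stays below
       * the 0x4000 texel limit.
       */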
      if (blt->buffer) {
         struct tu_blit line_blt = *blt;
         uint64_t dst_va = line_blt.dst.va, src_va = line_blt.src.va;
         unsigned blocksize = vk_format_get_blocksize(blt->src.fmt);
         uint32_t size = line_blt.src.width, tmp;

         while (size) {
            line_blt.src.x = (src_va & 63) / blocksize;
            line_blt.src.va = src_va & ~63;
            tmp = MIN2(size, 0x4000 - line_blt.src.x);

            line_blt.dst.x = (dst_va & 63) / blocksize;
            line_blt.dst.va = dst_va & ~63;
            tmp = MIN2(tmp, 0x4000 - line_blt.dst.x);

            line_blt.src.width = line_blt.dst.width = tmp;

            emit_blit_step(cmdbuf, &line_blt);

            src_va += tmp * blocksize;
            dst_va += tmp * blocksize;
            size -= tmp;
         }
      } else if ((blt->src.va & 63) || (blt->src.pitch & 63)) {
         /* per line copy path (buffer_to_image): the source address or
          * pitch isn't 64-byte aligned, so blit one row at a time, folding
          * the low address bits into the x offset
          */
         assert(blt->type == TU_BLIT_COPY && !blt->src.tiled);
         struct tu_blit line_blt = *blt;
         uint64_t src_va = line_blt.src.va + blt->src.pitch * blt->src.y;

         line_blt.src.y = 0;
         line_blt.src.pitch = 0;
         line_blt.src.height = 1;
         line_blt.dst.height = 1;

         for (unsigned y = 0; y < blt->src.height; y++) {
            line_blt.src.x = blt->src.x + (src_va & 63) / vk_format_get_blocksize(blt->src.fmt);
            line_blt.src.va = src_va & ~63;

            emit_blit_step(cmdbuf, &line_blt);

            line_blt.dst.y++;
            src_va += blt->src.pitch;
         }
      } else if ((blt->dst.va & 63) || (blt->dst.pitch & 63)) {
         /* per line copy path (image_to_buffer): same as above, but for an
          * unaligned destination address or pitch
          */
         assert(blt->type == TU_BLIT_COPY && !blt->dst.tiled);
         struct tu_blit line_blt = *blt;
         uint64_t dst_va = line_blt.dst.va + blt->dst.pitch * blt->dst.y;

         line_blt.dst.y = 0;
         line_blt.dst.pitch = 0;
         line_blt.src.height = 1;
         line_blt.dst.height = 1;

         for (unsigned y = 0; y < blt->src.height; y++) {
            line_blt.dst.x = blt->dst.x + (dst_va & 63) / vk_format_get_blocksize(blt->dst.fmt);
            line_blt.dst.va = dst_va & ~63;

            emit_blit_step(cmdbuf, &line_blt);

            line_blt.src.y++;
            dst_va += blt->dst.pitch;
         }
      } else {
         emit_blit_step(cmdbuf, blt);
      }
      blt->dst.va += blt->dst.layer_size;
      blt->src.va += blt->src.layer_size;
      blt->dst.ubwc_va += blt->dst.ubwc_size;
      blt->src.ubwc_va += blt->src.ubwc_size;
   }

   tu_cs_reserve_space(cmdbuf->device, &cmdbuf->cs, 17);

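   /* flush and invalidate caches so later commands see the blit results */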
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, 0x1d, true);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, FACENESS_FLUSH, true);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, CACHE_FLUSH_TS, true);
   tu6_emit_event_write(cmdbuf, &cmdbuf->cs, CACHE_INVALIDATE, false);
}