turnip: automatically reserve cmdstream space in emit_pkt4/emit_pkt7
[mesa.git] src/freedreno/vulkan/tu_blit.c
/*
 * Copyright © 2019 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jonathan Marek <jonathan@marek.ca>
 *
 */

#include "tu_blit.h"

#include "a6xx.xml.h"
#include "adreno_common.xml.h"
#include "adreno_pm4.xml.h"

#include "vk_format.h"

#include "tu_cs.h"

/* TODO:
 *   - Avoid disabling tiling for swapped formats
 *     (image_to_image copy doesn't deal with it)
 *   - Fix d24_unorm_s8_uint support & aspects
 *   - UBWC
 */

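/* Pick a raw uint format with the same block size as the original format, so
 * a copy moves the bits through unchanged without any format conversion.
 */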
static VkFormat
blit_copy_format(VkFormat format)
{
   switch (vk_format_get_blocksizebits(format)) {
   case 8:  return VK_FORMAT_R8_UINT;
   case 16: return VK_FORMAT_R16_UINT;
   case 32: return VK_FORMAT_R32_UINT;
   case 64: return VK_FORMAT_R32G32_UINT;
   case 96: return VK_FORMAT_R32G32B32_UINT;
   case 128: return VK_FORMAT_R32G32B32A32_UINT;
   default:
      unreachable("unhandled format size");
   }
}

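/* Build the format/layout dword for a 2D blit surface: color format, tile
 * mode, component swap, sRGB, and the UBWC flag. The destination info
 * register (RB_2D_DST_INFO) evidently uses the same bit layout as
 * SP_PS_2D_SRC_INFO, so this one helper is used for both source and
 * destination.
 */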
static uint32_t
blit_image_info(const struct tu_blit_surf *img, bool src, bool stencil_read)
{
   const struct tu_native_format *fmt = tu6_get_native_format(img->fmt);
   enum a6xx_format rb = fmt->rb;
   enum a3xx_color_swap swap = img->tiled ? WZYX : fmt->swap;
   if (rb == FMT6_10_10_10_2_UNORM_DEST && src)
      rb = FMT6_10_10_10_2_UNORM;
   if (rb == FMT6_Z24_UNORM_S8_UINT)
      rb = FMT6_Z24_UNORM_S8_UINT_AS_R8G8B8A8;

   if (stencil_read)
      swap = XYZW;

   return A6XX_SP_PS_2D_SRC_INFO_COLOR_FORMAT(rb) |
          A6XX_SP_PS_2D_SRC_INFO_TILE_MODE(img->tile_mode) |
          A6XX_SP_PS_2D_SRC_INFO_COLOR_SWAP(swap) |
          COND(vk_format_is_srgb(img->fmt), A6XX_SP_PS_2D_SRC_INFO_SRGB) |
          COND(img->ubwc_size, A6XX_SP_PS_2D_SRC_INFO_FLAGS);
}

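/* Emit one 2D blit operation (CP_BLIT with BLIT_OP_SCALE): blit control,
 * source state (or the solid clear color for clears), destination state,
 * source/destination rectangles, and finally the blit kick itself.
 */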
static void
emit_blit_step(struct tu_cmd_buffer *cmdbuf, struct tu_cs *cs,
               const struct tu_blit *blt)
{
   struct tu_physical_device *phys_dev = cmdbuf->device->physical_device;

   enum a6xx_format fmt = tu6_get_native_format(blt->dst.fmt)->rb;
   if (fmt == FMT6_Z24_UNORM_S8_UINT)
      fmt = FMT6_Z24_UNORM_S8_UINT_AS_R8G8B8A8;

   enum a6xx_2d_ifmt ifmt = tu6_fmt_to_ifmt(fmt);

   if (vk_format_is_srgb(blt->dst.fmt)) {
      assert(ifmt == R2D_UNORM8);
      ifmt = R2D_UNORM8_SRGB;
   }

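   /* The same control value is written to both the RB and GRAS copies of the
    * 2D blit control register.
    */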
   uint32_t blit_cntl = A6XX_RB_2D_BLIT_CNTL_ROTATE(blt->rotation) |
                        COND(blt->type == TU_BLIT_CLEAR, A6XX_RB_2D_BLIT_CNTL_SOLID_COLOR) |
                        A6XX_RB_2D_BLIT_CNTL_COLOR_FORMAT(fmt) | /* not required? */
                        COND(fmt == FMT6_Z24_UNORM_S8_UINT_AS_R8G8B8A8, A6XX_RB_2D_BLIT_CNTL_D24S8) |
                        A6XX_RB_2D_BLIT_CNTL_MASK(0xf) |
                        A6XX_RB_2D_BLIT_CNTL_IFMT(ifmt);

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_BLIT_CNTL, 1);
   tu_cs_emit(cs, blit_cntl);

   tu_cs_emit_pkt4(cs, REG_A6XX_GRAS_2D_BLIT_CNTL, 1);
   tu_cs_emit(cs, blit_cntl);

   /*
    * Emit source:
    */
   if (blt->type == TU_BLIT_CLEAR) {
      tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_SRC_SOLID_C0, 4);
      tu_cs_emit(cs, blt->clear_value[0]);
      tu_cs_emit(cs, blt->clear_value[1]);
      tu_cs_emit(cs, blt->clear_value[2]);
      tu_cs_emit(cs, blt->clear_value[3]);
   } else {
      tu_cs_emit_pkt4(cs, REG_A6XX_SP_PS_2D_SRC_INFO, 10);
      tu_cs_emit(cs, blit_image_info(&blt->src, true, blt->stencil_read) |
                     A6XX_SP_PS_2D_SRC_INFO_SAMPLES(tu_msaa_samples(blt->src.samples)) |
                     /* TODO: should disable this bit for integer formats? */
                     COND(blt->src.samples > 1, A6XX_SP_PS_2D_SRC_INFO_SAMPLES_AVERAGE) |
                     COND(blt->filter, A6XX_SP_PS_2D_SRC_INFO_FILTER) |
                     0x500000);
      tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_SIZE_WIDTH(blt->src.x + blt->src.width) |
                     A6XX_SP_PS_2D_SRC_SIZE_HEIGHT(blt->src.y + blt->src.height));
      tu_cs_emit_qw(cs, blt->src.va);
      tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_PITCH_PITCH(blt->src.pitch));

      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);

      if (blt->src.ubwc_size) {
         tu_cs_emit_pkt4(cs, REG_A6XX_SP_PS_2D_SRC_FLAGS_LO, 6);
         tu_cs_emit_qw(cs, blt->src.ubwc_va);
         tu_cs_emit(cs, A6XX_SP_PS_2D_SRC_FLAGS_PITCH_PITCH(blt->src.ubwc_pitch) |
                        A6XX_SP_PS_2D_SRC_FLAGS_PITCH_ARRAY_PITCH(blt->src.ubwc_size >> 2));
         tu_cs_emit(cs, 0x00000000);
         tu_cs_emit(cs, 0x00000000);
         tu_cs_emit(cs, 0x00000000);
      }
   }

   /*
    * Emit destination:
    */
   tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_DST_INFO, 9);
   tu_cs_emit(cs, blit_image_info(&blt->dst, false, false));
   tu_cs_emit_qw(cs, blt->dst.va);
   tu_cs_emit(cs, A6XX_RB_2D_DST_SIZE_PITCH(blt->dst.pitch));
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);
   tu_cs_emit(cs, 0x00000000);

   if (blt->dst.ubwc_size) {
      tu_cs_emit_pkt4(cs, REG_A6XX_RB_2D_DST_FLAGS_LO, 6);
      tu_cs_emit_qw(cs, blt->dst.ubwc_va);
      tu_cs_emit(cs, A6XX_RB_2D_DST_FLAGS_PITCH_PITCH(blt->dst.ubwc_pitch) |
                     A6XX_RB_2D_DST_FLAGS_PITCH_ARRAY_PITCH(blt->dst.ubwc_size >> 2));
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
      tu_cs_emit(cs, 0x00000000);
   }

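   /* Source and destination rectangles; the BR coordinates are inclusive,
    * hence the -1.
    */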
   tu_cs_emit_pkt4(cs, REG_A6XX_GRAS_2D_SRC_TL_X, 4);
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_TL_X_X(blt->src.x));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_BR_X_X(blt->src.x + blt->src.width - 1));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_TL_Y_Y(blt->src.y));
   tu_cs_emit(cs, A6XX_GRAS_2D_SRC_BR_Y_Y(blt->src.y + blt->src.height - 1));

   tu_cs_emit_pkt4(cs, REG_A6XX_GRAS_2D_DST_TL, 2);
   tu_cs_emit(cs, A6XX_GRAS_2D_DST_TL_X(blt->dst.x) |
                  A6XX_GRAS_2D_DST_TL_Y(blt->dst.y));
   tu_cs_emit(cs, A6XX_GRAS_2D_DST_BR_X(blt->dst.x + blt->dst.width - 1) |
                  A6XX_GRAS_2D_DST_BR_Y(blt->dst.y + blt->dst.height - 1));

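   /* Synchronize before the remaining 2D state and the blit kick. The exact
    * meaning of event 0x3f and of RB_UNKNOWN_8C01 is not known (hence the
    * UNKNOWN register name); this sequence matches what is known to work.
    */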
   tu_cs_emit_pkt7(cs, CP_EVENT_WRITE, 1);
   tu_cs_emit(cs, 0x3f);
   tu_cs_emit_wfi(cs);

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8C01, 1);
   tu_cs_emit(cs, 0);

   if (fmt == FMT6_10_10_10_2_UNORM_DEST)
      fmt = FMT6_16_16_16_16_FLOAT;

   tu_cs_emit_pkt4(cs, REG_A6XX_SP_2D_SRC_FORMAT, 1);
   tu_cs_emit(cs, COND(vk_format_is_sint(blt->src.fmt), A6XX_SP_2D_SRC_FORMAT_SINT) |
                  COND(vk_format_is_uint(blt->src.fmt), A6XX_SP_2D_SRC_FORMAT_UINT) |
                  A6XX_SP_2D_SRC_FORMAT_COLOR_FORMAT(fmt) |
                  COND(ifmt == R2D_UNORM8_SRGB, A6XX_SP_2D_SRC_FORMAT_SRGB) |
                  A6XX_SP_2D_SRC_FORMAT_MASK(0xf));

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8E04, 1);
   tu_cs_emit(cs, phys_dev->magic.RB_UNKNOWN_8E04_blit);

   tu_cs_emit_pkt7(cs, CP_BLIT, 1);
   tu_cs_emit(cs, CP_BLIT_0_OP(BLIT_OP_SCALE));

   tu_cs_emit_wfi(cs);

   tu_cs_emit_pkt4(cs, REG_A6XX_RB_UNKNOWN_8E04, 1);
   tu_cs_emit(cs, 0);
}

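/* Top-level blit entry point. Normalizes the blit parameters for the given
 * type (copies of compressed, packed, or multisampled formats are rewritten
 * as raw copies of equivalent single-sample formats), emits the cache
 * flush/invalidate events around the 2D engine usage, and then emits one or
 * more blit steps per layer. Copies whose base address or pitch is not
 * 64-byte aligned are split into per-line (or, for buffers, per-chunk)
 * blits, since the 2D engine appears to require 64-byte-aligned bases.
 */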
void tu_blit(struct tu_cmd_buffer *cmdbuf, struct tu_cs *cs,
             struct tu_blit *blt)
{
   switch (blt->type) {
   case TU_BLIT_COPY:
      blt->stencil_read =
         blt->dst.fmt == VK_FORMAT_R8_UNORM &&
         blt->src.fmt == VK_FORMAT_D24_UNORM_S8_UINT;

      assert(vk_format_get_blocksize(blt->dst.fmt) ==
             vk_format_get_blocksize(blt->src.fmt) || blt->stencil_read);
      assert(blt->src.samples == blt->dst.samples);

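      /* Compressed formats are copied as raw uint blocks: convert pitch and
       * coordinates to block units and switch to an equally-sized uint
       * format.
       */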
      if (vk_format_is_compressed(blt->src.fmt)) {
         unsigned block_width = vk_format_get_blockwidth(blt->src.fmt);
         unsigned block_height = vk_format_get_blockheight(blt->src.fmt);

         blt->src.pitch /= block_width;
         blt->src.x /= block_width;
         blt->src.y /= block_height;
         blt->src.fmt = blit_copy_format(blt->src.fmt);

         /* for image_to_image copy, width/height is on the src format */
         blt->dst.width = blt->src.width = DIV_ROUND_UP(blt->src.width, block_width);
         blt->dst.height = blt->src.height = DIV_ROUND_UP(blt->src.height, block_height);
      }

      if (vk_format_is_compressed(blt->dst.fmt)) {
         unsigned block_width = vk_format_get_blockwidth(blt->dst.fmt);
         unsigned block_height = vk_format_get_blockheight(blt->dst.fmt);

         blt->dst.pitch /= block_width;
         blt->dst.x /= block_width;
         blt->dst.y /= block_height;
         blt->dst.fmt = blit_copy_format(blt->dst.fmt);
      }

      if (blt->dst.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->dst.fmt = blit_copy_format(blt->dst.fmt);

      if (blt->src.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->src.fmt = blit_copy_format(blt->src.fmt);

      /* TODO: multisample image copy does not work correctly with tiling/UBWC */
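      /* Flatten MSAA: treat the image as single-sampled, but samples times
       * wider in x.
       */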
      blt->src.x *= blt->src.samples;
      blt->dst.x *= blt->dst.samples;
      blt->src.width *= blt->src.samples;
      blt->dst.width *= blt->dst.samples;
      blt->src.samples = 1;
      blt->dst.samples = 1;
      break;
   case TU_BLIT_CLEAR:
      /* unsupported formats are cleared as UINT32 */
      if (blt->dst.fmt == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32)
         blt->dst.fmt = VK_FORMAT_R32_UINT;
      /* TODO: multisample image clearing also seems not to work with certain
       * formats. The blob uses a shader-based clear in these cases.
       */
      blt->dst.x *= blt->dst.samples;
      blt->dst.width *= blt->dst.samples;
      blt->dst.samples = 1;
      blt->src = blt->dst;
      break;
   default:
      assert(blt->dst.samples == 1);
   }

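   /* Flush LRZ and flush/invalidate the color and depth CCU caches so the 2D
    * engine operates on coherent data.
    */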
   tu6_emit_event_write(cmdbuf, cs, LRZ_FLUSH, false);
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_FLUSH_COLOR_TS, true);
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_FLUSH_DEPTH_TS, true);
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_INVALIDATE_COLOR, false);
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_INVALIDATE_DEPTH, false);

   /* buffer copy setup */
   tu_cs_emit_pkt7(cs, CP_SET_MARKER, 1);
   tu_cs_emit(cs, A6XX_CP_SET_MARKER_0_MODE(RM6_BLIT2DSCALE));

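   /* Emit the blit(s) once per layer; the per-layer VAs are advanced at the
    * bottom of the loop.
    */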
   for (unsigned layer = 0; layer < blt->layers; layer++) {
      if (blt->buffer) {
         struct tu_blit line_blt = *blt;
         uint64_t dst_va = line_blt.dst.va, src_va = line_blt.src.va;
         unsigned blocksize = vk_format_get_blocksize(blt->src.fmt);
         uint32_t size = line_blt.src.width, tmp;

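         /* Buffer-to-buffer copies are split into chunks of at most 0x4000
          * texels. Base addresses are aligned down to 64 bytes (the 2D
          * engine appears to require this) and the remainder is folded into
          * the x offset.
          */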
         while (size) {
            line_blt.src.x = (src_va & 63) / blocksize;
            line_blt.src.va = src_va & ~63;
            tmp = MIN2(size, 0x4000 - line_blt.src.x);

            line_blt.dst.x = (dst_va & 63) / blocksize;
            line_blt.dst.va = dst_va & ~63;
            tmp = MIN2(tmp, 0x4000 - line_blt.dst.x);

            line_blt.src.width = line_blt.dst.width = tmp;

            emit_blit_step(cmdbuf, cs, &line_blt);

            src_va += tmp * blocksize;
            dst_va += tmp * blocksize;
            size -= tmp;
         }
      } else if ((blt->src.va & 63) || (blt->src.pitch & 63)) {
         /* per-line copy path (buffer_to_image) */
         assert(blt->type == TU_BLIT_COPY && !blt->src.tiled);
         struct tu_blit line_blt = *blt;
         uint64_t src_va = line_blt.src.va + blt->src.pitch * blt->src.y;

         line_blt.src.y = 0;
         line_blt.src.pitch = 0;
         line_blt.src.height = 1;
         line_blt.dst.height = 1;

         for (unsigned y = 0; y < blt->src.height; y++) {
            line_blt.src.x = blt->src.x + (src_va & 63) / vk_format_get_blocksize(blt->src.fmt);
            line_blt.src.va = src_va & ~63;

            emit_blit_step(cmdbuf, cs, &line_blt);

            line_blt.dst.y++;
            src_va += blt->src.pitch;
         }
      } else if ((blt->dst.va & 63) || (blt->dst.pitch & 63)) {
         /* per-line copy path (image_to_buffer) */
         assert(blt->type == TU_BLIT_COPY && !blt->dst.tiled);
         struct tu_blit line_blt = *blt;
         uint64_t dst_va = line_blt.dst.va + blt->dst.pitch * blt->dst.y;

         line_blt.dst.y = 0;
         line_blt.dst.pitch = 0;
         line_blt.src.height = 1;
         line_blt.dst.height = 1;

         for (unsigned y = 0; y < blt->src.height; y++) {
            line_blt.dst.x = blt->dst.x + (dst_va & 63) / vk_format_get_blocksize(blt->dst.fmt);
            line_blt.dst.va = dst_va & ~63;

            emit_blit_step(cmdbuf, cs, &line_blt);

            line_blt.src.y++;
            dst_va += blt->dst.pitch;
         }
      } else {
         emit_blit_step(cmdbuf, cs, blt);
      }
      blt->dst.va += blt->dst.layer_size;
      blt->src.va += blt->src.layer_size;
      blt->dst.ubwc_va += blt->dst.ubwc_size;
      blt->src.ubwc_va += blt->src.ubwc_size;
   }

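   /* Flush the CCU and caches, then invalidate, so that subsequent commands
    * observe the blit results.
    */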
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_FLUSH_COLOR_TS, true);
   tu6_emit_event_write(cmdbuf, cs, PC_CCU_FLUSH_DEPTH_TS, true);
   tu6_emit_event_write(cmdbuf, cs, CACHE_FLUSH_TS, true);
   tu6_emit_event_write(cmdbuf, cs, CACHE_INVALIDATE, false);
}