/*
 * Copyright (C) 2016 Rob Clark <robclark@freedesktop.org>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Authors:
 *    Rob Clark <robclark@freedesktop.org>
 */

#include "pipe/p_defines.h"
#include "util/format/u_format.h"

#include "fd5_format.h"


/* Table of all the formats and their features, plus the helpers that look
 * up various data in that table.
 */
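
/*
 * A minimal usage sketch (hypothetical caller, not part of this file),
 * assuming the caller starts from a gallium pipe_format:
 *
 *    enum a5xx_tex_fmt tfmt = fd5_pipe2tex(PIPE_FORMAT_B8G8R8A8_UNORM);
 *    if (tfmt == TFMT5_NONE)
 *       ... format cannot be sampled on a5xx, fall back or reject ...
 */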

struct fd5_format {
   enum a5xx_vtx_fmt vtx;        /* vertex fetch format */
   enum a5xx_tex_fmt tex;        /* texture sample format */
   enum a5xx_color_fmt rb;       /* MRT / copy-dest (render target) format */
   enum a3xx_color_swap swap;    /* component swap */
   boolean present;              /* set for formats that have an entry at all */
};

/* vertex + texture */
#define VT(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_ ## pipe] = { \
      .present = 1, \
      .vtx = VFMT5_ ## fmt, \
      .tex = TFMT5_ ## fmt, \
      .rb = RB5_ ## rbfmt, \
      .swap = swapfmt \
   }

/* texture-only */
#define _T(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_ ## pipe] = { \
      .present = 1, \
      .vtx = VFMT5_NONE, \
      .tex = TFMT5_ ## fmt, \
      .rb = RB5_ ## rbfmt, \
      .swap = swapfmt \
   }

/* vertex-only */
#define V_(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_ ## pipe] = { \
      .present = 1, \
      .vtx = VFMT5_ ## fmt, \
      .tex = TFMT5_NONE, \
      .rb = RB5_ ## rbfmt, \
      .swap = swapfmt \
   }
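
/*
 * Illustrative expansion (per the token pasting above), e.g.
 * VT(R8_UNORM, 8_UNORM, R8_UNORM, WZYX) expands to essentially:
 *
 *    [PIPE_FORMAT_R8_UNORM] = {
 *       .present = 1,
 *       .vtx  = VFMT5_8_UNORM,
 *       .tex  = TFMT5_8_UNORM,
 *       .rb   = RB5_R8_UNORM,
 *       .swap = WZYX,
 *    }
 */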

static struct fd5_format formats[PIPE_FORMAT_COUNT] = {
   /* for blitting, treat PIPE_FORMAT_NONE as 8bit R8: */
   _T(NONE, 8_UINT, R8_UINT, WZYX),

   /* 8-bit */
   VT(R8_UNORM, 8_UNORM, R8_UNORM, WZYX),
   VT(R8_SNORM, 8_SNORM, R8_SNORM, WZYX),
   VT(R8_UINT, 8_UINT, R8_UINT, WZYX),
   VT(R8_SINT, 8_SINT, R8_SINT, WZYX),
   V_(R8_USCALED, 8_UINT, NONE, WZYX),
   V_(R8_SSCALED, 8_SINT, NONE, WZYX),

   _T(A8_UNORM, 8_UNORM, A8_UNORM, WZYX),
   _T(L8_UNORM, 8_UNORM, R8_UNORM, WZYX),
   _T(I8_UNORM, 8_UNORM, NONE, WZYX),

   _T(A8_UINT, 8_UINT, NONE, WZYX),
   _T(A8_SINT, 8_SINT, NONE, WZYX),
   _T(L8_UINT, 8_UINT, NONE, WZYX),
   _T(L8_SINT, 8_SINT, NONE, WZYX),
   _T(I8_UINT, 8_UINT, NONE, WZYX),
   _T(I8_SINT, 8_SINT, NONE, WZYX),

   _T(S8_UINT, 8_UINT, R8_UNORM, WZYX),

   /* 16-bit */
   VT(R16_UNORM, 16_UNORM, R16_UNORM, WZYX),
   VT(R16_SNORM, 16_SNORM, R16_SNORM, WZYX),
   VT(R16_UINT, 16_UINT, R16_UINT, WZYX),
   VT(R16_SINT, 16_SINT, R16_SINT, WZYX),
   V_(R16_USCALED, 16_UINT, NONE, WZYX),
   V_(R16_SSCALED, 16_SINT, NONE, WZYX),
   VT(R16_FLOAT, 16_FLOAT, R16_FLOAT, WZYX),
   _T(Z16_UNORM, 16_UNORM, R16_UNORM, WZYX),

   _T(A16_UNORM, 16_UNORM, NONE, WZYX),
   _T(A16_SNORM, 16_SNORM, NONE, WZYX),
   _T(A16_UINT, 16_UINT, NONE, WZYX),
   _T(A16_SINT, 16_SINT, NONE, WZYX),
   _T(L16_UNORM, 16_UNORM, NONE, WZYX),
   _T(L16_SNORM, 16_SNORM, NONE, WZYX),
   _T(L16_UINT, 16_UINT, NONE, WZYX),
   _T(L16_SINT, 16_SINT, NONE, WZYX),
   _T(I16_UNORM, 16_UNORM, NONE, WZYX),
   _T(I16_SNORM, 16_SNORM, NONE, WZYX),
   _T(I16_UINT, 16_UINT, NONE, WZYX),
   _T(I16_SINT, 16_SINT, NONE, WZYX),

   VT(R8G8_UNORM, 8_8_UNORM, R8G8_UNORM, WZYX),
   VT(R8G8_SNORM, 8_8_SNORM, R8G8_SNORM, WZYX),
   VT(R8G8_UINT, 8_8_UINT, R8G8_UINT, WZYX),
   VT(R8G8_SINT, 8_8_SINT, R8G8_SINT, WZYX),
   V_(R8G8_USCALED, 8_8_UINT, NONE, WZYX),
   V_(R8G8_SSCALED, 8_8_SINT, NONE, WZYX),

   _T(L8A8_UINT, 8_8_UINT, NONE, WZYX),
   _T(L8A8_SINT, 8_8_SINT, NONE, WZYX),

   _T(B5G6R5_UNORM, 5_6_5_UNORM, R5G6B5_UNORM, WXYZ),
   _T(B5G5R5A1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B5G5R5X1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B4G4R4A4_UNORM, 4_4_4_4_UNORM, R4G4B4A4_UNORM, WXYZ),

   /* 24-bit */
   V_(R8G8B8_UNORM, 8_8_8_UNORM, NONE, WZYX),
   V_(R8G8B8_SNORM, 8_8_8_SNORM, NONE, WZYX),
   V_(R8G8B8_UINT, 8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8_SINT, 8_8_8_SINT, NONE, WZYX),
   V_(R8G8B8_USCALED, 8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8_SSCALED, 8_8_8_SINT, NONE, WZYX),

   /* 32-bit */
   VT(R32_UINT, 32_UINT, R32_UINT, WZYX),
   VT(R32_SINT, 32_SINT, R32_SINT, WZYX),
   V_(R32_USCALED, 32_UINT, NONE, WZYX),
   V_(R32_SSCALED, 32_SINT, NONE, WZYX),
   VT(R32_FLOAT, 32_FLOAT, R32_FLOAT, WZYX),
   V_(R32_FIXED, 32_FIXED, NONE, WZYX),

   _T(A32_UINT, 32_UINT, NONE, WZYX),
   _T(A32_SINT, 32_SINT, NONE, WZYX),
   _T(L32_UINT, 32_UINT, NONE, WZYX),
   _T(L32_SINT, 32_SINT, NONE, WZYX),
   _T(I32_UINT, 32_UINT, NONE, WZYX),
   _T(I32_SINT, 32_SINT, NONE, WZYX),

   VT(R16G16_UNORM, 16_16_UNORM, R16G16_UNORM, WZYX),
   VT(R16G16_SNORM, 16_16_SNORM, R16G16_SNORM, WZYX),
   VT(R16G16_UINT, 16_16_UINT, R16G16_UINT, WZYX),
   VT(R16G16_SINT, 16_16_SINT, R16G16_SINT, WZYX),
   VT(R16G16_USCALED, 16_16_UINT, NONE, WZYX),
   VT(R16G16_SSCALED, 16_16_SINT, NONE, WZYX),
   VT(R16G16_FLOAT, 16_16_FLOAT, R16G16_FLOAT, WZYX),

   _T(L16A16_UNORM, 16_16_UNORM, NONE, WZYX),
   _T(L16A16_SNORM, 16_16_SNORM, NONE, WZYX),
   _T(L16A16_UINT, 16_16_UINT, NONE, WZYX),
   _T(L16A16_SINT, 16_16_SINT, NONE, WZYX),

   VT(R8G8B8A8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8A8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   VT(R8G8B8A8_SNORM, 8_8_8_8_SNORM, R8G8B8A8_SNORM, WZYX),
   VT(R8G8B8A8_UINT, 8_8_8_8_UINT, R8G8B8A8_UINT, WZYX),
   VT(R8G8B8A8_SINT, 8_8_8_8_SINT, R8G8B8A8_SINT, WZYX),
   V_(R8G8B8A8_USCALED, 8_8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8A8_SSCALED, 8_8_8_8_SINT, NONE, WZYX),

   VT(B8G8R8A8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   VT(B8G8R8A8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),

   VT(A8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(A8B8G8R8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),

   VT(A8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(A8R8G8B8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),

   VT(R10G10B10A2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WZYX),
   VT(B10G10R10A2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   _T(B10G10R10X2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   V_(R10G10B10A2_SNORM, 10_10_10_2_SNORM, NONE, WZYX),
   V_(B10G10R10A2_SNORM, 10_10_10_2_SNORM, NONE, WXYZ),
   VT(R10G10B10A2_UINT, 10_10_10_2_UINT, R10G10B10A2_UINT, WZYX),
   VT(B10G10R10A2_UINT, 10_10_10_2_UINT, R10G10B10A2_UINT, WXYZ),
   V_(R10G10B10A2_USCALED, 10_10_10_2_UINT, NONE, WZYX),
   V_(B10G10R10A2_USCALED, 10_10_10_2_UINT, NONE, WXYZ),
   V_(R10G10B10A2_SSCALED, 10_10_10_2_SINT, NONE, WZYX),
   V_(B10G10R10A2_SSCALED, 10_10_10_2_SINT, NONE, WXYZ),

   VT(R11G11B10_FLOAT, 11_11_10_FLOAT, R11G11B10_FLOAT, WZYX),
   _T(R9G9B9E5_FLOAT, 9_9_9_E5_FLOAT, NONE, WZYX),

   _T(Z24X8_UNORM, X8Z24_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(X24S8_UINT, 8_8_8_8_UINT, R8G8B8A8_UINT, XYZW),
   _T(Z24_UNORM_S8_UINT, X8Z24_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT, 32_FLOAT, R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT_S8X24_UINT, 32_FLOAT, R8G8B8A8_UNORM, WZYX),
   _T(X32_S8X24_UINT, 8_UINT, R8_UINT, WZYX),

   /* 48-bit */
   V_(R16G16B16_UNORM, 16_16_16_UNORM, NONE, WZYX),
   V_(R16G16B16_SNORM, 16_16_16_SNORM, NONE, WZYX),
   V_(R16G16B16_UINT, 16_16_16_UINT, NONE, WZYX),
   V_(R16G16B16_SINT, 16_16_16_SINT, NONE, WZYX),
   V_(R16G16B16_USCALED, 16_16_16_UINT, NONE, WZYX),
   V_(R16G16B16_SSCALED, 16_16_16_SINT, NONE, WZYX),
   V_(R16G16B16_FLOAT, 16_16_16_FLOAT, NONE, WZYX),

   /* 64-bit */
   VT(R16G16B16A16_UNORM, 16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16X16_UNORM, 16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16A16_SNORM, 16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16X16_SNORM, 16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16A16_UINT, 16_16_16_16_UINT, R16G16B16A16_UINT, WZYX),
   VT(R16G16B16X16_UINT, 16_16_16_16_UINT, R16G16B16A16_UINT, WZYX),
   VT(R16G16B16A16_SINT, 16_16_16_16_SINT, R16G16B16A16_SINT, WZYX),
   VT(R16G16B16X16_SINT, 16_16_16_16_SINT, R16G16B16A16_SINT, WZYX),
   VT(R16G16B16A16_USCALED, 16_16_16_16_UINT, NONE, WZYX),
   VT(R16G16B16A16_SSCALED, 16_16_16_16_SINT, NONE, WZYX),
   VT(R16G16B16A16_FLOAT, 16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),
   VT(R16G16B16X16_FLOAT, 16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),

   VT(R32G32_UINT, 32_32_UINT, R32G32_UINT, WZYX),
   VT(R32G32_SINT, 32_32_SINT, R32G32_SINT, WZYX),
   V_(R32G32_USCALED, 32_32_UINT, NONE, WZYX),
   V_(R32G32_SSCALED, 32_32_SINT, NONE, WZYX),
   VT(R32G32_FLOAT, 32_32_FLOAT, R32G32_FLOAT, WZYX),
   V_(R32G32_FIXED, 32_32_FIXED, NONE, WZYX),

   _T(L32A32_UINT, 32_32_UINT, NONE, WZYX),
   _T(L32A32_SINT, 32_32_SINT, NONE, WZYX),

   /* 96-bit */
   VT(R32G32B32_UINT, 32_32_32_UINT, NONE, WZYX),
   VT(R32G32B32_SINT, 32_32_32_SINT, NONE, WZYX),
   V_(R32G32B32_USCALED, 32_32_32_UINT, NONE, WZYX),
   V_(R32G32B32_SSCALED, 32_32_32_SINT, NONE, WZYX),
   VT(R32G32B32_FLOAT, 32_32_32_FLOAT, NONE, WZYX),
   V_(R32G32B32_FIXED, 32_32_32_FIXED, NONE, WZYX),

   /* 128-bit */
   VT(R32G32B32A32_UINT, 32_32_32_32_UINT, R32G32B32A32_UINT, WZYX),
   _T(R32G32B32X32_UINT, 32_32_32_32_UINT, R32G32B32A32_UINT, WZYX),
   VT(R32G32B32A32_SINT, 32_32_32_32_SINT, R32G32B32A32_SINT, WZYX),
   _T(R32G32B32X32_SINT, 32_32_32_32_SINT, R32G32B32A32_SINT, WZYX),
   V_(R32G32B32A32_USCALED, 32_32_32_32_UINT, NONE, WZYX),
   V_(R32G32B32A32_SSCALED, 32_32_32_32_SINT, NONE, WZYX),
   VT(R32G32B32A32_FLOAT, 32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   _T(R32G32B32X32_FLOAT, 32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   V_(R32G32B32A32_FIXED, 32_32_32_32_FIXED, NONE, WZYX),

   /* compressed */
   _T(ETC1_RGB8, ETC1, NONE, WZYX),
   _T(ETC2_RGB8, ETC2_RGB8, NONE, WZYX),
   _T(ETC2_SRGB8, ETC2_RGB8, NONE, WZYX),
   _T(ETC2_RGB8A1, ETC2_RGB8A1, NONE, WZYX),
   _T(ETC2_SRGB8A1, ETC2_RGB8A1, NONE, WZYX),
   _T(ETC2_RGBA8, ETC2_RGBA8, NONE, WZYX),
   _T(ETC2_SRGBA8, ETC2_RGBA8, NONE, WZYX),
   _T(ETC2_R11_UNORM, ETC2_R11_UNORM, NONE, WZYX),
   _T(ETC2_R11_SNORM, ETC2_R11_SNORM, NONE, WZYX),
   _T(ETC2_RG11_UNORM, ETC2_RG11_UNORM, NONE, WZYX),
   _T(ETC2_RG11_SNORM, ETC2_RG11_SNORM, NONE, WZYX),

   _T(DXT1_RGB, DXT1, NONE, WZYX),
   _T(DXT1_SRGB, DXT1, NONE, WZYX),
   _T(DXT1_RGBA, DXT1, NONE, WZYX),
   _T(DXT1_SRGBA, DXT1, NONE, WZYX),
   _T(DXT3_RGBA, DXT3, NONE, WZYX),
   _T(DXT3_SRGBA, DXT3, NONE, WZYX),
   _T(DXT5_RGBA, DXT5, NONE, WZYX),
   _T(DXT5_SRGBA, DXT5, NONE, WZYX),

   _T(BPTC_RGBA_UNORM, BPTC, NONE, WZYX),
   _T(BPTC_SRGBA, BPTC, NONE, WZYX),
   _T(BPTC_RGB_FLOAT, BPTC_FLOAT, NONE, WZYX),
   _T(BPTC_RGB_UFLOAT, BPTC_UFLOAT, NONE, WZYX),

   _T(RGTC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(RGTC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(RGTC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(RGTC2_SNORM, RGTC2_SNORM, NONE, WZYX),
   _T(LATC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(LATC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(LATC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(LATC2_SNORM, RGTC2_SNORM, NONE, WZYX),

   _T(ASTC_4x4, ASTC_4x4, NONE, WZYX),
   _T(ASTC_5x4, ASTC_5x4, NONE, WZYX),
   _T(ASTC_5x5, ASTC_5x5, NONE, WZYX),
   _T(ASTC_6x5, ASTC_6x5, NONE, WZYX),
   _T(ASTC_6x6, ASTC_6x6, NONE, WZYX),
   _T(ASTC_8x5, ASTC_8x5, NONE, WZYX),
   _T(ASTC_8x6, ASTC_8x6, NONE, WZYX),
   _T(ASTC_8x8, ASTC_8x8, NONE, WZYX),
   _T(ASTC_10x5, ASTC_10x5, NONE, WZYX),
   _T(ASTC_10x6, ASTC_10x6, NONE, WZYX),
   _T(ASTC_10x8, ASTC_10x8, NONE, WZYX),
   _T(ASTC_10x10, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12, ASTC_12x12, NONE, WZYX),

   _T(ASTC_4x4_SRGB, ASTC_4x4, NONE, WZYX),
   _T(ASTC_5x4_SRGB, ASTC_5x4, NONE, WZYX),
   _T(ASTC_5x5_SRGB, ASTC_5x5, NONE, WZYX),
   _T(ASTC_6x5_SRGB, ASTC_6x5, NONE, WZYX),
   _T(ASTC_6x6_SRGB, ASTC_6x6, NONE, WZYX),
   _T(ASTC_8x5_SRGB, ASTC_8x5, NONE, WZYX),
   _T(ASTC_8x6_SRGB, ASTC_8x6, NONE, WZYX),
   _T(ASTC_8x8_SRGB, ASTC_8x8, NONE, WZYX),
   _T(ASTC_10x5_SRGB, ASTC_10x5, NONE, WZYX),
   _T(ASTC_10x6_SRGB, ASTC_10x6, NONE, WZYX),
   _T(ASTC_10x8_SRGB, ASTC_10x8, NONE, WZYX),
   _T(ASTC_10x10_SRGB, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10_SRGB, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12_SRGB, ASTC_12x12, NONE, WZYX),
};

/* convert pipe format to vertex buffer format: */
enum a5xx_vtx_fmt
fd5_pipe2vtx(enum pipe_format format)
{
   if (!formats[format].present)
      return VFMT5_NONE;
   return formats[format].vtx;
}

/* convert pipe format to texture sampler format: */
enum a5xx_tex_fmt
fd5_pipe2tex(enum pipe_format format)
{
   if (!formats[format].present)
      return TFMT5_NONE;
   return formats[format].tex;
}

/* convert pipe format to MRT / copydest format used for render-target: */
enum a5xx_color_fmt
fd5_pipe2color(enum pipe_format format)
{
   if (!formats[format].present)
      return RB5_NONE;
   return formats[format].rb;
}

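/* convert pipe format to hw component swap.  Judging by the table above,
 * WZYX is the "no swap" case (it is used for the natural R..A orderings),
 * which is why it is also the fallback for formats with no entry:
 */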
enum a3xx_color_swap
fd5_pipe2swap(enum pipe_format format)
{
   if (!formats[format].present)
      return WZYX;
   return formats[format].swap;
}

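/* convert pipe format to hw depth format (returns ~0 for formats that have
 * no matching hw depth format):
 */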
enum a5xx_depth_format
fd5_pipe2depth(enum pipe_format format)
{
   switch (format) {
   case PIPE_FORMAT_Z16_UNORM:
      return DEPTH5_16;
   case PIPE_FORMAT_Z24X8_UNORM:
   case PIPE_FORMAT_Z24_UNORM_S8_UINT:
   case PIPE_FORMAT_X8Z24_UNORM:
   case PIPE_FORMAT_S8_UINT_Z24_UNORM:
      return DEPTH5_24_8;
   case PIPE_FORMAT_Z32_FLOAT:
   case PIPE_FORMAT_Z32_FLOAT_S8X24_UINT:
      return DEPTH5_32;
   default:
      return ~0;
   }
}

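/* translate a single PIPE_SWIZZLE_* component to the hw texture swizzle
 * enum; anything unrecognized falls through to X:
 */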
static inline enum a5xx_tex_swiz
tex_swiz(unsigned swiz)
{
   switch (swiz) {
   default:
   case PIPE_SWIZZLE_X: return A5XX_TEX_X;
   case PIPE_SWIZZLE_Y: return A5XX_TEX_Y;
   case PIPE_SWIZZLE_Z: return A5XX_TEX_Z;
   case PIPE_SWIZZLE_W: return A5XX_TEX_W;
   case PIPE_SWIZZLE_0: return A5XX_TEX_ZERO;
   case PIPE_SWIZZLE_1: return A5XX_TEX_ONE;
   }
}

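/*
 * Compose the user/view swizzle with the format's own swizzle and pack the
 * result into the TEX_CONST_0 SWIZ fields.
 *
 * Worked example (assuming u_format's usual composition rules): for
 * PIPE_FORMAT_L8_UNORM the format swizzle is (x, x, x, 1), so an identity
 * view swizzle (r, g, b, a) composes to (x, x, x, 1), i.e. the single
 * channel is replicated to RGB and alpha samples as 1.
 */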
uint32_t
fd5_tex_swiz(enum pipe_format format, unsigned swizzle_r, unsigned swizzle_g,
      unsigned swizzle_b, unsigned swizzle_a)
{
   const struct util_format_description *desc =
         util_format_description(format);
   unsigned char swiz[4] = {
      swizzle_r, swizzle_g, swizzle_b, swizzle_a,
   }, rswiz[4];

   util_format_compose_swizzles(desc->swizzle, swiz, rswiz);

   return A5XX_TEX_CONST_0_SWIZ_X(tex_swiz(rswiz[0])) |
         A5XX_TEX_CONST_0_SWIZ_Y(tex_swiz(rswiz[1])) |
         A5XX_TEX_CONST_0_SWIZ_Z(tex_swiz(rswiz[2])) |
         A5XX_TEX_CONST_0_SWIZ_W(tex_swiz(rswiz[3]));
}