Added a few more stubs so that control reaches DestroyDevice().
[mesa.git] / src / freedreno / vulkan / tu_util.h
1 /*
2 * Copyright 2020 Valve Corporation
3 * SPDX-License-Identifier: MIT
4 *
5 * Authors:
6 * Jonathan Marek <jonathan@marek.ca>
7 */
8
9 #ifndef TU_UTIL_H
10 #define TU_UTIL_H
11
#include <assert.h>
#include <stdint.h>
#include <string.h>

#include "util/macros.h"
#include "util/u_math.h"
#include "util/format/u_format_pack.h"
#include "util/format/u_format_zs.h"
#include "compiler/shader_enums.h"

#include "adreno_common.xml.h"
#include "adreno_pm4.xml.h"
#include "a6xx.xml.h"

#include <vulkan/vulkan.h>
26
27 static inline gl_shader_stage
28 vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
29 {
30 assert(__builtin_popcount(vk_stage) == 1);
31 return util_logbase2(vk_stage);
32 }
33
34 static inline VkShaderStageFlagBits
35 mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
36 {
37 return 1 << mesa_stage;
38 }
39
/* Bitmask covering one bit per defined mesa shader stage. */
#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over every stage whose bit is set in `stage_bits`.
 * __tmp carries the bits not yet visited; each pass sets `stage` to the
 * index of the lowest remaining set bit (ffs - 1), then clears that bit
 * in the loop-increment expression.  Terminates when __tmp is zero.
 */
#define tu_foreach_stage(stage, stage_bits) \
   for (gl_shader_stage stage, \
        __tmp = (gl_shader_stage)((stage_bits) &TU_STAGE_MASK); \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))
46
47 static inline enum a3xx_msaa_samples
48 tu_msaa_samples(VkSampleCountFlagBits samples)
49 {
50 assert(__builtin_popcount(samples) == 1);
51 return util_logbase2(samples);
52 }
53
54 static inline uint32_t
55 tu6_stage2opcode(gl_shader_stage stage)
56 {
57 if (stage == MESA_SHADER_FRAGMENT || stage == MESA_SHADER_COMPUTE)
58 return CP_LOAD_STATE6_FRAG;
59 return CP_LOAD_STATE6_GEOM;
60 }
61
62 static inline enum a6xx_state_block
63 tu6_stage2texsb(gl_shader_stage stage)
64 {
65 return SB6_VS_TEX + stage;
66 }
67
68 static inline enum a6xx_state_block
69 tu6_stage2shadersb(gl_shader_stage stage)
70 {
71 return SB6_VS_SHADER + stage;
72 }
73
/* Translate a Vulkan logic op to the hw ROP code.  Asserts (rather than
 * clamps) on out-of-range input, so callers must pass a valid VkLogicOp.
 */
static inline enum a3xx_rop_code
tu6_rop(VkLogicOp op)
{
   /* note: hw enum matches the VK enum, but with the 4 bits reversed */
   static const uint8_t lookup[] = {
      [VK_LOGIC_OP_CLEAR] = ROP_CLEAR,
      [VK_LOGIC_OP_AND] = ROP_AND,
      [VK_LOGIC_OP_AND_REVERSE] = ROP_AND_REVERSE,
      [VK_LOGIC_OP_COPY] = ROP_COPY,
      [VK_LOGIC_OP_AND_INVERTED] = ROP_AND_INVERTED,
      [VK_LOGIC_OP_NO_OP] = ROP_NOOP,
      [VK_LOGIC_OP_XOR] = ROP_XOR,
      [VK_LOGIC_OP_OR] = ROP_OR,
      [VK_LOGIC_OP_NOR] = ROP_NOR,
      [VK_LOGIC_OP_EQUIVALENT] = ROP_EQUIV,
      [VK_LOGIC_OP_INVERT] = ROP_INVERT,
      [VK_LOGIC_OP_OR_REVERSE] = ROP_OR_REVERSE,
      [VK_LOGIC_OP_COPY_INVERTED] = ROP_COPY_INVERTED,
      [VK_LOGIC_OP_OR_INVERTED] = ROP_OR_INVERTED,
      [VK_LOGIC_OP_NAND] = ROP_NAND,
      [VK_LOGIC_OP_SET] = ROP_SET,
   };
   assert(op < ARRAY_SIZE(lookup));
   return lookup[op];
}
99
/* Translate a Vulkan primitive topology to the hw draw-initiator
 * primitive type.  Asserts on out-of-range input.
 */
static inline enum pc_di_primtype
tu6_primtype(VkPrimitiveTopology topology)
{
   static const uint8_t lookup[] = {
      [VK_PRIMITIVE_TOPOLOGY_POINT_LIST] = DI_PT_POINTLIST,
      [VK_PRIMITIVE_TOPOLOGY_LINE_LIST] = DI_PT_LINELIST,
      [VK_PRIMITIVE_TOPOLOGY_LINE_STRIP] = DI_PT_LINESTRIP,
      [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST] = DI_PT_TRILIST,
      [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP] = DI_PT_TRISTRIP,
      [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN] = DI_PT_TRIFAN,
      [VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY] = DI_PT_LINE_ADJ,
      [VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY] = DI_PT_LINESTRIP_ADJ,
      [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY] = DI_PT_TRI_ADJ,
      [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY] = DI_PT_TRISTRIP_ADJ,
      /* Return PATCH0 and update in tu_pipeline_builder_parse_tessellation */
      [VK_PRIMITIVE_TOPOLOGY_PATCH_LIST] = DI_PT_PATCHES0,
   };
   assert(topology < ARRAY_SIZE(lookup));
   return lookup[topology];
}
120
/* Direct cast: relies on the hw compare-func enum values matching the
 * VkCompareOp values one-to-one (NOTE(review): assumed from the cast —
 * confirm against adreno_common.xml if either enum changes).
 */
static inline enum adreno_compare_func
tu6_compare_func(VkCompareOp op)
{
   return (enum adreno_compare_func) op;
}
126
/* Direct cast: relies on the hw stencil-op enum values matching the
 * VkStencilOp values one-to-one (NOTE(review): assumed from the cast —
 * confirm against adreno_common.xml if either enum changes).
 */
static inline enum adreno_stencil_op
tu6_stencil_op(VkStencilOp op)
{
   return (enum adreno_stencil_op) op;
}
132
/* Translate a Vulkan blend factor to the hw blend factor.  Asserts on
 * out-of-range input.
 */
static inline enum adreno_rb_blend_factor
tu6_blend_factor(VkBlendFactor factor)
{
   static const uint8_t lookup[] = {
      [VK_BLEND_FACTOR_ZERO] = FACTOR_ZERO,
      [VK_BLEND_FACTOR_ONE] = FACTOR_ONE,
      [VK_BLEND_FACTOR_SRC_COLOR] = FACTOR_SRC_COLOR,
      [VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR] = FACTOR_ONE_MINUS_SRC_COLOR,
      [VK_BLEND_FACTOR_DST_COLOR] = FACTOR_DST_COLOR,
      [VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR] = FACTOR_ONE_MINUS_DST_COLOR,
      [VK_BLEND_FACTOR_SRC_ALPHA] = FACTOR_SRC_ALPHA,
      [VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA] = FACTOR_ONE_MINUS_SRC_ALPHA,
      [VK_BLEND_FACTOR_DST_ALPHA] = FACTOR_DST_ALPHA,
      [VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA] = FACTOR_ONE_MINUS_DST_ALPHA,
      [VK_BLEND_FACTOR_CONSTANT_COLOR] = FACTOR_CONSTANT_COLOR,
      [VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR]= FACTOR_ONE_MINUS_CONSTANT_COLOR,
      [VK_BLEND_FACTOR_CONSTANT_ALPHA] = FACTOR_CONSTANT_ALPHA,
      [VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA]= FACTOR_ONE_MINUS_CONSTANT_ALPHA,
      [VK_BLEND_FACTOR_SRC_ALPHA_SATURATE] = FACTOR_SRC_ALPHA_SATURATE,
      [VK_BLEND_FACTOR_SRC1_COLOR] = FACTOR_SRC1_COLOR,
      [VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR] = FACTOR_ONE_MINUS_SRC1_COLOR,
      [VK_BLEND_FACTOR_SRC1_ALPHA] = FACTOR_SRC1_ALPHA,
      [VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA] = FACTOR_ONE_MINUS_SRC1_ALPHA,
   };
   assert(factor < ARRAY_SIZE(lookup));
   return lookup[factor];
}
160
/* Direct cast: relies on the hw blend-opcode enum values matching the
 * VkBlendOp values one-to-one (NOTE(review): assumed from the cast —
 * confirm against a3xx/a6xx xml if either enum changes).
 */
static inline enum a3xx_rb_blend_opcode
tu6_blend_op(VkBlendOp op)
{
   return (enum a3xx_rb_blend_opcode) op;
}
166
167 static inline enum a6xx_tex_type
168 tu6_tex_type(VkImageViewType type, bool storage)
169 {
170 switch (type) {
171 default:
172 case VK_IMAGE_VIEW_TYPE_1D:
173 case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
174 return A6XX_TEX_1D;
175 case VK_IMAGE_VIEW_TYPE_2D:
176 case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
177 return A6XX_TEX_2D;
178 case VK_IMAGE_VIEW_TYPE_3D:
179 return A6XX_TEX_3D;
180 case VK_IMAGE_VIEW_TYPE_CUBE:
181 case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
182 return storage ? A6XX_TEX_2D : A6XX_TEX_CUBE;
183 }
184 }
185
186 static inline enum a6xx_tex_clamp
187 tu6_tex_wrap(VkSamplerAddressMode address_mode)
188 {
189 uint8_t lookup[] = {
190 [VK_SAMPLER_ADDRESS_MODE_REPEAT] = A6XX_TEX_REPEAT,
191 [VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT] = A6XX_TEX_MIRROR_REPEAT,
192 [VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE] = A6XX_TEX_CLAMP_TO_EDGE,
193 [VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER] = A6XX_TEX_CLAMP_TO_BORDER,
194 [VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE] = A6XX_TEX_MIRROR_CLAMP,
195 };
196 assert(address_mode < ARRAY_SIZE(lookup));
197 return lookup[address_mode];
198 }
199
200 static inline enum a6xx_tex_filter
201 tu6_tex_filter(VkFilter filter, unsigned aniso)
202 {
203 switch (filter) {
204 case VK_FILTER_NEAREST:
205 return A6XX_TEX_NEAREST;
206 case VK_FILTER_LINEAR:
207 return aniso ? A6XX_TEX_ANISO : A6XX_TEX_LINEAR;
208 case VK_FILTER_CUBIC_EXT:
209 return A6XX_TEX_CUBIC;
210 default:
211 unreachable("illegal texture filter");
212 break;
213 }
214 }
215
/* Direct cast: relies on the hw reduction-mode enum values matching the
 * VkSamplerReductionMode values one-to-one (NOTE(review): assumed from
 * the cast — confirm against a6xx.xml if either enum changes).
 */
static inline enum a6xx_reduction_mode
tu6_reduction_mode(VkSamplerReductionMode reduction_mode)
{
   return (enum a6xx_reduction_mode) reduction_mode;
}
221
222 static inline enum a6xx_depth_format
223 tu6_pipe2depth(VkFormat format)
224 {
225 switch (format) {
226 case VK_FORMAT_D16_UNORM:
227 return DEPTH6_16;
228 case VK_FORMAT_X8_D24_UNORM_PACK32:
229 case VK_FORMAT_D24_UNORM_S8_UINT:
230 return DEPTH6_24_8;
231 case VK_FORMAT_D32_SFLOAT:
232 case VK_FORMAT_D32_SFLOAT_S8_UINT:
233 case VK_FORMAT_S8_UINT:
234 return DEPTH6_32;
235 default:
236 return ~0;
237 }
238 }
239
240 static inline enum a6xx_polygon_mode
241 tu6_polygon_mode(VkPolygonMode mode)
242 {
243 switch (mode) {
244 case VK_POLYGON_MODE_POINT:
245 return POLYMODE6_POINTS;
246 case VK_POLYGON_MODE_LINE:
247 return POLYMODE6_LINES;
248 case VK_POLYGON_MODE_FILL:
249 return POLYMODE6_TRIANGLES;
250 default:
251 unreachable("bad polygon mode");
252 }
253 }
254
/* One border-color entry holding the color pre-packed in every layout
 * it may be sampled in; filled by tu6_pack_border_color() below.
 * NOTE(review): field order, padding, and the 128-byte alignment are
 * assumed to match what the hw expects for its border-color buffer —
 * do not reorder fields; confirm against the a6xx docs before changing.
 */
struct bcolor_entry {
   uint32_t fp32[4];   /* raw 32-bit components (float or int bits) */
   uint64_t ui16;
   uint64_t si16;
   uint64_t fp16;      /* also reused for packed 16-bit ints (see tu6_pack_border_color) */
   uint16_t rgb565;
   uint16_t rgb5a1;
   uint16_t rgba4;
   uint8_t  __pad0[2];
   uint32_t ui8;
   uint32_t si8;
   uint32_t rgb10a2;
   uint32_t z24; /* also s8? */
   uint64_t srgb;
   uint8_t  __pad1[56];  /* pad entry out to the 128-byte stride */
} __attribute__((aligned(128)));
271
272 /* vulkan does not want clamping of integer clear values, differs from u_format
273 * see spec for VkClearColorValue
274 */
/* Pack the low byte of each of the four components into one dword,
 * component 0 in the least significant byte.  Components are truncated
 * (masked), not clamped, per Vulkan integer clear-value semantics.
 */
static inline void
pack_int8(uint32_t *dst, const uint32_t *val)
{
   uint32_t packed = 0;
   for (unsigned i = 0; i < 4; i++)
      packed |= (val[i] & 0xff) << (8 * i);
   *dst = packed;
}
283
/* Pack rgb10a2: 10 truncated bits per color component, 2 bits of alpha
 * in the top of the dword.  No clamping (Vulkan integer clear semantics).
 */
static inline void
pack_int10_2(uint32_t *dst, const uint32_t *val)
{
   uint32_t packed = 0;
   unsigned shift = 0;
   for (unsigned i = 0; i < 3; i++, shift += 10)
      packed |= (val[i] & 0x3ff) << shift;
   packed |= (val[3] & 0x3) << 30;
   *dst = packed;
}
292
/* Pack four components as truncated 16-bit values into two dwords,
 * two components per dword with the lower component in the low half.
 */
static inline void
pack_int16(uint32_t *dst, const uint32_t *val)
{
   for (unsigned i = 0; i < 2; i++)
      dst[i] = (val[2 * i] & 0xffff) |
               (val[2 * i + 1] & 0xffff) << 16;
}
301
/* Fill a bcolor_entry with the border color pre-packed in every format
 * layout the hw may sample it in.
 *
 * For integer border colors (is_int) only the raw 32-bit copy and a
 * truncated 16-bit copy (stored in the fp16 slot) are produced — per the
 * comment above, Vulkan wants integer values truncated, not clamped, so
 * the u_format float packers cannot be used.
 */
static inline void
tu6_pack_border_color(struct bcolor_entry *bcolor, const VkClearColorValue *val, bool is_int)
{
   memcpy(bcolor->fp32, val, 4 * sizeof(float));
   if (is_int) {
      pack_int16((uint32_t*) &bcolor->fp16, val->uint32);
      return;
   }
/* PACK_F(field, fmt): convert the float border color to `fmt` and store
 * the packed result in bcolor->field (one pixel, no clamping beyond what
 * the format conversion itself does). */
#define PACK_F(x, type) util_format_##type##_pack_rgba_float \
   ( (uint8_t*) (&bcolor->x), 0, val->float32, 0, 1, 1)
   PACK_F(ui16, r16g16b16a16_unorm);
   PACK_F(si16, r16g16b16a16_snorm);
   PACK_F(fp16, r16g16b16a16_float);
   PACK_F(rgb565, r5g6b5_unorm);
   PACK_F(rgb5a1, r5g5b5a1_unorm);
   PACK_F(rgba4, r4g4b4a4_unorm);
   PACK_F(ui8, r8g8b8a8_unorm);
   PACK_F(si8, r8g8b8a8_snorm);
   PACK_F(rgb10a2, r10g10b10a2_unorm);
   /* depth border: z packed into the x8z24 layout */
   util_format_x8z24_unorm_pack_z_float((uint8_t*) &bcolor->z24,
                                        0, val->float32, 0, 1, 1);
   PACK_F(srgb, r16g16b16a16_float); /* TODO: clamp? */
#undef PACK_F
}
326
327 #endif /* TU_UTIL_H */