/*
 * Copyright 2020 Valve Corporation
 * SPDX-License-Identifier: MIT
 *
 * Authors:
 *    Jonathan Marek <jonathan@marek.ca>
 */
15 #include "util/macros.h"
16 #include "util/u_math.h"
17 #include "compiler/shader_enums.h"
19 #include "adreno_common.xml.h"
20 #include "adreno_pm4.xml.h"
23 #include <vulkan/vulkan.h>
25 static inline gl_shader_stage
26 vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage
)
28 assert(__builtin_popcount(vk_stage
) == 1);
29 return util_logbase2(vk_stage
);
32 static inline VkShaderStageFlagBits
33 mesa_to_vk_shader_stage(gl_shader_stage mesa_stage
)
35 return 1 << mesa_stage
;
/* Bitmask covering every valid gl_shader_stage bit. */
#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over each stage whose bit is set in `stage_bits`.
 * `__tmp` holds the not-yet-visited bits; each iteration selects the
 * lowest set bit (__builtin_ffs - 1) and clears it in the update step.
 * The comma expression both assigns `stage` and tests `__tmp`, so the
 * loop ends once all bits are consumed. */
#define tu_foreach_stage(stage, stage_bits) \
   for (gl_shader_stage stage, \
        __tmp = (gl_shader_stage)((stage_bits) &TU_STAGE_MASK); \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))
45 static inline enum a3xx_msaa_samples
46 tu_msaa_samples(VkSampleCountFlagBits samples
)
48 assert(__builtin_popcount(samples
) == 1);
49 return util_logbase2(samples
);
52 static inline enum a4xx_index_size
53 tu6_index_size(VkIndexType type
)
56 case VK_INDEX_TYPE_UINT16
:
57 return INDEX4_SIZE_16_BIT
;
58 case VK_INDEX_TYPE_UINT32
:
59 return INDEX4_SIZE_32_BIT
;
60 case VK_INDEX_TYPE_UINT8_EXT
:
61 return INDEX4_SIZE_8_BIT
;
63 unreachable("invalid VkIndexType");
67 static inline uint32_t
68 tu6_stage2opcode(gl_shader_stage stage
)
70 if (stage
== MESA_SHADER_FRAGMENT
|| stage
== MESA_SHADER_COMPUTE
)
71 return CP_LOAD_STATE6_FRAG
;
72 return CP_LOAD_STATE6_GEOM
;
75 static inline enum a6xx_state_block
76 tu6_stage2texsb(gl_shader_stage stage
)
78 return SB6_VS_TEX
+ stage
;
81 static inline enum a6xx_state_block
82 tu6_stage2shadersb(gl_shader_stage stage
)
84 return SB6_VS_SHADER
+ stage
;
87 static inline enum a3xx_rop_code
90 /* note: hw enum matches the VK enum, but with the 4 bits reversed */
91 static const uint8_t lookup
[] = {
92 [VK_LOGIC_OP_CLEAR
] = ROP_CLEAR
,
93 [VK_LOGIC_OP_AND
] = ROP_AND
,
94 [VK_LOGIC_OP_AND_REVERSE
] = ROP_AND_REVERSE
,
95 [VK_LOGIC_OP_COPY
] = ROP_COPY
,
96 [VK_LOGIC_OP_AND_INVERTED
] = ROP_AND_INVERTED
,
97 [VK_LOGIC_OP_NO_OP
] = ROP_NOOP
,
98 [VK_LOGIC_OP_XOR
] = ROP_XOR
,
99 [VK_LOGIC_OP_OR
] = ROP_OR
,
100 [VK_LOGIC_OP_NOR
] = ROP_NOR
,
101 [VK_LOGIC_OP_EQUIVALENT
] = ROP_EQUIV
,
102 [VK_LOGIC_OP_INVERT
] = ROP_INVERT
,
103 [VK_LOGIC_OP_OR_REVERSE
] = ROP_OR_REVERSE
,
104 [VK_LOGIC_OP_COPY_INVERTED
] = ROP_COPY_INVERTED
,
105 [VK_LOGIC_OP_OR_INVERTED
] = ROP_OR_INVERTED
,
106 [VK_LOGIC_OP_NAND
] = ROP_NAND
,
107 [VK_LOGIC_OP_SET
] = ROP_SET
,
109 assert(op
< ARRAY_SIZE(lookup
));
113 static inline enum pc_di_primtype
114 tu6_primtype(VkPrimitiveTopology topology
)
116 static const uint8_t lookup
[] = {
117 [VK_PRIMITIVE_TOPOLOGY_POINT_LIST
] = DI_PT_POINTLIST
,
118 [VK_PRIMITIVE_TOPOLOGY_LINE_LIST
] = DI_PT_LINELIST
,
119 [VK_PRIMITIVE_TOPOLOGY_LINE_STRIP
] = DI_PT_LINESTRIP
,
120 [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
] = DI_PT_TRILIST
,
121 [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
] = DI_PT_TRISTRIP
,
122 [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN
] = DI_PT_TRIFAN
,
123 [VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY
] = DI_PT_LINE_ADJ
,
124 [VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY
] = DI_PT_LINESTRIP_ADJ
,
125 [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY
] = DI_PT_TRI_ADJ
,
126 [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY
] = DI_PT_TRISTRIP_ADJ
,
127 /* Return PATCH0 and update in tu_pipeline_builder_parse_tessellation */
128 [VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
] = DI_PT_PATCHES0
,
130 assert(topology
< ARRAY_SIZE(lookup
));
131 return lookup
[topology
];
134 static inline enum adreno_compare_func
135 tu6_compare_func(VkCompareOp op
)
137 return (enum adreno_compare_func
) op
;
140 static inline enum adreno_stencil_op
141 tu6_stencil_op(VkStencilOp op
)
143 return (enum adreno_stencil_op
) op
;
146 static inline enum adreno_rb_blend_factor
147 tu6_blend_factor(VkBlendFactor factor
)
149 static const uint8_t lookup
[] = {
150 [VK_BLEND_FACTOR_ZERO
] = FACTOR_ZERO
,
151 [VK_BLEND_FACTOR_ONE
] = FACTOR_ONE
,
152 [VK_BLEND_FACTOR_SRC_COLOR
] = FACTOR_SRC_COLOR
,
153 [VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR
] = FACTOR_ONE_MINUS_SRC_COLOR
,
154 [VK_BLEND_FACTOR_DST_COLOR
] = FACTOR_DST_COLOR
,
155 [VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR
] = FACTOR_ONE_MINUS_DST_COLOR
,
156 [VK_BLEND_FACTOR_SRC_ALPHA
] = FACTOR_SRC_ALPHA
,
157 [VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA
] = FACTOR_ONE_MINUS_SRC_ALPHA
,
158 [VK_BLEND_FACTOR_DST_ALPHA
] = FACTOR_DST_ALPHA
,
159 [VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA
] = FACTOR_ONE_MINUS_DST_ALPHA
,
160 [VK_BLEND_FACTOR_CONSTANT_COLOR
] = FACTOR_CONSTANT_COLOR
,
161 [VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR
]= FACTOR_ONE_MINUS_CONSTANT_COLOR
,
162 [VK_BLEND_FACTOR_CONSTANT_ALPHA
] = FACTOR_CONSTANT_ALPHA
,
163 [VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA
]= FACTOR_ONE_MINUS_CONSTANT_ALPHA
,
164 [VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
] = FACTOR_SRC_ALPHA_SATURATE
,
165 [VK_BLEND_FACTOR_SRC1_COLOR
] = FACTOR_SRC1_COLOR
,
166 [VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR
] = FACTOR_ONE_MINUS_SRC1_COLOR
,
167 [VK_BLEND_FACTOR_SRC1_ALPHA
] = FACTOR_SRC1_ALPHA
,
168 [VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
] = FACTOR_ONE_MINUS_SRC1_ALPHA
,
170 assert(factor
< ARRAY_SIZE(lookup
));
171 return lookup
[factor
];
174 static inline enum a3xx_rb_blend_opcode
175 tu6_blend_op(VkBlendOp op
)
177 return (enum a3xx_rb_blend_opcode
) op
;
180 static inline enum a6xx_tex_type
181 tu6_tex_type(VkImageViewType type
, bool storage
)
185 case VK_IMAGE_VIEW_TYPE_1D
:
186 case VK_IMAGE_VIEW_TYPE_1D_ARRAY
:
188 case VK_IMAGE_VIEW_TYPE_2D
:
189 case VK_IMAGE_VIEW_TYPE_2D_ARRAY
:
191 case VK_IMAGE_VIEW_TYPE_3D
:
193 case VK_IMAGE_VIEW_TYPE_CUBE
:
194 case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
:
195 return storage
? A6XX_TEX_2D
: A6XX_TEX_CUBE
;
199 static inline enum a6xx_tex_clamp
200 tu6_tex_wrap(VkSamplerAddressMode address_mode
)
203 [VK_SAMPLER_ADDRESS_MODE_REPEAT
] = A6XX_TEX_REPEAT
,
204 [VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT
] = A6XX_TEX_MIRROR_REPEAT
,
205 [VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
] = A6XX_TEX_CLAMP_TO_EDGE
,
206 [VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
] = A6XX_TEX_CLAMP_TO_BORDER
,
207 [VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
] = A6XX_TEX_MIRROR_CLAMP
,
209 assert(address_mode
< ARRAY_SIZE(lookup
));
210 return lookup
[address_mode
];
213 static inline enum a6xx_tex_filter
214 tu6_tex_filter(VkFilter filter
, unsigned aniso
)
217 case VK_FILTER_NEAREST
:
218 return A6XX_TEX_NEAREST
;
219 case VK_FILTER_LINEAR
:
220 return aniso
? A6XX_TEX_ANISO
: A6XX_TEX_LINEAR
;
221 case VK_FILTER_CUBIC_EXT
:
222 return A6XX_TEX_CUBIC
;
224 unreachable("illegal texture filter");
229 static inline enum a6xx_reduction_mode
230 tu6_reduction_mode(VkSamplerReductionMode reduction_mode
)
232 return (enum a6xx_reduction_mode
) reduction_mode
;
235 static inline enum a6xx_depth_format
236 tu6_pipe2depth(VkFormat format
)
239 case VK_FORMAT_D16_UNORM
:
241 case VK_FORMAT_X8_D24_UNORM_PACK32
:
242 case VK_FORMAT_D24_UNORM_S8_UINT
:
244 case VK_FORMAT_D32_SFLOAT
:
245 case VK_FORMAT_S8_UINT
:
252 #endif /* TU_UTIL_H */