nir/lower_atomics_to_ssbo: Also lower barriers
src/compiler/nir/nir_lower_atomics_to_ssbo.c
/*
 * Copyright © 2017 Red Hat
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Rob Clark <robclark@freedesktop.org>
 */

#include "nir.h"
#include "nir_builder.h"

#if defined(_WIN32) && !defined(snprintf)
#define snprintf _snprintf
#endif

/*
 * Remap atomic counters to SSBOs. Atomic counters get remapped to
 * SSBO binding points [0..ssbo_offset) and the original SSBOs are
 * remapped to [ssbo_offset..n) (mostly to align with what mesa/st
 * does).
 */
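/*
 * For example (hypothetical layout, not taken from any particular driver):
 * with ssbo_offset == 2, an atomic counter buffer at binding 1 is accessed
 * as SSBO index 1 after this pass, while an SSBO the shader previously
 * accessed at index 0 is accessed at index 2.
 */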

static bool
lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
{
   nir_intrinsic_op op;
   int idx_src;

   b->cursor = nir_before_instr(&instr->instr);

   switch (instr->intrinsic) {
   case nir_intrinsic_memory_barrier_atomic_counter:
      /* Atomic counters are now SSBOs so memoryBarrierAtomicCounter() is now
       * memoryBarrierBuffer().
       */
      instr->intrinsic = nir_intrinsic_memory_barrier_buffer;
      return true;

   case nir_intrinsic_ssbo_atomic_add:
   case nir_intrinsic_ssbo_atomic_imin:
   case nir_intrinsic_ssbo_atomic_umin:
   case nir_intrinsic_ssbo_atomic_imax:
   case nir_intrinsic_ssbo_atomic_umax:
   case nir_intrinsic_ssbo_atomic_and:
   case nir_intrinsic_ssbo_atomic_or:
   case nir_intrinsic_ssbo_atomic_xor:
   case nir_intrinsic_ssbo_atomic_exchange:
   case nir_intrinsic_ssbo_atomic_comp_swap:
   case nir_intrinsic_ssbo_atomic_fadd:
   case nir_intrinsic_ssbo_atomic_fmin:
   case nir_intrinsic_ssbo_atomic_fmax:
   case nir_intrinsic_ssbo_atomic_fcomp_swap:
   case nir_intrinsic_store_ssbo:
   case nir_intrinsic_load_ssbo:
   case nir_intrinsic_get_buffer_size:
      /* easy case, keep same opcode and just remap SSBO buffer index: */
      op = instr->intrinsic;
      idx_src = (op == nir_intrinsic_store_ssbo) ? 1 : 0;
      nir_ssa_def *old_idx = nir_ssa_for_src(b, instr->src[idx_src], 1);
      nir_ssa_def *new_idx = nir_iadd(b, old_idx, nir_imm_int(b, ssbo_offset));
      nir_instr_rewrite_src(&instr->instr,
                            &instr->src[idx_src],
                            nir_src_for_ssa(new_idx));
      return true;
   case nir_intrinsic_atomic_counter_inc:
   case nir_intrinsic_atomic_counter_add:
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* inc and dec get remapped to add: */
      op = nir_intrinsic_ssbo_atomic_add;
      break;
   case nir_intrinsic_atomic_counter_read:
      op = nir_intrinsic_load_ssbo;
      break;
   case nir_intrinsic_atomic_counter_min:
      op = nir_intrinsic_ssbo_atomic_umin;
      break;
   case nir_intrinsic_atomic_counter_max:
      op = nir_intrinsic_ssbo_atomic_umax;
      break;
   case nir_intrinsic_atomic_counter_and:
      op = nir_intrinsic_ssbo_atomic_and;
      break;
   case nir_intrinsic_atomic_counter_or:
      op = nir_intrinsic_ssbo_atomic_or;
      break;
   case nir_intrinsic_atomic_counter_xor:
      op = nir_intrinsic_ssbo_atomic_xor;
      break;
   case nir_intrinsic_atomic_counter_exchange:
      op = nir_intrinsic_ssbo_atomic_exchange;
      break;
   case nir_intrinsic_atomic_counter_comp_swap:
      op = nir_intrinsic_ssbo_atomic_comp_swap;
      break;
   default:
      return false;
   }

   nir_ssa_def *buffer = nir_imm_int(b, nir_intrinsic_base(instr));
   nir_ssa_def *temp = NULL;
   nir_intrinsic_instr *new_instr =
         nir_intrinsic_instr_create(ralloc_parent(instr), op);

   /* a couple instructions need special handling since they don't map
    * 1:1 with ssbo atomics
    */
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_inc:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, +1 } */
      temp = nir_imm_int(b, +1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, -1 } */
      /* NOTE semantic difference so we adjust the return value below */
      temp = nir_imm_int(b, -1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_read:
      /* remapped to load_ssbo: { buffer_idx, offset } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      break;
   default:
      /* remapped to ssbo_atomic_x: { buffer_idx, offset, data, (compare)? } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      nir_src_copy(&new_instr->src[2], &instr->src[1], new_instr);
      if (op == nir_intrinsic_ssbo_atomic_comp_swap ||
          op == nir_intrinsic_ssbo_atomic_fcomp_swap)
         nir_src_copy(&new_instr->src[3], &instr->src[2], new_instr);
      break;
   }

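   /* Atomic counters are 32-bit values, so the lowered load/store is known
    * to be 4-byte aligned.
    */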
   if (new_instr->intrinsic == nir_intrinsic_load_ssbo ||
       new_instr->intrinsic == nir_intrinsic_store_ssbo)
      nir_intrinsic_set_align(new_instr, 4, 0);

   nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                     instr->dest.ssa.num_components,
                     instr->dest.ssa.bit_size, NULL);
   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);

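   /* ssbo_atomic_add returns the value the counter held before the add,
    * while atomic_counter_pre_dec is expected to return the decremented
    * value, so fold the -1 into the result in that case.
    */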
   if (instr->intrinsic == nir_intrinsic_atomic_counter_pre_dec) {
      b->cursor = nir_after_instr(&new_instr->instr);
      nir_ssa_def *result = nir_iadd(b, &new_instr->dest.ssa, temp);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, nir_src_for_ssa(result));
   } else {
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, nir_src_for_ssa(&new_instr->dest.ssa));
   }

   /* we could be replacing an intrinsic with fixed # of dest num_components
    * with one that has variable number. So best to take this from the dest:
    */
   new_instr->num_components = instr->dest.ssa.num_components;

   return true;
}

static bool
is_atomic_uint(const struct glsl_type *type)
{
   if (glsl_get_base_type(type) == GLSL_TYPE_ARRAY)
      return is_atomic_uint(glsl_get_array_element(type));
   return glsl_get_base_type(type) == GLSL_TYPE_ATOMIC_UINT;
}

bool
nir_lower_atomics_to_ssbo(nir_shader *shader, unsigned ssbo_offset)
{
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl) {
         nir_builder builder;
         nir_builder_init(&builder, function->impl);
         nir_foreach_block(block, function->impl) {
            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_intrinsic)
                  progress |= lower_instr(nir_instr_as_intrinsic(instr),
                                          ssbo_offset, &builder);
            }
         }

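         /* The control flow graph is not modified (instructions are only
          * rewritten or replaced in place), so block indices and dominance
          * information stay valid.
          */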
         nir_metadata_preserve(function->impl, nir_metadata_block_index |
                                               nir_metadata_dominance);
      }
   }

   if (progress) {
      /* replace atomic_uint uniforms with ssbo's: */
      unsigned replaced = 0;
      nir_foreach_variable_safe(var, &shader->uniforms) {
         if (is_atomic_uint(var->type)) {
            exec_node_remove(&var->node);

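            /* Several counters can share a binding (they live in the same
             * counter buffer); only create the replacement SSBO variable
             * once per binding.
             */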
            if (replaced & (1 << var->data.binding))
               continue;

            nir_variable *ssbo;
            char name[16];

            /* A length of 0 is used to denote unsized arrays */
            const struct glsl_type *type = glsl_array_type(glsl_uint_type(), 0, 0);

            snprintf(name, sizeof(name), "counter%d", var->data.binding);

            ssbo = nir_variable_create(shader, nir_var_mem_ssbo, type, name);
            ssbo->data.binding = var->data.binding;

            struct glsl_struct_field field = {
               .type = type,
               .name = "counters",
               .location = -1,
            };

            ssbo->interface_type =
               glsl_interface_type(&field, 1, GLSL_INTERFACE_PACKING_STD430,
                                   false, "counters");

            replaced |= (1 << var->data.binding);
         }
      }

      /* Make sure that shader->info.num_ssbos still reflects the maximum SSBO
       * index that can be used in the shader.
       */
      if (shader->info.num_ssbos > 0) {
         shader->info.num_ssbos += ssbo_offset;
      } else {
         /* We can't use num_abos, because it only represents the number of
          * active atomic counters, and currently, unlike SSBOs, they aren't
          * compacted, so num_abos isn't a bound on the index passed to
          * nir_intrinsic_atomic_counter_*.  E.g. if we have a single atomic
          * counter declared like:
          *
          *   layout(binding=1) atomic_uint counter0;
          *
          * then when we lower accesses to it the atomic_counter_* intrinsics
          * will have 1 as the index but num_abos will still be 1.
          */
         shader->info.num_ssbos = util_last_bit(replaced);
      }
      shader->info.num_abos = 0;
   }

   return progress;
}