nir: Split nir_index_vars into two functions
[mesa.git] / src/compiler/nir/nir_opt_large_constants.c
/*
 * Copyright © 2018 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "nir.h"
#include "nir_builder.h"
#include "nir_deref.h"

struct var_info {
   nir_variable *var;

   bool is_constant;
   bool found_read;
   bool duplicate;

   /* Block that has all the variable stores. All the blocks with reads
    * should be dominated by this block.
    */
   nir_block *block;

   /* If is_constant, hold the collected constant data for this var. */
   uint32_t constant_data_size;
   void *constant_data;
};

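/* qsort comparator used to order var_info entries by the size of their
 * constant data and then by the data itself.  Sorting this way places
 * identical constant blobs next to each other so the allocation loop below
 * can detect duplicates by comparing each entry with its predecessor.
 */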
static int
var_info_cmp(const void *_a, const void *_b)
{
   const struct var_info *a = _a;
   const struct var_info *b = _b;
   uint32_t a_size = a->constant_data_size;
   uint32_t b_size = b->constant_data_size;

   if (a_size < b_size) {
      return -1;
   } else if (a_size > b_size) {
      return 1;
   } else if (a_size == 0) {
      /* Don't call memcmp with invalid pointers. */
      return 0;
   } else {
      return memcmp(a->constant_data, b->constant_data, a_size);
   }
}

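/* Emits a load_constant intrinsic that reads the (now lowered) variable's
 * value back out of the shader's constant data blob, using the variable's
 * assigned location as the base and the offset computed from the deref chain.
 */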
static nir_ssa_def *
build_constant_load(nir_builder *b, nir_deref_instr *deref,
                    glsl_type_size_align_func size_align)
{
   nir_variable *var = nir_deref_instr_get_variable(deref);

   const unsigned bit_size = glsl_get_bit_size(deref->type);
   const unsigned num_components = glsl_get_vector_elements(deref->type);

   UNUSED unsigned var_size, var_align;
   size_align(var->type, &var_size, &var_align);
   assert(var->data.location % var_align == 0);

   UNUSED unsigned deref_size, deref_align;
   size_align(deref->type, &deref_size, &deref_align);

   nir_intrinsic_instr *load =
      nir_intrinsic_instr_create(b->shader, nir_intrinsic_load_constant);
   load->num_components = num_components;
   nir_intrinsic_set_base(load, var->data.location);
   nir_intrinsic_set_range(load, var_size);
   nir_intrinsic_set_align(load, deref_align, 0);
   load->src[0] = nir_src_for_ssa(nir_build_deref_offset(b, deref, size_align));
   nir_ssa_dest_init(&load->instr, &load->dest,
                     num_components, bit_size, NULL);
   nir_builder_instr_insert(b, &load->instr);

   if (load->dest.ssa.bit_size < 8) {
      /* Booleans are special-cased to be 32-bit */
      assert(glsl_type_is_boolean(deref->type));
      assert(deref_size == num_components * 4);
      load->dest.ssa.bit_size = 32;
      return nir_b2b1(b, &load->dest.ssa);
   } else {
      assert(deref_size == num_components * bit_size / 8);
      return &load->dest.ssa;
   }
}

static void
handle_constant_store(void *mem_ctx, struct var_info *info,
                      nir_deref_instr *deref, nir_const_value *val,
                      unsigned writemask,
                      glsl_type_size_align_func size_align)
{
   assert(!nir_deref_instr_has_indirect(deref));
   const unsigned bit_size = glsl_get_bit_size(deref->type);
   const unsigned num_components = glsl_get_vector_elements(deref->type);

   if (info->constant_data_size == 0) {
      unsigned var_size, var_align;
      size_align(info->var->type, &var_size, &var_align);
      info->constant_data_size = var_size;
      info->constant_data = rzalloc_size(mem_ctx, var_size);
   }

   char *dst = (char *)info->constant_data +
               nir_deref_instr_get_const_offset(deref, size_align);

   for (unsigned i = 0; i < num_components; i++) {
      if (!(writemask & (1 << i)))
         continue;

      switch (bit_size) {
      case 1:
         /* Booleans are special-cased to be 32-bit */
         ((int32_t *)dst)[i] = -(int)val[i].b;
         break;

      case 8:
         ((uint8_t *)dst)[i] = val[i].u8;
         break;

      case 16:
         ((uint16_t *)dst)[i] = val[i].u16;
         break;

      case 32:
         ((uint32_t *)dst)[i] = val[i].u32;
         break;

      case 64:
         ((uint64_t *)dst)[i] = val[i].u64;
         break;

      default:
         unreachable("Invalid bit size");
      }
   }
}

/** Lower large constant variables to shader constant data
 *
 * This pass looks for large (type_size(var->type) > threshold) variables
 * which are statically constant and moves them into shader constant data.
 * This is especially useful when large tables are baked into the shader
 * source code because they can be moved into a UBO by the driver to reduce
 * register pressure and make indirect access cheaper.
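 *
 * A hypothetical driver call site might look like this (the 32-byte
 * threshold is purely illustrative; drivers pick their own value, and
 * passing NULL for size_align falls back to natural size/alignment):
 *
 *    NIR_PASS(progress, nir, nir_opt_large_constants,
 *             glsl_get_natural_size_align_bytes, 32);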
 */
bool
nir_opt_large_constants(nir_shader *shader,
                        glsl_type_size_align_func size_align,
                        unsigned threshold)
{
   /* Default to a natural alignment if none is provided */
   if (size_align == NULL)
      size_align = glsl_get_natural_size_align_bytes;

   /* This only works with a single entrypoint */
   nir_function_impl *impl = nir_shader_get_entrypoint(shader);

   /* This pass can only be run once */
   assert(shader->constant_data == NULL && shader->constant_data_size == 0);

   unsigned num_locals = nir_function_impl_index_vars(impl);

   if (num_locals == 0) {
      nir_shader_preserve_all_metadata(shader);
      return false;
   }

   struct var_info *var_infos = ralloc_array(NULL, struct var_info, num_locals);
   nir_foreach_function_temp_variable(var, impl) {
      var_infos[var->index] = (struct var_info) {
         .var = var,
         .is_constant = true,
         .found_read = false,
      };
   }

   nir_metadata_require(impl, nir_metadata_dominance);

   /* First, walk through the shader and figure out what variables we can
    * lower to the constant blob.
    */
   nir_foreach_block(block, impl) {
      nir_foreach_instr(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         bool src_is_const = false;
         nir_deref_instr *src_deref = NULL, *dst_deref = NULL;
         unsigned writemask = 0;
         switch (intrin->intrinsic) {
         case nir_intrinsic_store_deref:
            dst_deref = nir_src_as_deref(intrin->src[0]);
            src_is_const = nir_src_is_const(intrin->src[1]);
            writemask = nir_intrinsic_write_mask(intrin);
            break;

         case nir_intrinsic_load_deref:
            src_deref = nir_src_as_deref(intrin->src[0]);
            break;

         case nir_intrinsic_copy_deref:
            assert(!"Lowering of copy_deref with large constants is prohibited");
            break;

         default:
            continue;
         }

         if (dst_deref && dst_deref->mode == nir_var_function_temp) {
            nir_variable *var = nir_deref_instr_get_variable(dst_deref);
            assert(var->data.mode == nir_var_function_temp);

            struct var_info *info = &var_infos[var->index];
            if (!info->is_constant)
               continue;

            if (!info->block)
               info->block = block;

            /* We only consider variables constant if they only have constant
             * stores, all the stores come before any reads, and all stores
             * come from the same block. We also can't handle indirect stores.
             */
            if (!src_is_const || info->found_read || block != info->block ||
                nir_deref_instr_has_indirect(dst_deref)) {
               info->is_constant = false;
            } else {
               nir_const_value *val = nir_src_as_const_value(intrin->src[1]);
               handle_constant_store(var_infos, info, dst_deref, val, writemask,
                                     size_align);
            }
         }

         if (src_deref && src_deref->mode == nir_var_function_temp) {
            nir_variable *var = nir_deref_instr_get_variable(src_deref);
            assert(var->data.mode == nir_var_function_temp);

            /* We only consider variables constant if all the reads are
             * dominated by the block that writes to it.
             */
            struct var_info *info = &var_infos[var->index];
            if (!info->is_constant)
               continue;

            if (!info->block || !nir_block_dominates(info->block, block))
               info->is_constant = false;

            info->found_read = true;
         }
      }
   }

   /* Allocate constant data space for each variable that just has constant
    * data. We sort them by size and content so we can easily find
    * duplicates.
    */
   shader->constant_data_size = 0;
   qsort(var_infos, num_locals, sizeof(struct var_info), var_info_cmp);
   for (int i = 0; i < num_locals; i++) {
      struct var_info *info = &var_infos[i];

      /* Fix up indices after we sorted. */
      info->var->index = i;

      if (!info->is_constant)
         continue;

      unsigned var_size, var_align;
      size_align(info->var->type, &var_size, &var_align);
      if (var_size <= threshold || !info->found_read) {
         /* Don't bother lowering small stuff or data that's never read */
         info->is_constant = false;
         continue;
      }

      if (i > 0 && var_info_cmp(info, &var_infos[i - 1]) == 0) {
         info->var->data.location = var_infos[i - 1].var->data.location;
         info->duplicate = true;
      } else {
         info->var->data.location = ALIGN_POT(shader->constant_data_size, var_align);
         shader->constant_data_size = info->var->data.location + var_size;
      }
   }

   if (shader->constant_data_size == 0) {
      nir_shader_preserve_all_metadata(shader);
      ralloc_free(var_infos);
      return false;
   }

   shader->constant_data = rzalloc_size(shader, shader->constant_data_size);
   for (int i = 0; i < num_locals; i++) {
      struct var_info *info = &var_infos[i];
      if (!info->duplicate && info->is_constant) {
         memcpy((char *)shader->constant_data + info->var->data.location,
                info->constant_data, info->constant_data_size);
      }
   }

   nir_builder b;
   nir_builder_init(&b, impl);

   nir_foreach_block(block, impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            if (deref->mode != nir_var_function_temp)
               continue;

            nir_variable *var = nir_deref_instr_get_variable(deref);
            struct var_info *info = &var_infos[var->index];
            if (info->is_constant) {
               b.cursor = nir_after_instr(&intrin->instr);
               nir_ssa_def *val = build_constant_load(&b, deref, size_align);
               nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                        nir_src_for_ssa(val));
               nir_instr_remove(&intrin->instr);
               nir_deref_instr_remove_if_unused(deref);
            }
            break;
         }

         case nir_intrinsic_store_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            if (deref->mode != nir_var_function_temp)
               continue;

            nir_variable *var = nir_deref_instr_get_variable(deref);
            struct var_info *info = &var_infos[var->index];
            if (info->is_constant) {
               nir_instr_remove(&intrin->instr);
               nir_deref_instr_remove_if_unused(deref);
            }
            break;
         }

         case nir_intrinsic_copy_deref:
         default:
            continue;
         }
      }
   }

   /* Clean up the now unused variables */
   for (int i = 0; i < num_locals; i++) {
      struct var_info *info = &var_infos[i];
      if (info->is_constant)
         exec_node_remove(&info->var->node);
   }

   ralloc_free(var_infos);

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);
   return true;
}