/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include <assert.h>

#include "nir_builder.h"
28 emit_load_store(nir_builder
*b
, nir_intrinsic_instr
*orig_instr
,
29 nir_deref_var
*deref
, nir_deref
*tail
,
30 nir_ssa_def
**dest
, nir_ssa_def
*src
);
33 emit_indirect_load_store(nir_builder
*b
, nir_intrinsic_instr
*orig_instr
,
34 nir_deref_var
*deref
, nir_deref
*arr_parent
,
36 nir_ssa_def
**dest
, nir_ssa_def
*src
)
38 nir_deref_array
*arr
= nir_deref_as_array(arr_parent
->child
);
39 assert(arr
->deref_array_type
== nir_deref_array_type_indirect
);
40 assert(arr
->indirect
.is_ssa
);
43 if (start
== end
- 1) {
44 /* Base case. Just emit the load/store op */
45 nir_deref_array direct
= *arr
;
46 direct
.deref_array_type
= nir_deref_array_type_direct
;
47 direct
.base_offset
+= start
;
48 direct
.indirect
= NIR_SRC_INIT
;
50 arr_parent
->child
= &direct
.deref
;
51 emit_load_store(b
, orig_instr
, deref
, &direct
.deref
, dest
, src
);
52 arr_parent
->child
= &arr
->deref
;
54 int mid
= start
+ (end
- start
) / 2;
56 nir_ssa_def
*then_dest
, *else_dest
;
58 nir_push_if(b
, nir_ilt(b
, arr
->indirect
.ssa
, nir_imm_int(b
, mid
)));
59 emit_indirect_load_store(b
, orig_instr
, deref
, arr_parent
,
60 start
, mid
, &then_dest
, src
);
61 nir_push_else(b
, NULL
);
62 emit_indirect_load_store(b
, orig_instr
, deref
, arr_parent
,
63 mid
, end
, &else_dest
, src
);
67 *dest
= nir_if_phi(b
, then_dest
, else_dest
);
72 emit_load_store(nir_builder
*b
, nir_intrinsic_instr
*orig_instr
,
73 nir_deref_var
*deref
, nir_deref
*tail
,
74 nir_ssa_def
**dest
, nir_ssa_def
*src
)
76 for (; tail
->child
; tail
= tail
->child
) {
77 if (tail
->child
->deref_type
!= nir_deref_type_array
)
80 nir_deref_array
*arr
= nir_deref_as_array(tail
->child
);
81 if (arr
->deref_array_type
!= nir_deref_array_type_indirect
)
84 int length
= glsl_get_length(tail
->type
);
86 emit_indirect_load_store(b
, orig_instr
, deref
, tail
, -arr
->base_offset
,
87 length
- arr
->base_offset
, dest
, src
);
91 assert(tail
&& tail
->child
== NULL
);
93 /* We reached the end of the deref chain. Emit the instruction */
96 /* This is a load instruction */
97 nir_intrinsic_instr
*load
=
98 nir_intrinsic_instr_create(b
->shader
, orig_instr
->intrinsic
);
99 load
->num_components
= orig_instr
->num_components
;
100 load
->variables
[0] = nir_deref_var_clone(deref
, load
);
102 /* Copy over any sources. This is needed for interp_var_at */
104 i
< nir_intrinsic_infos
[orig_instr
->intrinsic
].num_srcs
; i
++)
105 nir_src_copy(&load
->src
[i
], &orig_instr
->src
[i
], load
);
107 unsigned bit_size
= orig_instr
->dest
.ssa
.bit_size
;
108 nir_ssa_dest_init(&load
->instr
, &load
->dest
,
109 load
->num_components
, bit_size
, NULL
);
110 nir_builder_instr_insert(b
, &load
->instr
);
111 *dest
= &load
->dest
.ssa
;
113 /* This is a store instruction */
114 nir_intrinsic_instr
*store
=
115 nir_intrinsic_instr_create(b
->shader
, nir_intrinsic_store_var
);
116 store
->num_components
= orig_instr
->num_components
;
117 nir_intrinsic_set_write_mask(store
, nir_intrinsic_write_mask(orig_instr
));
118 store
->variables
[0] = nir_deref_var_clone(deref
, store
);
119 store
->src
[0] = nir_src_for_ssa(src
);
120 nir_builder_instr_insert(b
, &store
->instr
);
125 deref_has_indirect(nir_deref_var
*deref
)
127 for (nir_deref
*tail
= deref
->deref
.child
; tail
; tail
= tail
->child
) {
128 if (tail
->deref_type
!= nir_deref_type_array
)
131 nir_deref_array
*arr
= nir_deref_as_array(tail
);
132 if (arr
->deref_array_type
== nir_deref_array_type_indirect
)
140 lower_indirect_block(nir_block
*block
, nir_builder
*b
,
141 nir_variable_mode modes
)
143 bool progress
= false;
145 nir_foreach_instr_safe(instr
, block
) {
146 if (instr
->type
!= nir_instr_type_intrinsic
)
149 nir_intrinsic_instr
*intrin
= nir_instr_as_intrinsic(instr
);
150 if (intrin
->intrinsic
!= nir_intrinsic_load_var
&&
151 intrin
->intrinsic
!= nir_intrinsic_interp_var_at_centroid
&&
152 intrin
->intrinsic
!= nir_intrinsic_interp_var_at_sample
&&
153 intrin
->intrinsic
!= nir_intrinsic_interp_var_at_offset
&&
154 intrin
->intrinsic
!= nir_intrinsic_store_var
)
157 if (!deref_has_indirect(intrin
->variables
[0]))
160 /* Only lower variables whose mode is in the mask, or compact
161 * array variables. (We can't handle indirects on tightly packed
162 * scalar arrays, so we need to lower them regardless.)
164 if (!(modes
& intrin
->variables
[0]->var
->data
.mode
) &&
165 !intrin
->variables
[0]->var
->data
.compact
)
168 b
->cursor
= nir_before_instr(&intrin
->instr
);
170 if (intrin
->intrinsic
!= nir_intrinsic_store_var
) {
172 emit_load_store(b
, intrin
, intrin
->variables
[0],
173 &intrin
->variables
[0]->deref
, &result
, NULL
);
174 nir_ssa_def_rewrite_uses(&intrin
->dest
.ssa
, nir_src_for_ssa(result
));
176 assert(intrin
->src
[0].is_ssa
);
177 emit_load_store(b
, intrin
, intrin
->variables
[0],
178 &intrin
->variables
[0]->deref
, NULL
, intrin
->src
[0].ssa
);
180 nir_instr_remove(&intrin
->instr
);
188 lower_indirects_impl(nir_function_impl
*impl
, nir_variable_mode modes
)
191 nir_builder_init(&builder
, impl
);
192 bool progress
= false;
194 nir_foreach_block_safe(block
, impl
) {
195 progress
|= lower_indirect_block(block
, &builder
, modes
);
199 nir_metadata_preserve(impl
, nir_metadata_none
);
204 /** Lowers indirect variable loads/stores to direct loads/stores.
206 * The pass works by replacing any indirect load or store with an if-ladder
207 * that does a binary search on the array index.
210 nir_lower_indirect_derefs(nir_shader
*shader
, nir_variable_mode modes
)
212 bool progress
= false;
214 nir_foreach_function(function
, shader
) {
216 progress
= lower_indirects_impl(function
->impl
, modes
) || progress
;