/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */
#include "nir.h"
#include "nir_builder.h"
struct deref_node {
   struct deref_node *parent;
   const struct glsl_type *type;

   bool lower_to_ssa;

   /* Only valid for things that end up in the direct list.
    * Note that multiple nir_deref_vars may correspond to this node, but they
    * will all be equivalent, so any is as good as the other.
    */
   nir_deref_var *deref;
   struct exec_node direct_derefs_link;

   struct set *loads;
   struct set *stores;
   struct set *copies;

   nir_ssa_def **def_stack;
   nir_ssa_def **def_stack_tail;

   struct deref_node *wildcard;
   struct deref_node *indirect;
   struct deref_node *children[0];
};
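
/* Each variable gets a tree of deref_nodes rooted at the node for the bare
 * variable itself.  Direct array elements and struct members live in the
 * children[] flexible array (sized by glsl_get_length(type) when the node
 * is created), while indirect and wildcard array accesses each get a
 * single dedicated child: "indirect" and "wildcard" respectively.
 */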
struct lower_variables_state {
   nir_shader *shader;
   void *dead_ctx;
   nir_function_impl *impl;

   /* A hash table mapping variables to deref_node data */
   struct hash_table *deref_var_nodes;

   /* A list of fully-qualified direct dereferences, i.e. dereferences with
    * no indirect or wildcard array dereferences.
    *
    * At the moment, we only lower loads, stores, and copies that can be
    * trivially lowered to loads and stores, i.e. copies with no indirects
    * and no wildcards.  If a part of a variable that is being loaded from
    * and/or stored into is also involved in a copy operation with
    * wildcards, then we lower that copy operation to loads and stores, but
    * otherwise we leave copies with wildcards alone.  Since the only derefs
    * used in these loads, stores, and trivial copies are ones with no
    * wildcards and no indirects, these are precisely the derefs that we
    * can actually consider lowering.
    */
   struct exec_list direct_deref_nodes;

   /* Controls whether get_deref_node will add variables to the
    * direct_deref_nodes list.  This is turned on when we are initially
    * scanning for load/store instructions.  It is then turned off so we
    * don't accidentally change the direct_deref_nodes list while we're
    * iterating through it.
    */
   bool add_to_direct_deref_nodes;

   /* A hash table mapping phi nodes to deref_node data */
   struct hash_table *phi_table;
};
static struct deref_node *
deref_node_create(struct deref_node *parent,
                  const struct glsl_type *type, nir_shader *shader)
{
   size_t size = sizeof(struct deref_node) +
                 glsl_get_length(type) * sizeof(struct deref_node *);

   struct deref_node *node = rzalloc_size(shader, size);
   node->type = type;
   node->parent = parent;
   exec_node_init(&node->direct_derefs_link);

   return node;
}
/* Returns the deref node associated with the given variable.  This will be
 * the root of the tree representing all of the derefs of the given variable.
 */
static struct deref_node *
get_deref_node_for_var(nir_variable *var, struct lower_variables_state *state)
{
   struct deref_node *node;

   struct hash_entry *var_entry =
      _mesa_hash_table_search(state->deref_var_nodes, var);

   if (var_entry) {
      return var_entry->data;
   } else {
      node = deref_node_create(NULL, var->type, state->dead_ctx);
      _mesa_hash_table_insert(state->deref_var_nodes, var, node);
      return node;
   }
}
/* Gets the deref_node for the given deref chain and creates it if it
 * doesn't yet exist.  If the deref is fully-qualified and direct and
 * state->add_to_direct_deref_nodes is true, it will be added to the list
 * of fully-qualified direct derefs.
 */
static struct deref_node *
get_deref_node(nir_deref_var *deref, struct lower_variables_state *state)
{
   bool is_direct = true;

   /* Start at the base of the chain. */
   struct deref_node *node = get_deref_node_for_var(deref->var, state);
   assert(deref->deref.type == node->type);

   for (nir_deref *tail = deref->deref.child; tail; tail = tail->child) {
      switch (tail->deref_type) {
      case nir_deref_type_struct: {
         nir_deref_struct *deref_struct = nir_deref_as_struct(tail);

         assert(deref_struct->index < glsl_get_length(node->type));

         if (node->children[deref_struct->index] == NULL)
            node->children[deref_struct->index] =
               deref_node_create(node, tail->type, state->dead_ctx);

         node = node->children[deref_struct->index];
         break;
      }

      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(tail);

         switch (arr->deref_array_type) {
         case nir_deref_array_type_direct:
            /* This is possible if a loop unrolls and generates an
             * out-of-bounds offset.  We need to handle this at least
             * somewhat gracefully.
             */
            if (arr->base_offset >= glsl_get_length(node->type))
               return NULL;

            if (node->children[arr->base_offset] == NULL)
               node->children[arr->base_offset] =
                  deref_node_create(node, tail->type, state->dead_ctx);

            node = node->children[arr->base_offset];
            break;

         case nir_deref_array_type_indirect:
            if (node->indirect == NULL)
               node->indirect = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->indirect;
            is_direct = false;
            break;

         case nir_deref_array_type_wildcard:
            if (node->wildcard == NULL)
               node->wildcard = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->wildcard;
            is_direct = false;
            break;

         default:
            unreachable("Invalid array deref type");
         }
         break;
      }

      default:
         unreachable("Invalid deref type");
      }
   }

   /* Only insert if it isn't already in the list. */
   if (is_direct && state->add_to_direct_deref_nodes &&
       node->direct_derefs_link.next == NULL) {
      node->deref = deref;
      assert(deref->var != NULL);
      exec_list_push_tail(&state->direct_deref_nodes,
                          &node->direct_derefs_link);
   }

   return node;
}
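
/* For example, walking a[3].foo visits root(a) -> children[3] ->
 * children[foo_index] and stays direct, while a[i].foo goes through the
 * root's "indirect" child, so is_direct ends up false and the node is
 * never added to direct_deref_nodes.
 */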
/* \sa foreach_deref_node_match */
static bool
foreach_deref_node_worker(struct deref_node *node, nir_deref *deref,
                          bool (* cb)(struct deref_node *node,
                                      struct lower_variables_state *state),
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return cb(node, state);
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         assert(arr->deref_array_type == nir_deref_array_type_direct);
         if (node->children[arr->base_offset] &&
             !foreach_deref_node_worker(node->children[arr->base_offset],
                                        deref->child, cb, state))
            return false;

         if (node->wildcard &&
             !foreach_deref_node_worker(node->wildcard,
                                        deref->child, cb, state))
            return false;

         return true;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         return foreach_deref_node_worker(node->children[str->index],
                                          deref->child, cb, state);
      }

      default:
         unreachable("Invalid deref child type");
      }
   }
}
/* Walks over every "matching" deref_node and calls the callback.  A node
 * is considered to "match" if it either refers to that deref or matches up
 * to a wildcard.  In other words, the following would match a[6].foo[3].bar:
 *
 *    a[6].foo[3].bar
 *    a[*].foo[3].bar
 *    a[6].foo[*].bar
 *    a[*].foo[*].bar
 *
 * The given deref must be a full-length and fully qualified (no wildcards
 * or indirects) deref chain.
 */
static bool
foreach_deref_node_match(nir_deref_var *deref,
                         bool (* cb)(struct deref_node *node,
                                     struct lower_variables_state *state),
                         struct lower_variables_state *state)
{
   nir_deref_var var_deref = *deref;
   var_deref.deref.child = NULL;
   struct deref_node *node = get_deref_node(&var_deref, state);

   if (node == NULL)
      return false;

   return foreach_deref_node_worker(node, &deref->deref, cb, state);
}
/* \sa deref_may_be_aliased */
static bool
deref_may_be_aliased_node(struct deref_node *node, nir_deref *deref,
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return false;
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         if (arr->deref_array_type == nir_deref_array_type_indirect)
            return true;

         /* If there is an indirect at this level, we're aliased. */
         if (node->indirect)
            return true;

         assert(arr->deref_array_type == nir_deref_array_type_direct);

         if (node->children[arr->base_offset] &&
             deref_may_be_aliased_node(node->children[arr->base_offset],
                                       deref->child, state))
            return true;

         if (node->wildcard &&
             deref_may_be_aliased_node(node->wildcard, deref->child, state))
            return true;

         return false;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         if (node->children[str->index]) {
            return deref_may_be_aliased_node(node->children[str->index],
                                             deref->child, state);
         } else {
            return false;
         }
      }

      default:
         unreachable("Invalid nir_deref child type");
      }
   }
}
/* Returns true if an indirect reference may ever touch this deref.
 *
 * For example, if the given deref is a[6].foo, then any uses of a[i].foo
 * would cause this to return true, but a[i].bar would not affect it
 * because it's a different structure member.  A var_copy involving
 * a[*].bar also doesn't affect it because that can be lowered to entirely
 * direct load/stores.
 *
 * We only support asking this question about fully-qualified derefs.
 * Obviously, it's pointless to ask this about indirects, but we also
 * rule out wildcards.  Handling wildcard dereferences would involve
 * checking each array index to make sure that there aren't any indirect
 * references.
 */
static bool
deref_may_be_aliased(nir_deref_var *deref,
                     struct lower_variables_state *state)
{
   return deref_may_be_aliased_node(get_deref_node_for_var(deref->var, state),
                                    &deref->deref, state);
}
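
/* The register_* functions below record each load_var, store_var, and
 * copy_var intrinsic in the corresponding per-node set.  These sets are
 * consulted later for phi placement and for lowering copies that touch a
 * node we plan to convert to SSA.
 */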
static void
register_load_instr(nir_intrinsic_instr *load_instr,
                    struct lower_variables_state *state)
{
   struct deref_node *node = get_deref_node(load_instr->variables[0], state);
   if (node == NULL)
      return;

   if (node->loads == NULL)
      node->loads = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                     _mesa_key_pointer_equal);

   _mesa_set_add(node->loads, load_instr);
}
static void
register_store_instr(nir_intrinsic_instr *store_instr,
                     struct lower_variables_state *state)
{
   struct deref_node *node = get_deref_node(store_instr->variables[0], state);
   if (node == NULL)
      return;

   if (node->stores == NULL)
      node->stores = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                      _mesa_key_pointer_equal);

   _mesa_set_add(node->stores, store_instr);
}
static void
register_copy_instr(nir_intrinsic_instr *copy_instr,
                    struct lower_variables_state *state)
{
   for (unsigned idx = 0; idx < 2; idx++) {
      struct deref_node *node =
         get_deref_node(copy_instr->variables[idx], state);
      if (node == NULL)
         continue;

      if (node->copies == NULL)
         node->copies = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                         _mesa_key_pointer_equal);

      _mesa_set_add(node->copies, copy_instr);
   }
}
/* Registers all variable uses in the given block. */
static bool
register_variable_uses_block(nir_block *block, void *void_state)
{
   struct lower_variables_state *state = void_state;

   nir_foreach_instr_safe(block, instr) {
      if (instr->type != nir_instr_type_intrinsic)
         continue;

      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

      switch (intrin->intrinsic) {
      case nir_intrinsic_load_var:
         register_load_instr(intrin, state);
         break;

      case nir_intrinsic_store_var:
         register_store_instr(intrin, state);
         break;

      case nir_intrinsic_copy_var:
         register_copy_instr(intrin, state);
         break;

      default:
         continue;
      }
   }

   return true;
}
/* Walks over all of the copy instructions to or from the given deref_node
 * and lowers them to load/store intrinsics.
 */
static bool
lower_copies_to_load_store(struct deref_node *node,
                           struct lower_variables_state *state)
{
   if (!node->copies)
      return true;

   struct set_entry *copy_entry;
   set_foreach(node->copies, copy_entry) {
      nir_intrinsic_instr *copy = (void *)copy_entry->key;

      nir_lower_var_copy_instr(copy, state->shader);

      for (unsigned i = 0; i < 2; ++i) {
         struct deref_node *arg_node =
            get_deref_node(copy->variables[i], state);

         /* Only bother removing copy entries for other nodes */
         if (arg_node == NULL || arg_node == node)
            continue;

         struct set_entry *arg_entry = _mesa_set_search(arg_node->copies, copy);
         assert(arg_entry);
         _mesa_set_remove(arg_node->copies, arg_entry);
      }

      nir_instr_remove(&copy->instr);
   }

   return true;
}
/** Pushes an SSA def onto the def stack for the given node
 *
 * Each node is potentially associated with a stack of SSA definitions.
 * This stack is used for determining what SSA definition reaches a given
 * point in the program for variable renaming.  The stack is always kept in
 * dominance-order with at most one SSA def per block.  If the SSA
 * definition on the top of the stack is in the same block as the one being
 * pushed, the top element is replaced.
 */
static void
def_stack_push(struct deref_node *node, nir_ssa_def *def,
               struct lower_variables_state *state)
{
   if (node->def_stack == NULL) {
      node->def_stack = ralloc_array(state->dead_ctx, nir_ssa_def *,
                                     state->impl->num_blocks);
      node->def_stack_tail = node->def_stack - 1;
   }

   if (node->def_stack_tail >= node->def_stack) {
      nir_ssa_def *top_def = *node->def_stack_tail;

      if (def->parent_instr->block == top_def->parent_instr->block) {
         /* They're in the same block, just replace the top */
         *node->def_stack_tail = def;
         return;
      }
   }

   *(++node->def_stack_tail) = def;
}
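
/* Because a same-block push replaces the top entry instead of growing the
 * stack, the stack never holds more than one def per block, which is why
 * the impl->num_blocks slots allocated above are always sufficient.
 */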
/* Pop the top of the def stack if it's in the given block */
static void
def_stack_pop_if_in_block(struct deref_node *node, nir_block *block)
{
   /* If we're popping, then we have presumably pushed at some time in the
    * past so this should exist.
    */
   assert(node->def_stack != NULL);

   /* The stack is already empty.  Do nothing. */
   if (node->def_stack_tail < node->def_stack)
      return;

   nir_ssa_def *def = *node->def_stack_tail;
   if (def->parent_instr->block == block)
      node->def_stack_tail--;
}
/** Retrieves the SSA definition on the top of the stack for the given
 * node, if one exists.  If the stack is empty, then we return the constant
 * initializer (if it exists) or an SSA undef.
 */
static nir_ssa_def *
get_ssa_def_for_block(struct deref_node *node, nir_block *block,
                      struct lower_variables_state *state)
{
   /* If we have something on the stack, go ahead and return it.  We're
    * assuming that the top of the stack dominates the given block.
    */
   if (node->def_stack && node->def_stack_tail >= node->def_stack)
      return *node->def_stack_tail;

   /* If we got here then we don't have a definition that dominates the
    * given block.  This means that we need to add an undef and use that.
    */
   nir_ssa_undef_instr *undef =
      nir_ssa_undef_instr_create(state->shader,
                                 glsl_get_vector_elements(node->type));
   nir_instr_insert_before_cf_list(&state->impl->body, &undef->instr);
   def_stack_push(node, &undef->def, state);
   return &undef->def;
}
/* Given a block and one of its predecessors, this function fills in the
 * sources of the phi nodes to take SSA defs from the given predecessor.
 * This function must be called exactly once per block/predecessor pair.
 */
static void
add_phi_sources(nir_block *block, nir_block *pred,
                struct lower_variables_state *state)
{
   nir_foreach_instr(block, instr) {
      if (instr->type != nir_instr_type_phi)
         break;

      nir_phi_instr *phi = nir_instr_as_phi(instr);

      struct hash_entry *entry =
         _mesa_hash_table_search(state->phi_table, phi);
      if (!entry)
         continue;

      struct deref_node *node = entry->data;

      nir_phi_src *src = ralloc(phi, nir_phi_src);
      src->pred = pred;
      src->src.parent_instr = &phi->instr;
      src->src.is_ssa = true;
      src->src.ssa = get_ssa_def_for_block(node, pred, state);

      list_addtail(&src->src.use_link, &src->src.ssa->uses);

      exec_list_push_tail(&phi->srcs, &src->node);
   }
}
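
/* Note that phi sources are filled in here, from the predecessor's def
 * stack, rather than when the phi is created: at creation time (in
 * insert_phi_nodes) the reaching definitions are not yet known.
 */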
/* Performs variable renaming by doing a DFS of the dominance tree
 *
 * This algorithm is very similar to the one outlined in "Efficiently
 * Computing Static Single Assignment Form and the Control Dependence
 * Graph" by Cytron et al.  The primary difference is that we only put one
 * SSA def on the stack per block.
 */
static bool
rename_variables_block(nir_block *block, struct lower_variables_state *state)
{
   nir_builder b;
   nir_builder_init(&b, state->impl);

   nir_foreach_instr_safe(block, instr) {
      if (instr->type == nir_instr_type_phi) {
         nir_phi_instr *phi = nir_instr_as_phi(instr);

         struct hash_entry *entry =
            _mesa_hash_table_search(state->phi_table, phi);

         /* This can happen if we already have phi nodes in the program
          * that were not created in this pass.
          */
         if (entry == NULL)
            continue;

         struct deref_node *node = entry->data;

         def_stack_push(node, &phi->dest.ssa, state);
      } else if (instr->type == nir_instr_type_intrinsic) {
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_var: {
            struct deref_node *node =
               get_deref_node(intrin->variables[0], state);

            if (node == NULL) {
               /* If we hit this path then we are referencing an invalid
                * value.  Most likely, we unrolled something and are
                * reading past the end of some array.  In any case, this
                * should result in an undefined value.
                */
               nir_ssa_undef_instr *undef =
                  nir_ssa_undef_instr_create(state->shader,
                                             intrin->num_components);

               nir_instr_insert_before(&intrin->instr, &undef->instr);
               nir_instr_remove(&intrin->instr);

               nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                        nir_src_for_ssa(&undef->def));
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            nir_alu_instr *mov = nir_alu_instr_create(state->shader,
                                                      nir_op_imov);
            mov->src[0].src.is_ssa = true;
            mov->src[0].src.ssa = get_ssa_def_for_block(node, block, state);
            for (unsigned i = intrin->num_components; i < 4; i++)
               mov->src[0].swizzle[i] = 0;

            assert(intrin->dest.is_ssa);

            mov->dest.write_mask = (1 << intrin->num_components) - 1;
            nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                              intrin->num_components, NULL);

            nir_instr_insert_before(&intrin->instr, &mov->instr);
            nir_instr_remove(&intrin->instr);

            nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                     nir_src_for_ssa(&mov->dest.dest.ssa));
            break;
         }
         case nir_intrinsic_store_var: {
            struct deref_node *node =
               get_deref_node(intrin->variables[0], state);

            if (node == NULL) {
               /* Probably an out-of-bounds array store.  That should be a
                * no-op.
                */
               nir_instr_remove(&intrin->instr);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            assert(intrin->num_components ==
                   glsl_get_vector_elements(node->type));

            assert(intrin->src[0].is_ssa);

            nir_ssa_def *new_def;
            b.cursor = nir_before_instr(&intrin->instr);

            if (intrin->const_index[0] == (1 << intrin->num_components) - 1) {
               /* Whole variable store - just copy the source.  Note that
                * intrin->num_components and intrin->src[0].ssa->num_components
                * may differ.
                */
               unsigned swiz[4];
               for (unsigned i = 0; i < 4; i++)
                  swiz[i] = i < intrin->num_components ? i : 0;

               new_def = nir_swizzle(&b, intrin->src[0].ssa, swiz,
                                     intrin->num_components, false);
            } else {
               nir_ssa_def *old_def = get_ssa_def_for_block(node, block, state);
               /* For writemasked store_var intrinsics, we combine the newly
                * written values with the existing contents of unwritten
                * channels, creating a new SSA value for the whole vector.
                */
               nir_ssa_def *srcs[4];
               for (unsigned i = 0; i < intrin->num_components; i++) {
                  if (intrin->const_index[0] & (1 << i)) {
                     srcs[i] = nir_channel(&b, intrin->src[0].ssa, i);
                  } else {
                     srcs[i] = nir_channel(&b, old_def, i);
                  }
               }
               new_def = nir_vec(&b, srcs, intrin->num_components);
            }

            assert(new_def->num_components == intrin->num_components);

            def_stack_push(node, new_def, state);

            /* We'll wait to remove the instruction until the next pass
             * where we pop the node we just pushed back off the stack.
             */
            break;
         }

         default:
            break;
         }
      }
   }
   if (block->successors[0])
      add_phi_sources(block->successors[0], block, state);
   if (block->successors[1])
      add_phi_sources(block->successors[1], block, state);

   for (unsigned i = 0; i < block->num_dom_children; ++i)
      rename_variables_block(block->dom_children[i], state);

   /* Now we iterate over the instructions and pop off any SSA defs that we
    * pushed in the first loop.
    */
   nir_foreach_instr_safe(block, instr) {
      if (instr->type == nir_instr_type_phi) {
         nir_phi_instr *phi = nir_instr_as_phi(instr);

         struct hash_entry *entry =
            _mesa_hash_table_search(state->phi_table, phi);

         /* This can happen if we already have phi nodes in the program
          * that were not created in this pass.
          */
         if (entry == NULL)
            continue;

         struct deref_node *node = entry->data;

         def_stack_pop_if_in_block(node, block);
      } else if (instr->type == nir_instr_type_intrinsic) {
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         if (intrin->intrinsic != nir_intrinsic_store_var)
            continue;

         struct deref_node *node = get_deref_node(intrin->variables[0], state);
         if (!node)
            continue;

         if (!node->lower_to_ssa)
            continue;

         def_stack_pop_if_in_block(node, block);
         nir_instr_remove(&intrin->instr);
      }
   }

   return true;
}
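
/* The push-on-entry/pop-on-exit structure above mirrors the SEARCH
 * procedure from Cytron et al.: definitions pushed while walking a block
 * stay live for the whole dominator subtree rooted at that block and are
 * popped once the recursion returns.
 */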
/* Inserts phi nodes for all variables marked lower_to_ssa
 *
 * This is the same algorithm as presented in "Efficiently Computing Static
 * Single Assignment Form and the Control Dependence Graph" by Cytron et al.
 */
static void
insert_phi_nodes(struct lower_variables_state *state)
{
   NIR_VLA_ZERO(unsigned, work, state->impl->num_blocks);
   NIR_VLA_ZERO(unsigned, has_already, state->impl->num_blocks);

   /*
    * Since the work flags already prevent us from inserting a node that has
    * ever been inserted into W, we don't need to use a set to represent W.
    * Also, since no block can ever be inserted into W more than once, we know
    * that the maximum size of W is the number of basic blocks in the
    * function.  So all we need to handle W is an array and a pointer to the
    * next element to be inserted and the next element to be removed.
    */
   NIR_VLA(nir_block *, W, state->impl->num_blocks);

   unsigned w_start, w_end;
   unsigned iter_count = 0;

   foreach_list_typed(struct deref_node, node, direct_derefs_link,
                      &state->direct_deref_nodes) {
      if (node->stores == NULL)
         continue;

      if (!node->lower_to_ssa)
         continue;

      w_start = w_end = 0;
      iter_count++;

      struct set_entry *store_entry;
      set_foreach(node->stores, store_entry) {
         nir_intrinsic_instr *store = (nir_intrinsic_instr *)store_entry->key;
         if (work[store->instr.block->index] < iter_count)
            W[w_end++] = store->instr.block;
         work[store->instr.block->index] = iter_count;
      }

      while (w_start != w_end) {
         nir_block *cur = W[w_start++];
         struct set_entry *dom_entry;
         set_foreach(cur->dom_frontier, dom_entry) {
            nir_block *next = (nir_block *) dom_entry->key;

            /*
             * If there's more than one return statement, then the end block
             * can be a join point for some definitions.  However, there are
             * no instructions in the end block, so nothing would use those
             * phi nodes.  Of course, we couldn't place those phi nodes
             * anyways due to the restriction of having no instructions in the
             * end block...
             */
            if (next == state->impl->end_block)
               continue;

            if (has_already[next->index] < iter_count) {
               nir_phi_instr *phi = nir_phi_instr_create(state->shader);
               nir_ssa_dest_init(&phi->instr, &phi->dest,
                                 glsl_get_vector_elements(node->type), NULL);
               nir_instr_insert_before_block(next, &phi->instr);

               _mesa_hash_table_insert(state->phi_table, phi, node);

               has_already[next->index] = iter_count;
               if (work[next->index] < iter_count) {
                  work[next->index] = iter_count;
                  W[w_end++] = next;
               }
            }
         }
      }
   }
}
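
/* Reusing work[] and has_already[] across deref_nodes works because an
 * entry only counts as "set" for the current node when it equals the
 * current iter_count; bumping iter_count effectively clears both arrays
 * in O(1).
 */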
/** Implements a pass to lower variable uses to SSA values
 *
 * This pass walks the list of instructions and tries to lower as many
 * local variable load/store operations to SSA defs and uses as it can.
 * The process involves four passes:
 *
 * 1) Iterate over all of the instructions and mark where each local
 *    variable deref is used in a load, store, or copy.  While we're at
 *    it, we keep track of all of the fully-qualified (no wildcards) and
 *    fully-direct references we see and store them in the
 *    direct_deref_nodes hash table.
 *
 * 2) Walk over the list of fully-qualified direct derefs generated in
 *    the previous pass.  For each deref, we determine if it can ever be
 *    aliased, i.e. if there is an indirect reference anywhere that may
 *    refer to it.  If it cannot be aliased, we mark it for lowering to an
 *    SSA value.  At this point, we lower any var_copy instructions that
 *    use the given deref to load/store operations and, if the deref has a
 *    constant initializer, we go ahead and add a load_const value at the
 *    beginning of the function with the initialized value.
 *
 * 3) Walk over the list of derefs we plan to lower to SSA values and
 *    insert phi nodes as needed.
 *
 * 4) Perform "variable renaming" by replacing the load/store instructions
 *    with SSA definitions and SSA uses.
 */
static bool
nir_lower_vars_to_ssa_impl(nir_function_impl *impl)
{
   struct lower_variables_state state;

   state.shader = impl->function->shader;
   state.dead_ctx = ralloc_context(state.shader);
   state.impl = impl;

   state.deref_var_nodes = _mesa_hash_table_create(state.dead_ctx,
                                                   _mesa_hash_pointer,
                                                   _mesa_key_pointer_equal);
   exec_list_make_empty(&state.direct_deref_nodes);
   state.phi_table = _mesa_hash_table_create(state.dead_ctx,
                                             _mesa_hash_pointer,
                                             _mesa_key_pointer_equal);

   /* Build the initial deref structures and direct_deref_nodes table */
   state.add_to_direct_deref_nodes = true;
   nir_foreach_block(impl, register_variable_uses_block, &state);

   bool progress = false;

   nir_metadata_require(impl, nir_metadata_block_index);

   /* We're about to iterate through direct_deref_nodes.  Don't modify it. */
   state.add_to_direct_deref_nodes = false;

   foreach_list_typed_safe(struct deref_node, node, direct_derefs_link,
                           &state.direct_deref_nodes) {
      nir_deref_var *deref = node->deref;

      if (deref->var->data.mode != nir_var_local) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      if (deref_may_be_aliased(deref, &state)) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      node->lower_to_ssa = true;
      progress = true;

      if (deref->var->constant_initializer) {
         nir_load_const_instr *load =
            nir_deref_get_const_initializer_load(state.shader, deref);
         nir_ssa_def_init(&load->instr, &load->def,
                          glsl_get_vector_elements(node->type), NULL);
         nir_instr_insert_before_cf_list(&impl->body, &load->instr);
         def_stack_push(node, &load->def, &state);
      }

      foreach_deref_node_match(deref, lower_copies_to_load_store, &state);
   }

   if (!progress) {
      ralloc_free(state.dead_ctx);
      return false;
   }

   nir_metadata_require(impl, nir_metadata_dominance);

   /* We may have lowered some copy instructions to load/store
    * instructions.  The uses from the copy instructions have already been
    * removed but we need to rescan to ensure that the uses from the newly
    * added load/store instructions are registered.  We need this
    * information for phi node insertion below.
    */
   nir_foreach_block(impl, register_variable_uses_block, &state);

   insert_phi_nodes(&state);
   rename_variables_block(nir_start_block(impl), &state);

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);

   ralloc_free(state.dead_ctx);

   return progress;
}
bool
nir_lower_vars_to_ssa(nir_shader *shader)
{
   bool progress = false;

   nir_foreach_function(shader, function) {
      if (function->impl)
         progress |= nir_lower_vars_to_ssa_impl(function->impl);
   }

   return progress;
}