/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include "nir.h"
#include "nir_vla.h"

struct deref_node {
   struct deref_node *parent;
   const struct glsl_type *type;

   bool lower_to_ssa;

   /* Only valid for things that end up in the direct list.
    * Note that multiple nir_deref_vars may correspond to this node, but they
    * will all be equivalent, so any is as good as the other.
    */
   nir_deref_var *deref;
   struct exec_node direct_derefs_link;

   struct set *loads;
   struct set *copies;
   struct set *stores;

   nir_ssa_def **def_stack;
   nir_ssa_def **def_stack_tail;

   struct deref_node *wildcard;
   struct deref_node *indirect;
   struct deref_node *children[0];
};
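
/* A worked example (illustrative, not taken from the original comments):
 * for a variable declared as "struct { vec4 foo; } a[2]", the root
 * deref_node represents "a" itself, children[0] and children[1] represent
 * a[0] and a[1], each of those has a child for the .foo member, and every
 * a[i] access shares the single node hanging off of "indirect".
 */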

struct lower_variables_state {
   nir_shader *shader;
   void *dead_ctx;
   nir_function_impl *impl;

   /* A hash table mapping variables to deref_node data */
   struct hash_table *deref_var_nodes;

   /* A list of the deref_nodes for fully-qualified direct dereferences,
    * i.e. dereferences with no indirect or wildcard array dereferences.
    *
    * At the moment, we only lower loads, stores, and copies that can be
    * trivially lowered to loads and stores, i.e. copies with no indirects
    * and no wildcards.  If a part of a variable that is being loaded from
    * and/or stored into is also involved in a copy operation with
    * wildcards, then we lower that copy operation to loads and stores, but
    * otherwise we leave copies with wildcards alone.  Since the only derefs
    * used in these loads, stores, and trivial copies are ones with no
    * wildcards and no indirects, these are precisely the derefs that we
    * can actually consider lowering.
    */
   struct exec_list direct_deref_nodes;

   /* Controls whether get_deref_node will add variables to the
    * direct_deref_nodes list.  This is turned on when we are initially
    * scanning for load/store instructions.  It is then turned off so we
    * don't accidentally change the direct_deref_nodes list while we're
    * iterating through it.
    */
   bool add_to_direct_deref_nodes;

   /* A hash table mapping phi nodes to deref_node data */
   struct hash_table *phi_table;
};

static struct deref_node *
deref_node_create(struct deref_node *parent,
                  const struct glsl_type *type, nir_shader *shader)
{
   size_t size = sizeof(struct deref_node) +
                 glsl_get_length(type) * sizeof(struct deref_node *);

   struct deref_node *node = rzalloc_size(shader, size);
   node->type = type;
   node->parent = parent;
   node->deref = NULL;
   exec_node_init(&node->direct_derefs_link);

   return node;
}

/* Returns the deref node associated with the given variable.  This will be
 * the root of the tree representing all of the derefs of the given variable.
 */
static struct deref_node *
get_deref_node_for_var(nir_variable *var, struct lower_variables_state *state)
{
   struct deref_node *node;

   struct hash_entry *var_entry =
      _mesa_hash_table_search(state->deref_var_nodes, var);

   if (var_entry) {
      return var_entry->data;
   } else {
      node = deref_node_create(NULL, var->type, state->dead_ctx);
      _mesa_hash_table_insert(state->deref_var_nodes, var, node);
      return node;
   }
}

/* Gets the deref_node for the given deref chain and creates it if it
 * doesn't yet exist.  If the deref is fully-qualified and direct and
 * state->add_to_direct_deref_nodes is true, it will be added to the list
 * of fully-qualified direct derefs.
 */
static struct deref_node *
get_deref_node(nir_deref_var *deref, struct lower_variables_state *state)
{
   bool is_direct = true;

   /* Start at the base of the chain. */
   struct deref_node *node = get_deref_node_for_var(deref->var, state);
   assert(deref->deref.type == node->type);

   for (nir_deref *tail = deref->deref.child; tail; tail = tail->child) {
      switch (tail->deref_type) {
      case nir_deref_type_struct: {
         nir_deref_struct *deref_struct = nir_deref_as_struct(tail);

         assert(deref_struct->index < glsl_get_length(node->type));

         if (node->children[deref_struct->index] == NULL)
            node->children[deref_struct->index] =
               deref_node_create(node, tail->type, state->dead_ctx);

         node = node->children[deref_struct->index];
         break;
      }

      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(tail);

         switch (arr->deref_array_type) {
         case nir_deref_array_type_direct:
            /* This is possible if a loop unrolls and generates an
             * out-of-bounds offset.  We need to handle this at least
             * somewhat gracefully.
             */
            if (arr->base_offset >= glsl_get_length(node->type))
               return NULL;

            if (node->children[arr->base_offset] == NULL)
               node->children[arr->base_offset] =
                  deref_node_create(node, tail->type, state->dead_ctx);

            node = node->children[arr->base_offset];
            break;

         case nir_deref_array_type_indirect:
            if (node->indirect == NULL)
               node->indirect = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->indirect;
            is_direct = false;
            break;

         case nir_deref_array_type_wildcard:
            if (node->wildcard == NULL)
               node->wildcard = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->wildcard;
            is_direct = false;
            break;

         default:
            unreachable("Invalid array deref type");
         }
         break;
      }

      default:
         unreachable("Invalid deref type");
      }
   }

   /* Only insert if it isn't already in the list. */
   if (is_direct && state->add_to_direct_deref_nodes &&
       node->direct_derefs_link.next == NULL) {
      node->deref = deref;
      assert(deref->var != NULL);
      exec_list_push_tail(&state->direct_deref_nodes,
                          &node->direct_derefs_link);
   }

   return node;
}
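
/* To make the walk above concrete (an illustrative sketch): looking up
 * a[2].foo starts at the root node for "a", steps into children[2] for the
 * direct array deref, and then into that node's child for the "foo"
 * member.  An indirect step such as a[i] instead follows node->indirect
 * and clears is_direct, which keeps the chain out of direct_deref_nodes.
 */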

/* \sa foreach_deref_node_match */
static bool
foreach_deref_node_worker(struct deref_node *node, nir_deref *deref,
                          bool (* cb)(struct deref_node *node,
                                      struct lower_variables_state *state),
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return cb(node, state);
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         assert(arr->deref_array_type == nir_deref_array_type_direct);

         if (node->children[arr->base_offset] &&
             !foreach_deref_node_worker(node->children[arr->base_offset],
                                        deref->child, cb, state))
            return false;

         if (node->wildcard &&
             !foreach_deref_node_worker(node->wildcard,
                                        deref->child, cb, state))
            return false;

         return true;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         return foreach_deref_node_worker(node->children[str->index],
                                          deref->child, cb, state);
      }

      default:
         unreachable("Invalid deref child type");
      }
   }
}

/* Walks over every "matching" deref_node and calls the callback.  A node
 * is considered to "match" if it either refers to that deref or matches up
 * to a wildcard.  In other words, the following would match a[6].foo[3].bar:
 *
 *    a[6].foo[3].bar
 *    a[6].foo[*].bar
 *    a[*].foo[3].bar
 *    a[*].foo[*].bar
 *
 * The given deref must be a full-length and fully qualified (no wildcards
 * or indirects) deref chain.
 */
static bool
foreach_deref_node_match(nir_deref_var *deref,
                         bool (* cb)(struct deref_node *node,
                                     struct lower_variables_state *state),
                         struct lower_variables_state *state)
{
   nir_deref_var var_deref = *deref;
   var_deref.deref.child = NULL;
   struct deref_node *node = get_deref_node(&var_deref, state);

   if (node == NULL)
      return false;

   return foreach_deref_node_worker(node, &deref->deref, cb, state);
}
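
/* This matching is what lets the pass lower wildcard copies lazily: when a
 * direct deref such as a[6].foo is marked for SSA conversion, the matching
 * walk also visits the a[*].foo node so that any copies recorded there are
 * lowered to loads and stores first.
 */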

/* \sa deref_may_be_aliased */
static bool
deref_may_be_aliased_node(struct deref_node *node, nir_deref *deref,
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return false;
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         if (arr->deref_array_type == nir_deref_array_type_indirect)
            return true;

         /* If there is an indirect at this level, we're aliased. */
         if (node->indirect)
            return true;

         assert(arr->deref_array_type == nir_deref_array_type_direct);

         if (node->children[arr->base_offset] &&
             deref_may_be_aliased_node(node->children[arr->base_offset],
                                       deref->child, state))
            return true;

         if (node->wildcard &&
             deref_may_be_aliased_node(node->wildcard, deref->child, state))
            return true;

         return false;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         if (node->children[str->index]) {
            return deref_may_be_aliased_node(node->children[str->index],
                                             deref->child, state);
         } else {
            return false;
         }
      }

      default:
         unreachable("Invalid nir_deref child type");
      }
   }
}

/* Returns true if an indirect reference anywhere in the program may ever
 * touch this deref, i.e. if the deref may be aliased.
 *
 * For example, if the given deref is a[6].foo, then any use of a[i].foo
 * would cause this to return true, but a[i].bar would not affect it
 * because it's a different structure member.  A var_copy involving
 * a[*].bar also doesn't affect it because that can be lowered to entirely
 * direct load/stores.
 *
 * We only support asking this question about fully-qualified derefs.
 * Obviously, it's pointless to ask this about indirects, but we also
 * rule out wildcards.  Handling wildcard dereferences would involve
 * checking each array index to make sure that there aren't any indirect
 * references.
 */
static bool
deref_may_be_aliased(nir_deref_var *deref,
                     struct lower_variables_state *state)
{
   return deref_may_be_aliased_node(get_deref_node_for_var(deref->var, state),
                                    &deref->deref, state);
}
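
/* Sketch of the intent (an illustrative example, not from the original
 * comments): given
 *
 *    vec4 a[8];
 *    ... = a[6];   (direct load)
 *    ... = a[i];   (indirect load elsewhere)
 *
 * the indirect a[i] may refer to a[6], so deref_may_be_aliased() returns
 * true for a[6] and that deref is not lowered to SSA.
 */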

static void
register_load_instr(nir_intrinsic_instr *load_instr,
                    struct lower_variables_state *state)
{
   struct deref_node *node = get_deref_node(load_instr->variables[0], state);
   if (node == NULL)
      return;

   if (node->loads == NULL)
      node->loads = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                     _mesa_key_pointer_equal);

   _mesa_set_add(node->loads, load_instr);
}

static void
register_store_instr(nir_intrinsic_instr *store_instr,
                     struct lower_variables_state *state)
{
   struct deref_node *node = get_deref_node(store_instr->variables[0], state);
   if (node == NULL)
      return;

   if (node->stores == NULL)
      node->stores = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                      _mesa_key_pointer_equal);

   _mesa_set_add(node->stores, store_instr);
}

static void
register_copy_instr(nir_intrinsic_instr *copy_instr,
                    struct lower_variables_state *state)
{
   for (unsigned idx = 0; idx < 2; idx++) {
      struct deref_node *node =
         get_deref_node(copy_instr->variables[idx], state);
      if (node == NULL)
         continue;

      if (node->copies == NULL)
         node->copies = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                         _mesa_key_pointer_equal);

      _mesa_set_add(node->copies, copy_instr);
   }
}

/* Registers all variable uses in the given block. */
static bool
register_variable_uses_block(nir_block *block, void *void_state)
{
   struct lower_variables_state *state = void_state;

   nir_foreach_instr_safe(block, instr) {
      if (instr->type != nir_instr_type_intrinsic)
         continue;

      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

      switch (intrin->intrinsic) {
      case nir_intrinsic_load_var:
         register_load_instr(intrin, state);
         break;

      case nir_intrinsic_store_var:
         register_store_instr(intrin, state);
         break;

      case nir_intrinsic_copy_var:
         register_copy_instr(intrin, state);
         break;

      default:
         continue;
      }
   }

   return true;
}

/* Walks over all of the copy instructions to or from the given deref_node
 * and lowers them to load/store intrinsics.
 */
static bool
lower_copies_to_load_store(struct deref_node *node,
                           struct lower_variables_state *state)
{
   if (!node->copies)
      return true;

   struct set_entry *copy_entry;
   set_foreach(node->copies, copy_entry) {
      nir_intrinsic_instr *copy = (void *)copy_entry->key;

      nir_lower_var_copy_instr(copy, state->shader);

      for (unsigned i = 0; i < 2; ++i) {
         struct deref_node *arg_node =
            get_deref_node(copy->variables[i], state);

         if (arg_node == NULL)
            continue;

         struct set_entry *arg_entry = _mesa_set_search(arg_node->copies, copy);
         assert(arg_entry);
         _mesa_set_remove(arg_node->copies, arg_entry);
      }

      nir_instr_remove(&copy->instr);
   }

   return true;
}
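
/* For reference (a sketch of behavior defined by nir_lower_var_copy_instr,
 * not in this file): lowering turns
 *
 *    copy_var a, b
 *
 * into a matching load/store pair, roughly
 *
 *    vec4 tmp = load_var b;
 *    store_var a, tmp;
 *
 * walking wildcard copies one array element at a time so the resulting
 * loads and stores only use fully-direct derefs.
 */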

/** Pushes an SSA def onto the def stack for the given node
 *
 * Each node is potentially associated with a stack of SSA definitions.
 * This stack is used for determining what SSA definition reaches a given
 * point in the program for variable renaming.  The stack is always kept in
 * dominance-order with at most one SSA def per block.  If the SSA
 * definition on the top of the stack is in the same block as the one being
 * pushed, the top element is replaced.
 */
static void
def_stack_push(struct deref_node *node, nir_ssa_def *def,
               struct lower_variables_state *state)
{
   if (node->def_stack == NULL) {
      node->def_stack = ralloc_array(state->dead_ctx, nir_ssa_def *,
                                     state->impl->num_blocks);
      node->def_stack_tail = node->def_stack - 1;
   }

   if (node->def_stack_tail >= node->def_stack) {
      nir_ssa_def *top_def = *node->def_stack_tail;

      if (def->parent_instr->block == top_def->parent_instr->block) {
         /* They're in the same block, just replace the top */
         *node->def_stack_tail = def;
         return;
      }
   }

   *(++node->def_stack_tail) = def;
}
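
/* Worked example (illustrative): walking the dominator tree through blocks
 * B0 -> B1 -> B2 and pushing one def in each leaves the stack as
 * [d0, d1, d2].  Pushing a second def from B2 overwrites d2 in place
 * rather than growing the stack, which is why num_blocks slots always
 * suffice.
 */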

/* Pop the top of the def stack if it's in the given block */
static void
def_stack_pop_if_in_block(struct deref_node *node, nir_block *block)
{
   /* If we're popping, then we have presumably pushed at some time in the
    * past so this should exist.
    */
   assert(node->def_stack != NULL);

   /* The stack is already empty.  Do nothing. */
   if (node->def_stack_tail < node->def_stack)
      return;

   nir_ssa_def *def = *node->def_stack_tail;
   if (def->parent_instr->block == block)
      node->def_stack_tail--;
}

/** Retrieves the SSA definition on the top of the stack for the given
 * node, if one exists.  If the stack is empty, then we return the constant
 * initializer (if it exists) or an SSA undef.
 */
static nir_ssa_def *
get_ssa_def_for_block(struct deref_node *node, nir_block *block,
                      struct lower_variables_state *state)
{
   /* If we have something on the stack, go ahead and return it.  We're
    * assuming that the top of the stack dominates the given block.
    */
   if (node->def_stack && node->def_stack_tail >= node->def_stack)
      return *node->def_stack_tail;

   /* If we got here then we don't have a definition that dominates the
    * given block.  This means that we need to add an undef and use that.
    */
   nir_ssa_undef_instr *undef =
      nir_ssa_undef_instr_create(state->shader,
                                 glsl_get_vector_elements(node->type));
   nir_instr_insert_before_cf_list(&state->impl->body, &undef->instr);
   def_stack_push(node, &undef->def, state);
   return &undef->def;
}
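
/* The constant-initializer case mentioned in the comment above is handled
 * before renaming begins: nir_lower_vars_to_ssa_impl() pushes a load_const
 * def for any variable with a constant initializer, so an empty stack here
 * really does mean there is no reaching definition and undef is correct.
 */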

/* Given a block and one of its predecessors, this function fills in the
 * sources of the phi nodes to take SSA defs from the given predecessor.
 * This function must be called exactly once per block/predecessor pair.
 */
static void
add_phi_sources(nir_block *block, nir_block *pred,
                struct lower_variables_state *state)
{
   nir_foreach_instr(block, instr) {
      if (instr->type != nir_instr_type_phi)
         break;

      nir_phi_instr *phi = nir_instr_as_phi(instr);

      struct hash_entry *entry =
         _mesa_hash_table_search(state->phi_table, phi);
      if (entry == NULL)
         continue;

      struct deref_node *node = entry->data;

      nir_phi_src *src = ralloc(phi, nir_phi_src);
      src->pred = pred;
      src->src.is_ssa = true;
      src->src.ssa = get_ssa_def_for_block(node, pred, state);

      _mesa_set_add(src->src.ssa->uses, instr);

      exec_list_push_tail(&phi->srcs, &src->node);
   }
}
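
/* The "exactly once per block/predecessor pair" requirement is satisfied
 * by the renaming walk below: rename_variables_block() calls this once for
 * each of its (at most two) successors, so every phi ends up with exactly
 * one source per incoming edge.
 */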

/* Performs variable renaming by doing a DFS of the dominance tree
 *
 * This algorithm is very similar to the one outlined in "Efficiently
 * Computing Static Single Assignment Form and the Control Dependence
 * Graph" by Cytron et al.  The primary difference is that we only put one
 * SSA def on the stack per block.
 */
static bool
rename_variables_block(nir_block *block, struct lower_variables_state *state)
{
   nir_foreach_instr_safe(block, instr) {
      if (instr->type == nir_instr_type_phi) {
         nir_phi_instr *phi = nir_instr_as_phi(instr);

         struct hash_entry *entry =
            _mesa_hash_table_search(state->phi_table, phi);

         /* This can happen if we already have phi nodes in the program
          * that were not created in this pass.
          */
         if (entry == NULL)
            continue;

         struct deref_node *node = entry->data;

         def_stack_push(node, &phi->dest.ssa, state);
      } else if (instr->type == nir_instr_type_intrinsic) {
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_var: {
            struct deref_node *node =
               get_deref_node(intrin->variables[0], state);

            if (node == NULL) {
               /* If we hit this path then we are referencing an invalid
                * value.  Most likely, we unrolled something and are
                * reading past the end of some array.  In any case, this
                * should result in an undefined value.
                */
               nir_ssa_undef_instr *undef =
                  nir_ssa_undef_instr_create(state->shader,
                                             intrin->num_components);

               nir_instr_insert_before(&intrin->instr, &undef->instr);
               nir_instr_remove(&intrin->instr);

               nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                        nir_src_for_ssa(&undef->def),
                                        state->shader);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            nir_alu_instr *mov = nir_alu_instr_create(state->shader,
                                                      nir_op_imov);
            mov->src[0].src.is_ssa = true;
            mov->src[0].src.ssa = get_ssa_def_for_block(node, block, state);
            for (unsigned i = intrin->num_components; i < 4; i++)
               mov->src[0].swizzle[i] = 0;

            assert(intrin->dest.is_ssa);

            mov->dest.write_mask = (1 << intrin->num_components) - 1;
            nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                              intrin->num_components, NULL);

            nir_instr_insert_before(&intrin->instr, &mov->instr);
            nir_instr_remove(&intrin->instr);

            nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                     nir_src_for_ssa(&mov->dest.dest.ssa),
                                     state->shader);
            break;
         }

         case nir_intrinsic_store_var: {
            struct deref_node *node =
               get_deref_node(intrin->variables[0], state);

            if (node == NULL) {
               /* Probably an out-of-bounds array store.  That should be a
                * no-op.
                */
               nir_instr_remove(&intrin->instr);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            assert(intrin->num_components ==
                   glsl_get_vector_elements(node->type));

            assert(intrin->src[0].is_ssa);

            nir_alu_instr *mov = nir_alu_instr_create(state->shader,
                                                      nir_op_imov);
            mov->src[0].src.is_ssa = true;
            mov->src[0].src.ssa = intrin->src[0].ssa;
            for (unsigned i = intrin->num_components; i < 4; i++)
               mov->src[0].swizzle[i] = 0;

            mov->dest.write_mask = (1 << intrin->num_components) - 1;
            nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                              intrin->num_components, NULL);

            nir_instr_insert_before(&intrin->instr, &mov->instr);

            def_stack_push(node, &mov->dest.dest.ssa, state);

            /* We'll wait to remove the instruction until the next pass
             * where we pop the node we just pushed back off the stack.
             */
            break;
         }

         default:
            break;
         }
      }
   }

   if (block->successors[0])
      add_phi_sources(block->successors[0], block, state);
   if (block->successors[1])
      add_phi_sources(block->successors[1], block, state);

   for (unsigned i = 0; i < block->num_dom_children; ++i)
      rename_variables_block(block->dom_children[i], state);

   /* Now we iterate over the instructions and pop off any SSA defs that we
    * pushed in the first loop.
    */
   nir_foreach_instr_safe(block, instr) {
      if (instr->type == nir_instr_type_phi) {
         nir_phi_instr *phi = nir_instr_as_phi(instr);

         struct hash_entry *entry =
            _mesa_hash_table_search(state->phi_table, phi);

         /* This can happen if we already have phi nodes in the program
          * that were not created in this pass.
          */
         if (entry == NULL)
            continue;

         struct deref_node *node = entry->data;

         def_stack_pop_if_in_block(node, block);
      } else if (instr->type == nir_instr_type_intrinsic) {
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         if (intrin->intrinsic != nir_intrinsic_store_var)
            continue;

         struct deref_node *node = get_deref_node(intrin->variables[0], state);
         if (node == NULL)
            continue;

         if (!node->lower_to_ssa)
            continue;

         def_stack_pop_if_in_block(node, block);
         nir_instr_remove(&intrin->instr);
      }
   }

   return true;
}
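
/* The push/pop pairing above follows the rename phase of Cytron et al.:
 * the first loop pushes a def for every phi and store in this block, the
 * recursive calls consume those defs in dominated blocks, and the second
 * loop pops exactly what the first loop pushed (removing the now-dead
 * store instructions) so that each def stack is restored before the caller
 * moves on to a sibling in the dominance tree.
 */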

/* Inserts phi nodes for all variables marked lower_to_ssa
 *
 * This is the same algorithm as presented in "Efficiently Computing Static
 * Single Assignment Form and the Control Dependence Graph" by Cytron et al.
 */
static void
insert_phi_nodes(struct lower_variables_state *state)
{
   NIR_VLA_ZERO(unsigned, work, state->impl->num_blocks);
   NIR_VLA_ZERO(unsigned, has_already, state->impl->num_blocks);

   /*
    * Since the work flags already prevent us from inserting a node that has
    * ever been inserted into W, we don't need to use a set to represent W.
    * Also, since no block can ever be inserted into W more than once, we know
    * that the maximum size of W is the number of basic blocks in the
    * function.  So all we need to handle W is an array and a pointer to the
    * next element to be inserted and the next element to be removed.
    */
   NIR_VLA(nir_block *, W, state->impl->num_blocks);

   unsigned w_start, w_end;
   unsigned iter_count = 0;

   foreach_list_typed(struct deref_node, node, direct_derefs_link,
                      &state->direct_deref_nodes) {
      if (node->stores == NULL)
         continue;

      if (!node->lower_to_ssa)
         continue;

      w_start = w_end = 0;
      iter_count++;

      struct set_entry *store_entry;
      set_foreach(node->stores, store_entry) {
         nir_intrinsic_instr *store = (nir_intrinsic_instr *)store_entry->key;
         if (work[store->instr.block->index] < iter_count)
            W[w_end++] = store->instr.block;
         work[store->instr.block->index] = iter_count;
      }

      while (w_start != w_end) {
         nir_block *cur = W[w_start++];
         struct set_entry *dom_entry;
         set_foreach(cur->dom_frontier, dom_entry) {
            nir_block *next = (nir_block *) dom_entry->key;

            /*
             * If there's more than one return statement, then the end block
             * can be a join point for some definitions.  However, there are
             * no instructions in the end block, so nothing would use those
             * phi nodes.  Of course, we couldn't place those phi nodes
             * anyways due to the restriction of having no instructions in the
             * end block...
             */
            if (next == state->impl->end_block)
               continue;

            if (has_already[next->index] < iter_count) {
               nir_phi_instr *phi = nir_phi_instr_create(state->shader);
               nir_ssa_dest_init(&phi->instr, &phi->dest,
                                 glsl_get_vector_elements(node->type), NULL);
               nir_instr_insert_before_block(next, &phi->instr);

               _mesa_hash_table_insert(state->phi_table, phi, node);

               has_already[next->index] = iter_count;
               if (work[next->index] < iter_count) {
                  work[next->index] = iter_count;
                  W[w_end++] = next;
               }
            }
         }
      }
   }
}
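
/* The iter_count scheme above is the standard trick for reusing the work[]
 * and has_already[] arrays across deref_nodes: rather than clearing both
 * arrays for every node, each outer iteration bumps iter_count and a slot
 * only counts as "set" when it equals the current iter_count, so the
 * arrays are allocated and zeroed exactly once.
 */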

/** Implements a pass to lower variable uses to SSA values
 *
 * This pass walks the list of instructions and tries to lower as many
 * local variable load/store operations to SSA defs and uses as it can.
 * The process involves four passes:
 *
 *  1) Iterate over all of the instructions and mark where each local
 *     variable deref is used in a load, store, or copy.  While we're at
 *     it, we keep track of all of the fully-qualified (no wildcards) and
 *     fully-direct references we see and store them in the
 *     direct_deref_nodes list.
 *
 *  2) Walk over the list of fully-qualified direct derefs generated in
 *     the previous pass.  For each deref, we determine if it can ever be
 *     aliased, i.e. if there is an indirect reference anywhere that may
 *     refer to it.  If it cannot be aliased, we mark it for lowering to an
 *     SSA value.  At this point, we lower any var_copy instructions that
 *     use the given deref to load/store operations and, if the deref has a
 *     constant initializer, we go ahead and add a load_const value at the
 *     beginning of the function with the initialized value.
 *
 *  3) Walk over the list of derefs we plan to lower to SSA values and
 *     insert phi nodes as needed.
 *
 *  4) Perform "variable renaming" by replacing the load/store instructions
 *     with SSA definitions and SSA uses.
 */
static bool
nir_lower_vars_to_ssa_impl(nir_function_impl *impl)
{
   struct lower_variables_state state;

   state.shader = impl->overload->function->shader;
   state.dead_ctx = ralloc_context(state.shader);
   state.impl = impl;

   state.deref_var_nodes = _mesa_hash_table_create(state.dead_ctx,
                                                   _mesa_hash_pointer,
                                                   _mesa_key_pointer_equal);
   exec_list_make_empty(&state.direct_deref_nodes);
   state.phi_table = _mesa_hash_table_create(state.dead_ctx,
                                             _mesa_hash_pointer,
                                             _mesa_key_pointer_equal);

   /* Build the initial deref structures and direct_deref_nodes table */
   state.add_to_direct_deref_nodes = true;
   nir_foreach_block(impl, register_variable_uses_block, &state);

   struct set *outputs = _mesa_set_create(state.dead_ctx,
                                          _mesa_hash_pointer,
                                          _mesa_key_pointer_equal);

   bool progress = false;

   nir_metadata_require(impl, nir_metadata_block_index);

   /* We're about to iterate through direct_deref_nodes.  Don't modify it. */
   state.add_to_direct_deref_nodes = false;

   foreach_list_typed_safe(struct deref_node, node, direct_derefs_link,
                           &state.direct_deref_nodes) {
      nir_deref_var *deref = node->deref;

      if (deref->var->data.mode != nir_var_local) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      if (deref_may_be_aliased(deref, &state)) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      node->lower_to_ssa = true;
      progress = true;

      if (deref->var->constant_initializer) {
         nir_load_const_instr *load =
            nir_deref_get_const_initializer_load(state.shader, deref);
         nir_ssa_def_init(&load->instr, &load->def,
                          glsl_get_vector_elements(node->type), NULL);
         nir_instr_insert_before_cf_list(&impl->body, &load->instr);
         def_stack_push(node, &load->def, &state);
      }

      if (deref->var->data.mode == nir_var_shader_out)
         _mesa_set_add(outputs, node);

      foreach_deref_node_match(deref, lower_copies_to_load_store, &state);
   }

   if (!progress) {
      ralloc_free(state.dead_ctx);
      return false;
   }

   nir_metadata_require(impl, nir_metadata_dominance);

   /* We may have lowered some copy instructions to load/store
    * instructions.  The uses from the copy instructions have already been
    * removed but we need to rescan to ensure that the uses from the newly
    * added load/store instructions are registered.  We need this
    * information for phi node insertion below.
    */
   nir_foreach_block(impl, register_variable_uses_block, &state);

   insert_phi_nodes(&state);
   rename_variables_block(impl->start_block, &state);

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);

   ralloc_free(state.dead_ctx);

   return progress;
}

void
nir_lower_vars_to_ssa(nir_shader *shader)
{
   nir_foreach_overload(shader, overload) {
      if (overload->impl)
         nir_lower_vars_to_ssa_impl(overload->impl);
   }
}