/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include "nir.h"
#include "nir_builder.h"
#include "nir_deref.h"
#include "nir_phi_builder.h"
#include "nir_vla.h"

struct deref_node {
   struct deref_node *parent;
   const struct glsl_type *type;

   bool lower_to_ssa;

   /* Only valid for things that end up in the direct list.
    * Note that multiple nir_deref_instrs may correspond to this node, but
    * they will all be equivalent, so any is as good as the other.
    */
   nir_deref_path path;
   struct exec_node direct_derefs_link;

   struct set *loads;
   struct set *stores;
   struct set *copies;

   struct nir_phi_builder_value *pb_value;

   /* True if this node is fully direct.  If set, it must be in the children
    * array of its parent.
    */
   bool is_direct;

   struct deref_node *wildcard;
   struct deref_node *indirect;
   struct deref_node *children[0];
};
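
/* As a rough picture of the tree this builds (an illustration, not from the
 * original comments): a shader that accesses a[0].foo, a[i].foo, and
 * a[*].foo grows the following nodes under the root node for "a":
 *
 *    a -> children[0] -> children[<field index of foo>]   (is_direct)
 *    a -> indirect    -> children[<field index of foo>]
 *    a -> wildcard    -> children[<field index of foo>]
 *
 * Only nodes on the fully-direct first chain are candidates for SSA
 * lowering; the indirect and wildcard subtrees exist so that the aliasing
 * checks below can find them.
 */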
struct lower_variables_state {
   nir_shader *shader;
   void *dead_ctx;
   nir_function_impl *impl;

   /* A hash table mapping variables to deref_node data */
   struct hash_table *deref_var_nodes;

   /* A list of the deref_nodes for fully-qualified direct dereferences,
    * i.e. dereferences with no indirect or wildcard array dereferences.
    *
    * At the moment, we only lower loads, stores, and copies that can be
    * trivially lowered to loads and stores, i.e. copies with no indirects
    * and no wildcards.  If a part of a variable that is being loaded from
    * and/or stored into is also involved in a copy operation with
    * wildcards, then we lower that copy operation to loads and stores, but
    * otherwise we leave copies with wildcards alone.  Since the only derefs
    * used in these loads, stores, and trivial copies are ones with no
    * wildcards and no indirects, these are precisely the derefs that we
    * can actually consider lowering.
    */
   struct exec_list direct_deref_nodes;

   /* Controls whether get_deref_node will add derefs to the
    * direct_deref_nodes list.  This is turned on when we are initially
    * scanning for load/store instructions.  It is then turned off so we
    * don't accidentally change the direct_deref_nodes list while we're
    * iterating through it.
    */
   bool add_to_direct_deref_nodes;

   struct nir_phi_builder *phi_builder;
};
static struct deref_node *
deref_node_create(struct deref_node *parent,
                  const struct glsl_type *type,
                  bool is_direct, void *mem_ctx)
{
   size_t size = sizeof(struct deref_node) +
                 glsl_get_length(type) * sizeof(struct deref_node *);

   struct deref_node *node = rzalloc_size(mem_ctx, size);
   node->type = type;
   node->parent = parent;
   exec_node_init(&node->direct_derefs_link);
   node->is_direct = is_direct;

   return node;
}
/* Returns the deref node associated with the given variable.  This will be
 * the root of the tree representing all of the derefs of the given variable.
 */
static struct deref_node *
get_deref_node_for_var(nir_variable *var, struct lower_variables_state *state)
{
   struct deref_node *node;

   struct hash_entry *var_entry =
      _mesa_hash_table_search(state->deref_var_nodes, var);

   if (var_entry) {
      return var_entry->data;
   } else {
      node = deref_node_create(NULL, var->type, true, state->dead_ctx);
      _mesa_hash_table_insert(state->deref_var_nodes, var, node);
      return node;
   }
}
/* Gets the deref_node for the given deref chain and creates it if it
 * doesn't yet exist.  If the deref is fully-qualified and direct and
 * state->add_to_direct_deref_nodes is true, it will be added to the list
 * of fully-qualified direct derefs.
 */
static struct deref_node *
get_deref_node_recur(nir_deref_instr *deref,
                     struct lower_variables_state *state)
{
   if (deref->deref_type == nir_deref_type_var)
      return get_deref_node_for_var(deref->var, state);

   struct deref_node *parent =
      get_deref_node_recur(nir_deref_instr_parent(deref), state);

   switch (deref->deref_type) {
   case nir_deref_type_struct:
      assert(glsl_type_is_struct(parent->type));
      assert(deref->strct.index < glsl_get_length(parent->type));

      if (parent->children[deref->strct.index] == NULL) {
         parent->children[deref->strct.index] =
            deref_node_create(parent, deref->type, parent->is_direct,
                              state->dead_ctx);
      }

      return parent->children[deref->strct.index];

   case nir_deref_type_array: {
      nir_const_value *const_index = nir_src_as_const_value(deref->arr.index);
      if (const_index) {
         uint32_t index = const_index->u32[0];
         /* This is possible if a loop unrolls and generates an
          * out-of-bounds offset.  We need to handle this at least
          * somewhat gracefully.
          */
         if (index >= glsl_get_length(parent->type))
            return NULL;

         if (parent->children[index] == NULL) {
            parent->children[index] =
               deref_node_create(parent, deref->type, parent->is_direct,
                                 state->dead_ctx);
         }

         return parent->children[index];
      } else {
         if (parent->indirect == NULL) {
            parent->indirect =
               deref_node_create(parent, deref->type, false, state->dead_ctx);
         }

         return parent->indirect;
      }
   }

   case nir_deref_type_array_wildcard:
      if (parent->wildcard == NULL) {
         parent->wildcard =
            deref_node_create(parent, deref->type, false, state->dead_ctx);
      }

      return parent->wildcard;

   default:
      unreachable("Invalid deref type");
   }
}
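
/* A rough worked example of the recursion above (hypothetical IR): for
 *
 *    vec4 a[4];
 *    ... = a[2];      // deref_var a, then deref_array with index 2
 *
 * get_deref_node_recur() first resolves the deref_var to the root node for
 * "a" and then, because the index is a constant, returns (creating it if
 * needed) parent->children[2].  Had the index been a non-constant SSA
 * value, the single shared parent->indirect node would be used instead and
 * the resulting node would not be is_direct.
 */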
static struct deref_node *
get_deref_node(nir_deref_instr *deref, struct lower_variables_state *state)
{
   struct deref_node *node = get_deref_node_recur(deref, state);
   if (!node)
      return NULL;

   /* Insert the node in the direct derefs list.  We only do this if it's not
    * already in the list and we only bother for deref nodes which are used
    * directly in a load or store.
    */
   if (node->is_direct && state->add_to_direct_deref_nodes &&
       node->direct_derefs_link.next == NULL) {
      nir_deref_path_init(&node->path, deref, state->dead_ctx);
      assert(deref->var != NULL);
      exec_list_push_tail(&state->direct_deref_nodes,
                          &node->direct_derefs_link);
   }

   return node;
}
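
/* The direct_derefs_link.next == NULL check above is the usual exec_node
 * membership test: exec_node_init() zeroes the links and pushing a node
 * onto a list makes next non-NULL, so this cheaply detects "not yet in
 * direct_deref_nodes".
 */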
/* \sa foreach_deref_node_match */
static void
foreach_deref_node_worker(struct deref_node *node, nir_deref_instr **path,
                          void (* cb)(struct deref_node *node,
                                      struct lower_variables_state *state),
                          struct lower_variables_state *state)
{
   if (*path == NULL) {
      cb(node, state);
      return;
   }

   switch ((*path)->deref_type) {
   case nir_deref_type_struct:
      if (node->children[(*path)->strct.index]) {
         foreach_deref_node_worker(node->children[(*path)->strct.index],
                                   path + 1, cb, state);
      }
      return;

   case nir_deref_type_array: {
      nir_const_value *const_index = nir_src_as_const_value((*path)->arr.index);
      assert(const_index);
      uint32_t index = const_index->u32[0];

      if (node->children[index]) {
         foreach_deref_node_worker(node->children[index],
                                   path + 1, cb, state);
      }

      if (node->wildcard) {
         foreach_deref_node_worker(node->wildcard,
                                   path + 1, cb, state);
      }
      return;
   }

   default:
      unreachable("Unsupported deref type");
   }
}
/* Walks over every "matching" deref_node and calls the callback.  A node
 * is considered to "match" if it either refers to that deref or matches up
 * to a wildcard.  In other words, the following would match a[6].foo[3].bar:
 *
 *    a[6].foo[3].bar
 *    a[*].foo[3].bar
 *    a[6].foo[*].bar
 *    a[*].foo[*].bar
 *
 * The given deref must be a full-length and fully qualified (no wildcards
 * or indirects) deref chain.
 */
static void
foreach_deref_node_match(nir_deref_path *path,
                         void (* cb)(struct deref_node *node,
                                     struct lower_variables_state *state),
                         struct lower_variables_state *state)
{
   assert(path->path[0]->deref_type == nir_deref_type_var);
   struct deref_node *node = get_deref_node_for_var(path->path[0]->var, state);

   if (node == NULL)
      return;

   foreach_deref_node_worker(node, &path->path[1], cb, state);
}
/* \sa path_may_be_aliased */
static bool
path_may_be_aliased_node(struct deref_node *node, nir_deref_instr **path,
                         struct lower_variables_state *state)
{
   if (*path == NULL)
      return false;

   switch ((*path)->deref_type) {
   case nir_deref_type_struct:
      if (node->children[(*path)->strct.index]) {
         return path_may_be_aliased_node(node->children[(*path)->strct.index],
                                         path + 1, state);
      } else {
         return false;
      }

   case nir_deref_type_array: {
      nir_const_value *const_index = nir_src_as_const_value((*path)->arr.index);
      if (!const_index)
         return true;

      uint32_t index = const_index->u32[0];

      /* If there is an indirect at this level, we're aliased. */
      if (node->indirect)
         return true;

      if (node->children[index] &&
          path_may_be_aliased_node(node->children[index],
                                   path + 1, state))
         return true;

      if (node->wildcard &&
          path_may_be_aliased_node(node->wildcard, path + 1, state))
         return true;

      return false;
   }

   default:
      unreachable("Unsupported deref type");
   }
}
/* Returns true if an indirect can ever touch this deref.
 *
 * For example, if the given deref is a[6].foo, then any use of a[i].foo
 * would cause this to return true, but a[i].bar would not affect it
 * because it's a different structure member.  A copy_deref involving
 * a[*].bar also doesn't affect it because that can be lowered to entirely
 * direct load/stores.
 *
 * We only support asking this question about fully-qualified derefs.
 * Obviously, it's pointless to ask this about indirects, but we also
 * rule out wildcards.  Handling wildcard dereferences would involve
 * checking each array index to make sure that there aren't any indirect
 * references.
 */
static bool
path_may_be_aliased(nir_deref_path *path,
                    struct lower_variables_state *state)
{
   assert(path->path[0]->deref_type == nir_deref_type_var);
   nir_variable *var = path->path[0]->var;

   return path_may_be_aliased_node(get_deref_node_for_var(var, state),
                                   &path->path[1], state);
}
static void
register_load_instr(nir_intrinsic_instr *load_instr,
                    struct lower_variables_state *state)
{
   nir_deref_instr *deref = nir_src_as_deref(load_instr->src[0]);
   struct deref_node *node = get_deref_node(deref, state);
   if (node == NULL)
      return;

   if (node->loads == NULL)
      node->loads = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                     _mesa_key_pointer_equal);

   _mesa_set_add(node->loads, load_instr);
}
static void
register_store_instr(nir_intrinsic_instr *store_instr,
                     struct lower_variables_state *state)
{
   nir_deref_instr *deref = nir_src_as_deref(store_instr->src[0]);
   struct deref_node *node = get_deref_node(deref, state);
   if (node == NULL)
      return;

   if (node->stores == NULL)
      node->stores = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                      _mesa_key_pointer_equal);

   _mesa_set_add(node->stores, store_instr);
}
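
/* Copies are registered on both the source and the destination node (the
 * two-iteration loop below), so whichever side is later lowered to SSA can
 * find the copy and lower or remove it.
 */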
static void
register_copy_instr(nir_intrinsic_instr *copy_instr,
                    struct lower_variables_state *state)
{
   for (unsigned idx = 0; idx < 2; idx++) {
      nir_deref_instr *deref = nir_src_as_deref(copy_instr->src[idx]);
      struct deref_node *node = get_deref_node(deref, state);
      if (node == NULL)
         continue;

      if (node->copies == NULL)
         node->copies = _mesa_set_create(state->dead_ctx, _mesa_hash_pointer,
                                         _mesa_key_pointer_equal);

      _mesa_set_add(node->copies, copy_instr);
   }
}
static void
register_variable_uses(nir_function_impl *impl,
                       struct lower_variables_state *state)
{
   nir_foreach_block(block, impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_deref:
            register_load_instr(intrin, state);
            break;

         case nir_intrinsic_store_deref:
            register_store_instr(intrin, state);
            break;

         case nir_intrinsic_copy_deref:
            register_copy_instr(intrin, state);
            break;

         default:
            continue;
         }
      }
   }
}
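
/* Note that register_variable_uses() is run twice by the pass below: once
 * with add_to_direct_deref_nodes set to build the initial tables, and once
 * after copy lowering to pick up the uses introduced by the newly-added
 * load/store instructions.
 */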
/* Walks over all of the copy instructions to or from the given deref_node
 * and lowers them to load/store intrinsics.
 */
static void
lower_copies_to_load_store(struct deref_node *node,
                           struct lower_variables_state *state)
{
   if (!node->copies)
      return;

   nir_builder b;
   nir_builder_init(&b, state->impl);

   struct set_entry *copy_entry;
   set_foreach(node->copies, copy_entry) {
      nir_intrinsic_instr *copy = (void *)copy_entry->key;

      nir_lower_deref_copy_instr(&b, copy);

      for (unsigned i = 0; i < 2; ++i) {
         nir_deref_instr *arg_deref = nir_src_as_deref(copy->src[i]);
         struct deref_node *arg_node = get_deref_node(arg_deref, state);

         /* Only bother removing copy entries for other nodes */
         if (arg_node == NULL || arg_node == node)
            continue;

         struct set_entry *arg_entry = _mesa_set_search(arg_node->copies, copy);
         assert(arg_entry);
         _mesa_set_remove(arg_node->copies, arg_entry);
      }

      nir_instr_remove(&copy->instr);
   }

   node->copies = NULL;
}
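
/* As a sketch (assuming the usual per-element expansion performed by
 * nir_lower_deref_copy_instr), the lowering turns
 *
 *    copy_deref a[*].foo, b[*].foo
 *
 * into one load/store pair per array element:
 *
 *    %v = load_deref  b[0].foo
 *         store_deref a[0].foo, %v
 *    ... and so on for the remaining indices ...
 *
 * after which only load_deref/store_deref intrinsics reference the nodes
 * involved.
 */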
/* Performs variable renaming
 *
 * This algorithm is very similar to the one outlined in "Efficiently
 * Computing Static Single Assignment Form and the Control Dependence
 * Graph" by Cytron et al.  The primary difference is that we only put one
 * SSA def on the stack per block.
 */
static bool
rename_variables(struct lower_variables_state *state)
{
   nir_builder b;
   nir_builder_init(&b, state->impl);

   nir_foreach_block(block, state->impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

         switch (intrin->intrinsic) {
         case nir_intrinsic_load_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            struct deref_node *node = get_deref_node(deref, state);
            if (node == NULL) {
               /* If we hit this path then we are referencing an invalid
                * value.  Most likely, we unrolled something and are
                * reading past the end of some array.  In any case, this
                * should result in an undefined value.
                */
               nir_ssa_undef_instr *undef =
                  nir_ssa_undef_instr_create(state->shader,
                                             intrin->num_components,
                                             intrin->dest.ssa.bit_size);

               nir_instr_insert_before(&intrin->instr, &undef->instr);
               nir_instr_remove(&intrin->instr);

               nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                        nir_src_for_ssa(&undef->def));
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            nir_alu_instr *mov = nir_alu_instr_create(state->shader,
                                                      nir_op_imov);
            mov->src[0].src = nir_src_for_ssa(
               nir_phi_builder_value_get_block_def(node->pb_value, block));
            for (unsigned i = intrin->num_components; i < 4; i++)
               mov->src[0].swizzle[i] = 0;

            assert(intrin->dest.is_ssa);

            mov->dest.write_mask = (1 << intrin->num_components) - 1;
            nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                              intrin->num_components,
                              intrin->dest.ssa.bit_size, NULL);

            nir_instr_insert_before(&intrin->instr, &mov->instr);
            nir_instr_remove(&intrin->instr);

            nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                     nir_src_for_ssa(&mov->dest.dest.ssa));
            break;
         }

         case nir_intrinsic_store_deref: {
            nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
            struct deref_node *node = get_deref_node(deref, state);

            assert(intrin->src[1].is_ssa);
            nir_ssa_def *value = intrin->src[1].ssa;

            if (node == NULL) {
               /* Probably an out-of-bounds array store.  That should be a
                * no-op.
                */
               nir_instr_remove(&intrin->instr);
               continue;
            }

            if (!node->lower_to_ssa)
               continue;

            assert(intrin->num_components ==
                   glsl_get_vector_elements(node->type));

            nir_ssa_def *new_def;
            b.cursor = nir_before_instr(&intrin->instr);

            unsigned wrmask = nir_intrinsic_write_mask(intrin);
            if (wrmask == (1 << intrin->num_components) - 1) {
               /* Whole variable store - just copy the source.  Note that
                * intrin->num_components and value->num_components
                * may differ.
                */
               unsigned swiz[4];
               for (unsigned i = 0; i < 4; i++)
                  swiz[i] = i < intrin->num_components ? i : 0;

               new_def = nir_swizzle(&b, value, swiz,
                                     intrin->num_components, false);
            } else {
               nir_ssa_def *old_def =
                  nir_phi_builder_value_get_block_def(node->pb_value, block);
               /* For writemasked store_deref intrinsics, we combine the newly
                * written values with the existing contents of unwritten
                * channels, creating a new SSA value for the whole vector.
                * For example, a write mask of 0b101 on a vec3 takes channels
                * 0 and 2 from value and channel 1 from old_def.
                */
               nir_ssa_def *srcs[4];
               for (unsigned i = 0; i < intrin->num_components; i++) {
                  if (wrmask & (1 << i)) {
                     srcs[i] = nir_channel(&b, value, i);
                  } else {
                     srcs[i] = nir_channel(&b, old_def, i);
                  }
               }
               new_def = nir_vec(&b, srcs, intrin->num_components);
            }

            assert(new_def->num_components == intrin->num_components);

            nir_phi_builder_value_set_block_def(node->pb_value, block, new_def);
            nir_instr_remove(&intrin->instr);
            break;
         }

         default:
            break;
         }
      }
   }

   return true;
}
/** Implements a pass to lower variable uses to SSA values
 *
 * This pass walks the list of instructions and tries to lower as many
 * local variable load/store operations to SSA defs and uses as it can.
 * The process involves four passes:
 *
 * 1) Iterate over all of the instructions and mark where each local
 *    variable deref is used in a load, store, or copy.  While we're at
 *    it, we keep track of all of the fully-qualified (no wildcards) and
 *    fully-direct references we see and store them in the
 *    direct_deref_nodes list.
 *
 * 2) Walk over the list of fully-qualified direct derefs generated in
 *    the previous pass.  For each deref, we determine if it can ever be
 *    aliased, i.e. if there is an indirect reference anywhere that may
 *    refer to it.  If it cannot be aliased, we mark it for lowering to an
 *    SSA value.  At this point, we lower any copy_deref instructions that
 *    use the given deref to load/store operations.
 *
 * 3) Walk over the list of derefs we plan to lower to SSA values and
 *    insert phi nodes as needed.
 *
 * 4) Perform "variable renaming" by replacing the load/store instructions
 *    with SSA definitions and SSA uses.
 */
static bool
nir_lower_vars_to_ssa_impl(nir_function_impl *impl)
{
   struct lower_variables_state state;

   state.shader = impl->function->shader;
   state.dead_ctx = ralloc_context(state.shader);
   state.impl = impl;

   state.deref_var_nodes = _mesa_hash_table_create(state.dead_ctx,
                                                   _mesa_hash_pointer,
                                                   _mesa_key_pointer_equal);
   exec_list_make_empty(&state.direct_deref_nodes);

   /* Build the initial deref structures and direct_deref_nodes list */
   state.add_to_direct_deref_nodes = true;

   register_variable_uses(impl, &state);

   bool progress = false;

   nir_metadata_require(impl, nir_metadata_block_index);

   /* We're about to iterate through direct_deref_nodes.  Don't modify it. */
   state.add_to_direct_deref_nodes = false;

   foreach_list_typed_safe(struct deref_node, node, direct_derefs_link,
                           &state.direct_deref_nodes) {
      nir_deref_path *path = &node->path;

      assert(path->path[0]->deref_type == nir_deref_type_var);
      nir_variable *var = path->path[0]->var;

      if (var->data.mode != nir_var_local) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      if (path_may_be_aliased(path, &state)) {
         exec_node_remove(&node->direct_derefs_link);
         continue;
      }

      node->lower_to_ssa = true;
      progress = true;

      foreach_deref_node_match(path, lower_copies_to_load_store, &state);
   }

   if (!progress)
      return false;

   nir_metadata_require(impl, nir_metadata_dominance);

   /* We may have lowered some copy instructions to load/store
    * instructions.  The uses from the copy instructions have already been
    * removed but we need to rescan to ensure that the uses from the newly
    * added load/store instructions are registered.  We need this
    * information for phi node insertion below.
    */
   register_variable_uses(impl, &state);

   state.phi_builder = nir_phi_builder_create(state.impl);

   NIR_VLA(BITSET_WORD, store_blocks, BITSET_WORDS(state.impl->num_blocks));
   foreach_list_typed(struct deref_node, node, direct_derefs_link,
                      &state.direct_deref_nodes) {
      if (!node->lower_to_ssa)
         continue;

      memset(store_blocks, 0,
             BITSET_WORDS(state.impl->num_blocks) * sizeof(*store_blocks));

      assert(node->path.path[0]->var->constant_initializer == NULL);

      if (node->stores) {
         struct set_entry *store_entry;
         set_foreach(node->stores, store_entry) {
            nir_intrinsic_instr *store =
               (nir_intrinsic_instr *)store_entry->key;
            BITSET_SET(store_blocks, store->instr.block->index);
         }
      }

      node->pb_value =
         nir_phi_builder_add_value(state.phi_builder,
                                   glsl_get_vector_elements(node->type),
                                   glsl_get_bit_size(node->type),
                                   store_blocks);
   }

   rename_variables(&state);

   nir_phi_builder_finish(state.phi_builder);

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);

   ralloc_free(state.dead_ctx);

   return progress;
}
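
/* End to end, the effect of this pass on a trivial shader looks roughly
 * like this (hypothetical before/after IR):
 *
 *    decl_var vec4 x                 decl_var vec4 x   (now unused)
 *    store_deref x, %a        =>
 *    %b = load_deref x               %b = imov %a
 *
 * The load/store intrinsics and their deref chains disappear; later copy
 * propagation and dead-code elimination clean up the remaining movs and
 * the dead variable.
 */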
bool
nir_lower_vars_to_ssa(nir_shader *shader)
{
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl)
         progress |= nir_lower_vars_to_ssa_impl(function->impl);
   }

   return progress;
}