/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Connor Abbott (cwabbott0@gmail.com)
 */
32 nir_shader_create(void *mem_ctx
)
34 nir_shader
*shader
= ralloc(mem_ctx
, nir_shader
);
36 shader
->uniforms
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
37 _mesa_key_string_equal
);
38 shader
->inputs
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
39 _mesa_key_string_equal
);
40 shader
->outputs
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
41 _mesa_key_string_equal
);
43 shader
->num_user_structures
= 0;
44 shader
->user_structures
= NULL
;
46 exec_list_make_empty(&shader
->functions
);
47 exec_list_make_empty(&shader
->registers
);
48 exec_list_make_empty(&shader
->globals
);
49 exec_list_make_empty(&shader
->system_values
);
50 shader
->reg_alloc
= 0;
52 shader
->num_inputs
= 0;
53 shader
->num_outputs
= 0;
54 shader
->num_uniforms
= 0;
60 reg_create(void *mem_ctx
, struct exec_list
*list
)
62 nir_register
*reg
= ralloc(mem_ctx
, nir_register
);
64 reg
->uses
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
65 reg
->defs
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
66 reg
->if_uses
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
68 reg
->num_components
= 0;
69 reg
->num_array_elems
= 0;
70 reg
->is_packed
= false;
73 exec_list_push_tail(list
, ®
->node
);
79 nir_global_reg_create(nir_shader
*shader
)
81 nir_register
*reg
= reg_create(shader
, &shader
->registers
);
82 reg
->index
= shader
->reg_alloc
++;
83 reg
->is_global
= true;
89 nir_local_reg_create(nir_function_impl
*impl
)
91 nir_register
*reg
= reg_create(ralloc_parent(impl
), &impl
->registers
);
92 reg
->index
= impl
->reg_alloc
++;
93 reg
->is_global
= false;
99 nir_reg_remove(nir_register
*reg
)
101 exec_node_remove(®
->node
);
105 nir_function_create(nir_shader
*shader
, const char *name
)
107 nir_function
*func
= ralloc(shader
, nir_function
);
109 exec_list_push_tail(&shader
->functions
, &func
->node
);
110 exec_list_make_empty(&func
->overload_list
);
112 func
->shader
= shader
;
117 nir_function_overload
*
118 nir_function_overload_create(nir_function
*func
)
120 void *mem_ctx
= ralloc_parent(func
);
122 nir_function_overload
*overload
= ralloc(mem_ctx
, nir_function_overload
);
124 overload
->num_params
= 0;
125 overload
->params
= NULL
;
126 overload
->return_type
= glsl_void_type();
127 overload
->impl
= NULL
;
129 exec_list_push_tail(&func
->overload_list
, &overload
->node
);
130 overload
->function
= func
;
135 nir_src
nir_src_copy(nir_src src
, void *mem_ctx
)
138 ret
.is_ssa
= src
.is_ssa
;
142 ret
.reg
.base_offset
= src
.reg
.base_offset
;
143 ret
.reg
.reg
= src
.reg
.reg
;
144 if (src
.reg
.indirect
) {
145 ret
.reg
.indirect
= ralloc(mem_ctx
, nir_src
);
146 *ret
.reg
.indirect
= *src
.reg
.indirect
;
148 ret
.reg
.indirect
= NULL
;
155 nir_dest
nir_dest_copy(nir_dest dest
, void *mem_ctx
)
158 ret
.is_ssa
= dest
.is_ssa
;
162 ret
.reg
.base_offset
= dest
.reg
.base_offset
;
163 ret
.reg
.reg
= dest
.reg
.reg
;
164 if (dest
.reg
.indirect
) {
165 ret
.reg
.indirect
= ralloc(mem_ctx
, nir_src
);
166 *ret
.reg
.indirect
= *dest
.reg
.indirect
;
168 ret
.reg
.indirect
= NULL
;
176 block_add_pred(nir_block
*block
, nir_block
*pred
)
178 _mesa_set_add(block
->predecessors
, _mesa_hash_pointer(pred
), pred
);
182 cf_init(nir_cf_node
*node
, nir_cf_node_type type
)
184 exec_node_init(&node
->node
);
190 link_blocks(nir_block
*pred
, nir_block
*succ1
, nir_block
*succ2
)
192 pred
->successors
[0] = succ1
;
193 block_add_pred(succ1
, pred
);
195 pred
->successors
[1] = succ2
;
197 block_add_pred(succ2
, pred
);
201 unlink_blocks(nir_block
*pred
, nir_block
*succ
)
203 if (pred
->successors
[0] == succ
) {
204 pred
->successors
[0] = pred
->successors
[1];
205 pred
->successors
[1] = NULL
;
207 assert(pred
->successors
[1] == succ
);
208 pred
->successors
[1] = NULL
;
211 struct set_entry
*entry
= _mesa_set_search(succ
->predecessors
,
212 _mesa_hash_pointer(pred
), pred
);
216 _mesa_set_remove(succ
->predecessors
, entry
);
220 unlink_block_successors(nir_block
*block
)
222 if (block
->successors
[0] != NULL
)
223 unlink_blocks(block
, block
->successors
[0]);
224 if (block
->successors
[1] != NULL
)
225 unlink_blocks(block
, block
->successors
[1]);
230 nir_function_impl_create(nir_function_overload
*overload
)
232 assert(overload
->impl
== NULL
);
234 void *mem_ctx
= ralloc_parent(overload
);
236 nir_function_impl
*impl
= ralloc(mem_ctx
, nir_function_impl
);
238 overload
->impl
= impl
;
239 impl
->overload
= overload
;
241 cf_init(&impl
->cf_node
, nir_cf_node_function
);
243 exec_list_make_empty(&impl
->body
);
244 exec_list_make_empty(&impl
->registers
);
245 exec_list_make_empty(&impl
->locals
);
246 impl
->num_params
= 0;
248 impl
->return_var
= NULL
;
251 impl
->valid_metadata
= nir_metadata_none
;
253 /* create start & end blocks */
254 nir_block
*start_block
= nir_block_create(mem_ctx
);
255 nir_block
*end_block
= nir_block_create(mem_ctx
);
256 start_block
->cf_node
.parent
= &impl
->cf_node
;
257 end_block
->cf_node
.parent
= &impl
->cf_node
;
258 impl
->start_block
= start_block
;
259 impl
->end_block
= end_block
;
261 exec_list_push_tail(&impl
->body
, &start_block
->cf_node
.node
);
263 start_block
->successors
[0] = end_block
;
264 block_add_pred(end_block
, start_block
);
270 nir_block_create(void *mem_ctx
)
272 nir_block
*block
= ralloc(mem_ctx
, nir_block
);
274 cf_init(&block
->cf_node
, nir_cf_node_block
);
276 block
->successors
[0] = block
->successors
[1] = NULL
;
277 block
->predecessors
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
278 block
->imm_dom
= NULL
;
279 block
->dom_frontier
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
281 exec_list_make_empty(&block
->instr_list
);
287 src_init(nir_src
*src
)
291 src
->reg
.indirect
= NULL
;
292 src
->reg
.base_offset
= 0;
296 nir_if_create(void *mem_ctx
)
298 nir_if
*if_stmt
= ralloc(mem_ctx
, nir_if
);
300 cf_init(&if_stmt
->cf_node
, nir_cf_node_if
);
301 src_init(&if_stmt
->condition
);
303 nir_block
*then
= nir_block_create(mem_ctx
);
304 exec_list_make_empty(&if_stmt
->then_list
);
305 exec_list_push_tail(&if_stmt
->then_list
, &then
->cf_node
.node
);
306 then
->cf_node
.parent
= &if_stmt
->cf_node
;
308 nir_block
*else_stmt
= nir_block_create(mem_ctx
);
309 exec_list_make_empty(&if_stmt
->else_list
);
310 exec_list_push_tail(&if_stmt
->else_list
, &else_stmt
->cf_node
.node
);
311 else_stmt
->cf_node
.parent
= &if_stmt
->cf_node
;
317 nir_loop_create(void *mem_ctx
)
319 nir_loop
*loop
= ralloc(mem_ctx
, nir_loop
);
321 cf_init(&loop
->cf_node
, nir_cf_node_loop
);
323 nir_block
*body
= nir_block_create(mem_ctx
);
324 exec_list_make_empty(&loop
->body
);
325 exec_list_push_tail(&loop
->body
, &body
->cf_node
.node
);
326 body
->cf_node
.parent
= &loop
->cf_node
;
328 body
->successors
[0] = body
;
329 block_add_pred(body
, body
);
335 instr_init(nir_instr
*instr
, nir_instr_type type
)
339 exec_node_init(&instr
->node
);
343 dest_init(nir_dest
*dest
)
345 dest
->is_ssa
= false;
346 dest
->reg
.reg
= NULL
;
347 dest
->reg
.indirect
= NULL
;
348 dest
->reg
.base_offset
= 0;
352 alu_dest_init(nir_alu_dest
*dest
)
354 dest_init(&dest
->dest
);
355 dest
->saturate
= false;
356 dest
->write_mask
= 0xf;
360 alu_src_init(nir_alu_src
*src
)
363 src
->abs
= src
->negate
= false;
371 nir_alu_instr_create(void *mem_ctx
, nir_op op
)
373 unsigned num_srcs
= nir_op_infos
[op
].num_inputs
;
374 nir_alu_instr
*instr
=
376 sizeof(nir_alu_instr
) + num_srcs
* sizeof(nir_alu_src
));
378 instr_init(&instr
->instr
, nir_instr_type_alu
);
380 alu_dest_init(&instr
->dest
);
381 for (unsigned i
= 0; i
< num_srcs
; i
++)
382 alu_src_init(&instr
->src
[i
]);
388 nir_jump_instr_create(void *mem_ctx
, nir_jump_type type
)
390 nir_jump_instr
*instr
= ralloc(mem_ctx
, nir_jump_instr
);
391 instr_init(&instr
->instr
, nir_instr_type_jump
);
396 nir_load_const_instr
*
397 nir_load_const_instr_create(void *mem_ctx
, unsigned num_components
)
399 nir_load_const_instr
*instr
= ralloc(mem_ctx
, nir_load_const_instr
);
400 instr_init(&instr
->instr
, nir_instr_type_load_const
);
402 nir_ssa_def_init(&instr
->instr
, &instr
->def
, num_components
, NULL
);
407 nir_intrinsic_instr
*
408 nir_intrinsic_instr_create(void *mem_ctx
, nir_intrinsic_op op
)
410 unsigned num_srcs
= nir_intrinsic_infos
[op
].num_srcs
;
411 nir_intrinsic_instr
*instr
=
413 sizeof(nir_intrinsic_instr
) + num_srcs
* sizeof(nir_src
));
415 instr_init(&instr
->instr
, nir_instr_type_intrinsic
);
416 instr
->intrinsic
= op
;
418 if (nir_intrinsic_infos
[op
].has_dest
)
419 dest_init(&instr
->dest
);
421 for (unsigned i
= 0; i
< num_srcs
; i
++)
422 src_init(&instr
->src
[i
]);
428 nir_call_instr_create(void *mem_ctx
, nir_function_overload
*callee
)
430 nir_call_instr
*instr
= ralloc(mem_ctx
, nir_call_instr
);
431 instr_init(&instr
->instr
, nir_instr_type_call
);
433 instr
->callee
= callee
;
434 instr
->num_params
= callee
->num_params
;
435 instr
->params
= ralloc_array(mem_ctx
, nir_deref_var
*, instr
->num_params
);
436 instr
->return_deref
= NULL
;
442 nir_tex_instr_create(void *mem_ctx
, unsigned num_srcs
)
444 nir_tex_instr
*instr
= ralloc(mem_ctx
, nir_tex_instr
);
445 instr_init(&instr
->instr
, nir_instr_type_tex
);
447 dest_init(&instr
->dest
);
449 instr
->num_srcs
= num_srcs
;
450 for (unsigned i
= 0; i
< 4; i
++)
451 src_init(&instr
->src
[i
]);
453 instr
->sampler_index
= 0;
454 instr
->sampler_array_size
= 0;
455 instr
->sampler
= NULL
;
461 nir_phi_instr_create(void *mem_ctx
)
463 nir_phi_instr
*instr
= ralloc(mem_ctx
, nir_phi_instr
);
464 instr_init(&instr
->instr
, nir_instr_type_phi
);
466 dest_init(&instr
->dest
);
467 exec_list_make_empty(&instr
->srcs
);
471 nir_parallel_copy_instr
*
472 nir_parallel_copy_instr_create(void *mem_ctx
)
474 nir_parallel_copy_instr
*instr
= ralloc(mem_ctx
, nir_parallel_copy_instr
);
475 instr_init(&instr
->instr
, nir_instr_type_parallel_copy
);
477 instr
->at_end
= false;
478 exec_list_make_empty(&instr
->copies
);
483 nir_ssa_undef_instr
*
484 nir_ssa_undef_instr_create(void *mem_ctx
, unsigned num_components
)
486 nir_ssa_undef_instr
*instr
= ralloc(mem_ctx
, nir_ssa_undef_instr
);
487 instr_init(&instr
->instr
, nir_instr_type_ssa_undef
);
489 nir_ssa_def_init(&instr
->instr
, &instr
->def
, num_components
, NULL
);
495 nir_deref_var_create(void *mem_ctx
, nir_variable
*var
)
497 nir_deref_var
*deref
= ralloc(mem_ctx
, nir_deref_var
);
498 deref
->deref
.deref_type
= nir_deref_type_var
;
499 deref
->deref
.child
= NULL
;
500 deref
->deref
.type
= var
->type
;
506 nir_deref_array_create(void *mem_ctx
)
508 nir_deref_array
*deref
= ralloc(mem_ctx
, nir_deref_array
);
509 deref
->deref
.deref_type
= nir_deref_type_array
;
510 deref
->deref
.child
= NULL
;
511 deref
->deref_array_type
= nir_deref_array_type_direct
;
512 src_init(&deref
->indirect
);
513 deref
->base_offset
= 0;
518 nir_deref_struct_create(void *mem_ctx
, unsigned field_index
)
520 nir_deref_struct
*deref
= ralloc(mem_ctx
, nir_deref_struct
);
521 deref
->deref
.deref_type
= nir_deref_type_struct
;
522 deref
->deref
.child
= NULL
;
523 deref
->index
= field_index
;
527 static nir_deref_var
*
528 copy_deref_var(void *mem_ctx
, nir_deref_var
*deref
)
530 nir_deref_var
*ret
= nir_deref_var_create(mem_ctx
, deref
->var
);
531 ret
->deref
.type
= deref
->deref
.type
;
532 if (deref
->deref
.child
)
533 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
537 static nir_deref_array
*
538 copy_deref_array(void *mem_ctx
, nir_deref_array
*deref
)
540 nir_deref_array
*ret
= nir_deref_array_create(mem_ctx
);
541 ret
->base_offset
= deref
->base_offset
;
542 ret
->deref_array_type
= deref
->deref_array_type
;
543 if (deref
->deref_array_type
== nir_deref_array_type_indirect
) {
544 ret
->indirect
= nir_src_copy(deref
->indirect
, mem_ctx
);
546 ret
->deref
.type
= deref
->deref
.type
;
547 if (deref
->deref
.child
)
548 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
552 static nir_deref_struct
*
553 copy_deref_struct(void *mem_ctx
, nir_deref_struct
*deref
)
555 nir_deref_struct
*ret
= nir_deref_struct_create(mem_ctx
, deref
->index
);
556 ret
->deref
.type
= deref
->deref
.type
;
557 if (deref
->deref
.child
)
558 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
563 nir_copy_deref(void *mem_ctx
, nir_deref
*deref
)
565 switch (deref
->deref_type
) {
566 case nir_deref_type_var
:
567 return ©_deref_var(mem_ctx
, nir_deref_as_var(deref
))->deref
;
568 case nir_deref_type_array
:
569 return ©_deref_array(mem_ctx
, nir_deref_as_array(deref
))->deref
;
570 case nir_deref_type_struct
:
571 return ©_deref_struct(mem_ctx
, nir_deref_as_struct(deref
))->deref
;
573 unreachable("Invalid dereference type");
/**
 * \name Control flow modification
 *
 * These functions modify the control flow tree while keeping the control flow
 * graph up-to-date. The invariants respected are:
 * 1. Each then statement, else statement, or loop body must have at least one
 *    basic block.
 * 2. Each if-statement and loop must have one basic block before it and one
 *    after.
 * 3. Two basic blocks cannot be directly next to each other.
 * 4. If a basic block has a jump instruction, there must be only one and it
 *    must be at the end of the block.
 * 5. The CFG must always be connected - this means that we must insert a fake
 *    CFG edge for loops with no break statement.
 *
 * The purpose of the second one is so that we have places to insert code during
 * GCM, as well as eliminating the possibility of critical edges.
 */
/*@{*/
601 link_non_block_to_block(nir_cf_node
*node
, nir_block
*block
)
603 if (node
->type
== nir_cf_node_if
) {
605 * We're trying to link an if to a block after it; this just means linking
606 * the last block of the then and else branches.
609 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
611 nir_cf_node
*last_then
= nir_if_last_then_node(if_stmt
);
612 assert(last_then
->type
== nir_cf_node_block
);
613 nir_block
*last_then_block
= nir_cf_node_as_block(last_then
);
615 nir_cf_node
*last_else
= nir_if_last_else_node(if_stmt
);
616 assert(last_else
->type
== nir_cf_node_block
);
617 nir_block
*last_else_block
= nir_cf_node_as_block(last_else
);
619 if (exec_list_is_empty(&last_then_block
->instr_list
) ||
620 nir_block_last_instr(last_then_block
)->type
!= nir_instr_type_jump
) {
621 unlink_block_successors(last_then_block
);
622 link_blocks(last_then_block
, block
, NULL
);
625 if (exec_list_is_empty(&last_else_block
->instr_list
) ||
626 nir_block_last_instr(last_else_block
)->type
!= nir_instr_type_jump
) {
627 unlink_block_successors(last_else_block
);
628 link_blocks(last_else_block
, block
, NULL
);
631 assert(node
->type
== nir_cf_node_loop
);
634 * We can only get to this codepath if we're inserting a new loop, or
635 * at least a loop with no break statements; we can't insert break
636 * statements into a loop when we haven't inserted it into the CFG
637 * because we wouldn't know which block comes after the loop
638 * and therefore, which block should be the successor of the block with
639 * the break). Therefore, we need to insert a fake edge (see invariant
643 nir_loop
*loop
= nir_cf_node_as_loop(node
);
645 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
646 assert(last
->type
== nir_cf_node_block
);
647 nir_block
*last_block
= nir_cf_node_as_block(last
);
649 last_block
->successors
[1] = block
;
650 block_add_pred(block
, last_block
);
655 link_block_to_non_block(nir_block
*block
, nir_cf_node
*node
)
657 if (node
->type
== nir_cf_node_if
) {
659 * We're trying to link a block to an if after it; this just means linking
660 * the block to the first block of the then and else branches.
663 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
665 nir_cf_node
*first_then
= nir_if_first_then_node(if_stmt
);
666 assert(first_then
->type
== nir_cf_node_block
);
667 nir_block
*first_then_block
= nir_cf_node_as_block(first_then
);
669 nir_cf_node
*first_else
= nir_if_first_else_node(if_stmt
);
670 assert(first_else
->type
== nir_cf_node_block
);
671 nir_block
*first_else_block
= nir_cf_node_as_block(first_else
);
673 unlink_block_successors(block
);
674 link_blocks(block
, first_then_block
, first_else_block
);
677 * For similar reasons as the corresponding case in
678 * link_non_block_to_block(), don't worry about if the loop header has
679 * any predecessors that need to be unlinked.
682 assert(node
->type
== nir_cf_node_loop
);
684 nir_loop
*loop
= nir_cf_node_as_loop(node
);
686 nir_cf_node
*loop_header
= nir_loop_first_cf_node(loop
);
687 assert(loop_header
->type
== nir_cf_node_block
);
688 nir_block
*loop_header_block
= nir_cf_node_as_block(loop_header
);
690 unlink_block_successors(block
);
691 link_blocks(block
, loop_header_block
, NULL
);
697 * Takes a basic block and inserts a new empty basic block before it, making its
698 * predecessors point to the new block. This essentially splits the block into
699 * an empty header and a body so that another non-block CF node can be inserted
700 * between the two. Note that this does *not* link the two basic blocks, so
701 * some kind of cleanup *must* be performed after this call.
705 split_block_beginning(nir_block
*block
)
707 nir_block
*new_block
= nir_block_create(ralloc_parent(block
));
708 new_block
->cf_node
.parent
= block
->cf_node
.parent
;
709 exec_node_insert_node_before(&block
->cf_node
.node
, &new_block
->cf_node
.node
);
711 struct set_entry
*entry
;
712 set_foreach(block
->predecessors
, entry
) {
713 nir_block
*pred
= (nir_block
*) entry
->key
;
715 unlink_blocks(pred
, block
);
716 link_blocks(pred
, new_block
, NULL
);
723 rewrite_phi_preds(nir_block
*block
, nir_block
*old_pred
, nir_block
*new_pred
)
725 nir_foreach_instr_safe(block
, instr
) {
726 if (instr
->type
!= nir_instr_type_phi
)
729 nir_phi_instr
*phi
= nir_instr_as_phi(instr
);
730 foreach_list_typed_safe(nir_phi_src
, src
, node
, &phi
->srcs
) {
731 if (src
->pred
== old_pred
) {
732 src
->pred
= new_pred
;
740 * Moves the successors of source to the successors of dest, leaving both
741 * successors of source NULL.
745 move_successors(nir_block
*source
, nir_block
*dest
)
747 nir_block
*succ1
= source
->successors
[0];
748 nir_block
*succ2
= source
->successors
[1];
751 unlink_blocks(source
, succ1
);
752 rewrite_phi_preds(succ1
, source
, dest
);
756 unlink_blocks(source
, succ2
);
757 rewrite_phi_preds(succ2
, source
, dest
);
760 unlink_block_successors(dest
);
761 link_blocks(dest
, succ1
, succ2
);
765 split_block_end(nir_block
*block
)
767 nir_block
*new_block
= nir_block_create(ralloc_parent(block
));
768 new_block
->cf_node
.parent
= block
->cf_node
.parent
;
769 exec_node_insert_after(&block
->cf_node
.node
, &new_block
->cf_node
.node
);
771 move_successors(block
, new_block
);
777 * Inserts a non-basic block between two basic blocks and links them together.
781 insert_non_block(nir_block
*before
, nir_cf_node
*node
, nir_block
*after
)
783 node
->parent
= before
->cf_node
.parent
;
784 exec_node_insert_after(&before
->cf_node
.node
, &node
->node
);
785 link_block_to_non_block(before
, node
);
786 link_non_block_to_block(node
, after
);
790 * Inserts a non-basic block before a basic block.
794 insert_non_block_before_block(nir_cf_node
*node
, nir_block
*block
)
796 /* split off the beginning of block into new_block */
797 nir_block
*new_block
= split_block_beginning(block
);
799 /* insert our node in between new_block and block */
800 insert_non_block(new_block
, node
, block
);
804 insert_non_block_after_block(nir_block
*block
, nir_cf_node
*node
)
806 /* split off the end of block into new_block */
807 nir_block
*new_block
= split_block_end(block
);
809 /* insert our node in between block and new_block */
810 insert_non_block(block
, node
, new_block
);
813 /* walk up the control flow tree to find the innermost enclosed loop */
815 nearest_loop(nir_cf_node
*node
)
817 while (node
->type
!= nir_cf_node_loop
) {
821 return nir_cf_node_as_loop(node
);
825 nir_cf_node_get_function(nir_cf_node
*node
)
827 while (node
->type
!= nir_cf_node_function
) {
831 return nir_cf_node_as_function(node
);
835 * update the CFG after a jump instruction has been added to the end of a block
839 handle_jump(nir_block
*block
)
841 nir_instr
*instr
= nir_block_last_instr(block
);
842 nir_jump_instr
*jump_instr
= nir_instr_as_jump(instr
);
844 unlink_block_successors(block
);
846 nir_function_impl
*impl
= nir_cf_node_get_function(&block
->cf_node
);
847 nir_metadata_preserve(impl
, nir_metadata_none
);
849 if (jump_instr
->type
== nir_jump_break
||
850 jump_instr
->type
== nir_jump_continue
) {
851 nir_loop
*loop
= nearest_loop(&block
->cf_node
);
853 if (jump_instr
->type
== nir_jump_continue
) {
854 nir_cf_node
*first_node
= nir_loop_first_cf_node(loop
);
855 assert(first_node
->type
== nir_cf_node_block
);
856 nir_block
*first_block
= nir_cf_node_as_block(first_node
);
857 link_blocks(block
, first_block
, NULL
);
859 nir_cf_node
*after
= nir_cf_node_next(&loop
->cf_node
);
860 assert(after
->type
== nir_cf_node_block
);
861 nir_block
*after_block
= nir_cf_node_as_block(after
);
862 link_blocks(block
, after_block
, NULL
);
864 /* If we inserted a fake link, remove it */
865 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
866 assert(last
->type
== nir_cf_node_block
);
867 nir_block
*last_block
= nir_cf_node_as_block(last
);
868 if (last_block
->successors
[1] != NULL
)
869 unlink_blocks(last_block
, after_block
);
872 assert(jump_instr
->type
== nir_jump_return
);
873 link_blocks(block
, impl
->end_block
, NULL
);
878 handle_remove_jump(nir_block
*block
, nir_jump_type type
)
880 unlink_block_successors(block
);
882 if (exec_node_is_tail_sentinel(block
->cf_node
.node
.next
)) {
883 nir_cf_node
*parent
= block
->cf_node
.parent
;
884 if (parent
->type
== nir_cf_node_if
) {
885 nir_cf_node
*next
= nir_cf_node_next(parent
);
886 assert(next
->type
== nir_cf_node_block
);
887 nir_block
*next_block
= nir_cf_node_as_block(next
);
889 link_blocks(block
, next_block
, NULL
);
891 assert(parent
->type
== nir_cf_node_loop
);
892 nir_loop
*loop
= nir_cf_node_as_loop(parent
);
894 nir_cf_node
*head
= nir_loop_first_cf_node(loop
);
895 assert(head
->type
== nir_cf_node_block
);
896 nir_block
*head_block
= nir_cf_node_as_block(head
);
898 link_blocks(block
, head_block
, NULL
);
901 nir_cf_node
*next
= nir_cf_node_next(&block
->cf_node
);
902 if (next
->type
== nir_cf_node_if
) {
903 nir_if
*next_if
= nir_cf_node_as_if(next
);
905 nir_cf_node
*first_then
= nir_if_first_then_node(next_if
);
906 assert(first_then
->type
== nir_cf_node_block
);
907 nir_block
*first_then_block
= nir_cf_node_as_block(first_then
);
909 nir_cf_node
*first_else
= nir_if_first_else_node(next_if
);
910 assert(first_else
->type
== nir_cf_node_block
);
911 nir_block
*first_else_block
= nir_cf_node_as_block(first_else
);
913 link_blocks(block
, first_then_block
, first_else_block
);
915 assert(next
->type
== nir_cf_node_loop
);
916 nir_loop
*next_loop
= nir_cf_node_as_loop(next
);
918 nir_cf_node
*first
= nir_loop_first_cf_node(next_loop
);
919 assert(first
->type
== nir_cf_node_block
);
920 nir_block
*first_block
= nir_cf_node_as_block(first
);
922 link_blocks(block
, first_block
, NULL
);
926 if (type
== nir_jump_break
) {
927 nir_loop
*loop
= nearest_loop(&block
->cf_node
);
929 nir_cf_node
*next
= nir_cf_node_next(&loop
->cf_node
);
930 assert(next
->type
== nir_cf_node_block
);
931 nir_block
*next_block
= nir_cf_node_as_block(next
);
933 if (next_block
->predecessors
->entries
== 0) {
934 /* insert fake link */
935 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
936 assert(last
->type
== nir_cf_node_block
);
937 nir_block
*last_block
= nir_cf_node_as_block(last
);
939 last_block
->successors
[1] = next_block
;
940 block_add_pred(next_block
, last_block
);
944 nir_function_impl
*impl
= nir_cf_node_get_function(&block
->cf_node
);
945 nir_metadata_preserve(impl
, nir_metadata_none
);
949 * Inserts a basic block before another by merging the instructions.
951 * @param block the target of the insertion
952 * @param before the block to be inserted - must not have been inserted before
953 * @param has_jump whether \before has a jump instruction at the end
957 insert_block_before_block(nir_block
*block
, nir_block
*before
, bool has_jump
)
959 assert(!has_jump
|| exec_list_is_empty(&block
->instr_list
));
961 foreach_list_typed(nir_instr
, instr
, node
, &before
->instr_list
) {
962 instr
->block
= block
;
965 exec_list_prepend(&block
->instr_list
, &before
->instr_list
);
972 * Inserts a basic block after another by merging the instructions.
974 * @param block the target of the insertion
975 * @param after the block to be inserted - must not have been inserted before
976 * @param has_jump whether \after has a jump instruction at the end
980 insert_block_after_block(nir_block
*block
, nir_block
*after
, bool has_jump
)
982 foreach_list_typed(nir_instr
, instr
, node
, &after
->instr_list
) {
983 instr
->block
= block
;
986 exec_list_append(&block
->instr_list
, &after
->instr_list
);
993 update_if_uses(nir_cf_node
*node
)
995 if (node
->type
!= nir_cf_node_if
)
998 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
1000 struct set
*if_uses_set
= if_stmt
->condition
.is_ssa
?
1001 if_stmt
->condition
.ssa
->if_uses
:
1002 if_stmt
->condition
.reg
.reg
->uses
;
1004 _mesa_set_add(if_uses_set
, _mesa_hash_pointer(if_stmt
), if_stmt
);
1008 nir_cf_node_insert_after(nir_cf_node
*node
, nir_cf_node
*after
)
1010 update_if_uses(after
);
1012 if (after
->type
== nir_cf_node_block
) {
1014 * either node or the one after it must be a basic block, by invariant #2;
1015 * in either case, just merge the blocks together.
1017 nir_block
*after_block
= nir_cf_node_as_block(after
);
1019 bool has_jump
= !exec_list_is_empty(&after_block
->instr_list
) &&
1020 nir_block_last_instr(after_block
)->type
== nir_instr_type_jump
;
1022 if (node
->type
== nir_cf_node_block
) {
1023 insert_block_after_block(nir_cf_node_as_block(node
), after_block
,
1026 nir_cf_node
*next
= nir_cf_node_next(node
);
1027 assert(next
->type
== nir_cf_node_block
);
1028 nir_block
*next_block
= nir_cf_node_as_block(next
);
1030 insert_block_before_block(next_block
, after_block
, has_jump
);
1033 if (node
->type
== nir_cf_node_block
) {
1034 insert_non_block_after_block(nir_cf_node_as_block(node
), after
);
1037 * We have to insert a non-basic block after a non-basic block. Since
1038 * every non-basic block has a basic block after it, this is equivalent
1039 * to inserting a non-basic block before a basic block.
1042 nir_cf_node
*next
= nir_cf_node_next(node
);
1043 assert(next
->type
== nir_cf_node_block
);
1044 nir_block
*next_block
= nir_cf_node_as_block(next
);
1046 insert_non_block_before_block(after
, next_block
);
1050 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1051 nir_metadata_preserve(impl
, nir_metadata_none
);
1055 nir_cf_node_insert_before(nir_cf_node
*node
, nir_cf_node
*before
)
1057 update_if_uses(before
);
1059 if (before
->type
== nir_cf_node_block
) {
1060 nir_block
*before_block
= nir_cf_node_as_block(before
);
1062 bool has_jump
= !exec_list_is_empty(&before_block
->instr_list
) &&
1063 nir_block_last_instr(before_block
)->type
== nir_instr_type_jump
;
1065 if (node
->type
== nir_cf_node_block
) {
1066 insert_block_before_block(nir_cf_node_as_block(node
), before_block
,
1069 nir_cf_node
*prev
= nir_cf_node_prev(node
);
1070 assert(prev
->type
== nir_cf_node_block
);
1071 nir_block
*prev_block
= nir_cf_node_as_block(prev
);
1073 insert_block_after_block(prev_block
, before_block
, has_jump
);
1076 if (node
->type
== nir_cf_node_block
) {
1077 insert_non_block_before_block(before
, nir_cf_node_as_block(node
));
1080 * We have to insert a non-basic block before a non-basic block. This
1081 * is equivalent to inserting a non-basic block after a basic block.
1084 nir_cf_node
*prev_node
= nir_cf_node_prev(node
);
1085 assert(prev_node
->type
== nir_cf_node_block
);
1086 nir_block
*prev_block
= nir_cf_node_as_block(prev_node
);
1088 insert_non_block_after_block(prev_block
, before
);
1092 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1093 nir_metadata_preserve(impl
, nir_metadata_none
);
1097 nir_cf_node_insert_begin(struct exec_list
*list
, nir_cf_node
*node
)
1099 nir_cf_node
*begin
= exec_node_data(nir_cf_node
, list
->head
, node
);
1100 nir_cf_node_insert_before(begin
, node
);
1104 nir_cf_node_insert_end(struct exec_list
*list
, nir_cf_node
*node
)
1106 nir_cf_node
*end
= exec_node_data(nir_cf_node
, list
->tail_pred
, node
);
1107 nir_cf_node_insert_after(end
, node
);
1111 * Stitch two basic blocks together into one. The aggregate must have the same
1112 * predecessors as the first and the same successors as the second.
1116 stitch_blocks(nir_block
*before
, nir_block
*after
)
1119 * We move after into before, so we have to deal with up to 2 successors vs.
1120 * possibly a large number of predecessors.
1122 * TODO: special case when before is empty and after isn't?
1125 move_successors(after
, before
);
1127 foreach_list_typed(nir_instr
, instr
, node
, &after
->instr_list
) {
1128 instr
->block
= before
;
1131 exec_list_append(&before
->instr_list
, &after
->instr_list
);
1132 exec_node_remove(&after
->cf_node
.node
);
1136 nir_cf_node_remove(nir_cf_node
*node
)
1138 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1139 nir_metadata_preserve(impl
, nir_metadata_none
);
1141 if (node
->type
== nir_cf_node_block
) {
1143 * Basic blocks can't really be removed by themselves, since they act as
1144 * padding between the non-basic blocks. So all we do here is empty the
1145 * block of instructions.
1147 * TODO: could we assert here?
1149 exec_list_make_empty(&nir_cf_node_as_block(node
)->instr_list
);
1151 nir_cf_node
*before
= nir_cf_node_prev(node
);
1152 assert(before
->type
== nir_cf_node_block
);
1153 nir_block
*before_block
= nir_cf_node_as_block(before
);
1155 nir_cf_node
*after
= nir_cf_node_next(node
);
1156 assert(after
->type
== nir_cf_node_block
);
1157 nir_block
*after_block
= nir_cf_node_as_block(after
);
1159 exec_node_remove(&node
->node
);
1160 stitch_blocks(before_block
, after_block
);
1165 add_use_cb(nir_src
*src
, void *state
)
1167 nir_instr
*instr
= (nir_instr
*) state
;
1169 struct set
*uses_set
= src
->is_ssa
? src
->ssa
->uses
: src
->reg
.reg
->uses
;
1171 _mesa_set_add(uses_set
, _mesa_hash_pointer(instr
), instr
);
1177 add_ssa_def_cb(nir_ssa_def
*def
, void *state
)
1179 nir_instr
*instr
= (nir_instr
*) state
;
1181 if (instr
->block
&& def
->index
== UINT_MAX
) {
1182 nir_function_impl
*impl
=
1183 nir_cf_node_get_function(&instr
->block
->cf_node
);
1185 def
->index
= impl
->ssa_alloc
++;
1192 add_reg_def_cb(nir_dest
*dest
, void *state
)
1194 nir_instr
*instr
= (nir_instr
*) state
;
1197 _mesa_set_add(dest
->reg
.reg
->defs
, _mesa_hash_pointer(instr
), instr
);
1203 add_defs_uses(nir_instr
*instr
)
1205 nir_foreach_src(instr
, add_use_cb
, instr
);
1206 nir_foreach_dest(instr
, add_reg_def_cb
, instr
);
1207 nir_foreach_ssa_def(instr
, add_ssa_def_cb
, instr
);
1211 nir_instr_insert_before(nir_instr
*instr
, nir_instr
*before
)
1213 assert(before
->type
!= nir_instr_type_jump
);
1214 before
->block
= instr
->block
;
1215 add_defs_uses(before
);
1216 exec_node_insert_node_before(&instr
->node
, &before
->node
);
1220 nir_instr_insert_after(nir_instr
*instr
, nir_instr
*after
)
1222 if (after
->type
== nir_instr_type_jump
) {
1223 assert(instr
== nir_block_last_instr(instr
->block
));
1224 assert(instr
->type
!= nir_instr_type_jump
);
1227 after
->block
= instr
->block
;
1228 add_defs_uses(after
);
1229 exec_node_insert_after(&instr
->node
, &after
->node
);
1231 if (after
->type
== nir_instr_type_jump
)
1232 handle_jump(after
->block
);
1236 nir_instr_insert_before_block(nir_block
*block
, nir_instr
*before
)
1238 if (before
->type
== nir_instr_type_jump
)
1239 assert(exec_list_is_empty(&block
->instr_list
));
1241 before
->block
= block
;
1242 add_defs_uses(before
);
1243 exec_list_push_head(&block
->instr_list
, &before
->node
);
1245 if (before
->type
== nir_instr_type_jump
)
1250 nir_instr_insert_after_block(nir_block
*block
, nir_instr
*after
)
1252 if (after
->type
== nir_instr_type_jump
) {
1253 assert(exec_list_is_empty(&block
->instr_list
) ||
1254 nir_block_last_instr(block
)->type
!= nir_instr_type_jump
);
1257 after
->block
= block
;
1258 add_defs_uses(after
);
1259 exec_list_push_tail(&block
->instr_list
, &after
->node
);
1261 if (after
->type
== nir_instr_type_jump
)
1266 nir_instr_insert_before_cf(nir_cf_node
*node
, nir_instr
*before
)
1268 if (node
->type
== nir_cf_node_block
) {
1269 nir_instr_insert_before_block(nir_cf_node_as_block(node
), before
);
1271 nir_cf_node
*prev
= nir_cf_node_prev(node
);
1272 assert(prev
->type
== nir_cf_node_block
);
1273 nir_block
*prev_block
= nir_cf_node_as_block(prev
);
1275 nir_instr_insert_before_block(prev_block
, before
);
1280 nir_instr_insert_after_cf(nir_cf_node
*node
, nir_instr
*after
)
1282 if (node
->type
== nir_cf_node_block
) {
1283 nir_instr_insert_after_block(nir_cf_node_as_block(node
), after
);
1285 nir_cf_node
*next
= nir_cf_node_next(node
);
1286 assert(next
->type
== nir_cf_node_block
);
1287 nir_block
*next_block
= nir_cf_node_as_block(next
);
1289 nir_instr_insert_before_block(next_block
, after
);
1294 nir_instr_insert_before_cf_list(struct exec_list
*list
, nir_instr
*before
)
1296 nir_cf_node
*first_node
= exec_node_data(nir_cf_node
,
1297 exec_list_get_head(list
), node
);
1298 nir_instr_insert_before_cf(first_node
, before
);
1302 nir_instr_insert_after_cf_list(struct exec_list
*list
, nir_instr
*after
)
1304 nir_cf_node
*last_node
= exec_node_data(nir_cf_node
,
1305 exec_list_get_tail(list
), node
);
1306 nir_instr_insert_after_cf(last_node
, after
);
1310 remove_use_cb(nir_src
*src
, void *state
)
1312 nir_instr
*instr
= (nir_instr
*) state
;
1314 struct set
*uses_set
= src
->is_ssa
? src
->ssa
->uses
: src
->reg
.reg
->uses
;
1316 struct set_entry
*entry
= _mesa_set_search(uses_set
,
1317 _mesa_hash_pointer(instr
),
1320 _mesa_set_remove(uses_set
, entry
);
1326 remove_def_cb(nir_dest
*dest
, void *state
)
1328 nir_instr
*instr
= (nir_instr
*) state
;
1333 nir_register
*reg
= dest
->reg
.reg
;
1335 struct set_entry
*entry
= _mesa_set_search(reg
->defs
,
1336 _mesa_hash_pointer(instr
),
1339 _mesa_set_remove(reg
->defs
, entry
);
1345 remove_defs_uses(nir_instr
*instr
)
1347 nir_foreach_dest(instr
, remove_def_cb
, instr
);
1348 nir_foreach_src(instr
, remove_use_cb
, instr
);
1351 void nir_instr_remove(nir_instr
*instr
)
1353 remove_defs_uses(instr
);
1354 exec_node_remove(&instr
->node
);
1356 if (instr
->type
== nir_instr_type_jump
) {
1357 nir_jump_instr
*jump_instr
= nir_instr_as_jump(instr
);
1358 handle_remove_jump(instr
->block
, jump_instr
->type
);
1365 nir_index_local_regs(nir_function_impl
*impl
)
1368 foreach_list_typed(nir_register
, reg
, node
, &impl
->registers
) {
1369 reg
->index
= index
++;
1371 impl
->reg_alloc
= index
;
1375 nir_index_global_regs(nir_shader
*shader
)
1378 foreach_list_typed(nir_register
, reg
, node
, &shader
->registers
) {
1379 reg
->index
= index
++;
1381 shader
->reg_alloc
= index
;
1385 visit_alu_dest(nir_alu_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1387 return cb(&instr
->dest
.dest
, state
);
1391 visit_intrinsic_dest(nir_intrinsic_instr
*instr
, nir_foreach_dest_cb cb
,
1394 if (nir_intrinsic_infos
[instr
->intrinsic
].has_dest
)
1395 return cb(&instr
->dest
, state
);
1401 visit_texture_dest(nir_tex_instr
*instr
, nir_foreach_dest_cb cb
,
1404 return cb(&instr
->dest
, state
);
1408 visit_phi_dest(nir_phi_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1410 return cb(&instr
->dest
, state
);
1414 visit_parallel_copy_dest(nir_parallel_copy_instr
*instr
,
1415 nir_foreach_dest_cb cb
, void *state
)
1417 foreach_list_typed(nir_parallel_copy_copy
, copy
, node
, &instr
->copies
) {
1418 if (!cb(©
->dest
, state
))
1426 nir_foreach_dest(nir_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1428 switch (instr
->type
) {
1429 case nir_instr_type_alu
:
1430 return visit_alu_dest(nir_instr_as_alu(instr
), cb
, state
);
1431 case nir_instr_type_intrinsic
:
1432 return visit_intrinsic_dest(nir_instr_as_intrinsic(instr
), cb
, state
);
1433 case nir_instr_type_tex
:
1434 return visit_texture_dest(nir_instr_as_tex(instr
), cb
, state
);
1435 case nir_instr_type_phi
:
1436 return visit_phi_dest(nir_instr_as_phi(instr
), cb
, state
);
1437 case nir_instr_type_parallel_copy
:
1438 return visit_parallel_copy_dest(nir_instr_as_parallel_copy(instr
),
1441 case nir_instr_type_load_const
:
1442 case nir_instr_type_ssa_undef
:
1443 case nir_instr_type_call
:
1444 case nir_instr_type_jump
:
1448 unreachable("Invalid instruction type");
1455 struct foreach_ssa_def_state
{
1456 nir_foreach_ssa_def_cb cb
;
1461 nir_ssa_def_visitor(nir_dest
*dest
, void *void_state
)
1463 struct foreach_ssa_def_state
*state
= void_state
;
1466 return state
->cb(&dest
->ssa
, state
->client_state
);
1472 nir_foreach_ssa_def(nir_instr
*instr
, nir_foreach_ssa_def_cb cb
, void *state
)
1474 switch (instr
->type
) {
1475 case nir_instr_type_alu
:
1476 case nir_instr_type_tex
:
1477 case nir_instr_type_intrinsic
:
1478 case nir_instr_type_phi
:
1479 case nir_instr_type_parallel_copy
: {
1480 struct foreach_ssa_def_state foreach_state
= {cb
, state
};
1481 return nir_foreach_dest(instr
, nir_ssa_def_visitor
, &foreach_state
);
1484 case nir_instr_type_load_const
:
1485 return cb(&nir_instr_as_load_const(instr
)->def
, state
);
1486 case nir_instr_type_ssa_undef
:
1487 return cb(&nir_instr_as_ssa_undef(instr
)->def
, state
);
1488 case nir_instr_type_call
:
1489 case nir_instr_type_jump
:
1492 unreachable("Invalid instruction type");
1497 visit_src(nir_src
*src
, nir_foreach_src_cb cb
, void *state
)
1499 if (!cb(src
, state
))
1501 if (!src
->is_ssa
&& src
->reg
.indirect
)
1502 return cb(src
->reg
.indirect
, state
);
1507 visit_deref_array_src(nir_deref_array
*deref
, nir_foreach_src_cb cb
,
1510 if (deref
->deref_array_type
== nir_deref_array_type_indirect
)
1511 return visit_src(&deref
->indirect
, cb
, state
);
1516 visit_deref_src(nir_deref_var
*deref
, nir_foreach_src_cb cb
, void *state
)
1518 nir_deref
*cur
= &deref
->deref
;
1519 while (cur
!= NULL
) {
1520 if (cur
->deref_type
== nir_deref_type_array
)
1521 if (!visit_deref_array_src(nir_deref_as_array(cur
), cb
, state
))
1531 visit_alu_src(nir_alu_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1533 for (unsigned i
= 0; i
< nir_op_infos
[instr
->op
].num_inputs
; i
++)
1534 if (!visit_src(&instr
->src
[i
].src
, cb
, state
))
1541 visit_tex_src(nir_tex_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1543 for (unsigned i
= 0; i
< instr
->num_srcs
; i
++)
1544 if (!visit_src(&instr
->src
[i
], cb
, state
))
1547 if (instr
->sampler
!= NULL
)
1548 if (!visit_deref_src(instr
->sampler
, cb
, state
))
1555 visit_intrinsic_src(nir_intrinsic_instr
*instr
, nir_foreach_src_cb cb
,
1558 unsigned num_srcs
= nir_intrinsic_infos
[instr
->intrinsic
].num_srcs
;
1559 for (unsigned i
= 0; i
< num_srcs
; i
++)
1560 if (!visit_src(&instr
->src
[i
], cb
, state
))
1564 nir_intrinsic_infos
[instr
->intrinsic
].num_variables
;
1565 for (unsigned i
= 0; i
< num_vars
; i
++)
1566 if (!visit_deref_src(instr
->variables
[i
], cb
, state
))
1573 visit_call_src(nir_call_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1579 visit_load_const_src(nir_load_const_instr
*instr
, nir_foreach_src_cb cb
,
1586 visit_phi_src(nir_phi_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1588 foreach_list_typed(nir_phi_src
, src
, node
, &instr
->srcs
) {
1589 if (!visit_src(&src
->src
, cb
, state
))
1597 visit_parallel_copy_src(nir_parallel_copy_instr
*instr
,
1598 nir_foreach_src_cb cb
, void *state
)
1600 foreach_list_typed(nir_parallel_copy_copy
, copy
, node
, &instr
->copies
) {
1601 if (!visit_src(©
->src
, cb
, state
))
1610 nir_foreach_src_cb cb
;
1611 } visit_dest_indirect_state
;
1614 visit_dest_indirect(nir_dest
*dest
, void *_state
)
1616 visit_dest_indirect_state
*state
= (visit_dest_indirect_state
*) _state
;
1618 if (!dest
->is_ssa
&& dest
->reg
.indirect
)
1619 return state
->cb(dest
->reg
.indirect
, state
->state
);
1625 nir_foreach_src(nir_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1627 switch (instr
->type
) {
1628 case nir_instr_type_alu
:
1629 if (!visit_alu_src(nir_instr_as_alu(instr
), cb
, state
))
1632 case nir_instr_type_intrinsic
:
1633 if (!visit_intrinsic_src(nir_instr_as_intrinsic(instr
), cb
, state
))
1636 case nir_instr_type_tex
:
1637 if (!visit_tex_src(nir_instr_as_tex(instr
), cb
, state
))
1640 case nir_instr_type_call
:
1641 if (!visit_call_src(nir_instr_as_call(instr
), cb
, state
))
1644 case nir_instr_type_load_const
:
1645 if (!visit_load_const_src(nir_instr_as_load_const(instr
), cb
, state
))
1648 case nir_instr_type_phi
:
1649 if (!visit_phi_src(nir_instr_as_phi(instr
), cb
, state
))
1652 case nir_instr_type_parallel_copy
:
1653 if (!visit_parallel_copy_src(nir_instr_as_parallel_copy(instr
),
1657 case nir_instr_type_jump
:
1658 case nir_instr_type_ssa_undef
:
1662 unreachable("Invalid instruction type");
1666 visit_dest_indirect_state dest_state
;
1667 dest_state
.state
= state
;
1669 return nir_foreach_dest(instr
, visit_dest_indirect
, &dest_state
);
1673 nir_src_as_const_value(nir_src src
)
1678 if (src
.ssa
->parent_instr
->type
!= nir_instr_type_load_const
)
1681 nir_load_const_instr
*load
= nir_instr_as_load_const(src
.ssa
->parent_instr
);
1683 return &load
->value
;
1687 nir_srcs_equal(nir_src src1
, nir_src src2
)
1691 return src1
.ssa
== src2
.ssa
;
1699 if ((src1
.reg
.indirect
== NULL
) != (src2
.reg
.indirect
== NULL
))
1702 if (src1
.reg
.indirect
) {
1703 if (!nir_srcs_equal(*src1
.reg
.indirect
, *src2
.reg
.indirect
))
1707 return src1
.reg
.reg
== src2
.reg
.reg
&&
1708 src1
.reg
.base_offset
== src2
.reg
.base_offset
;
1714 src_does_not_use_def(nir_src
*src
, void *void_def
)
1716 nir_ssa_def
*def
= void_def
;
1719 return src
->ssa
!= def
;
1726 src_does_not_use_reg(nir_src
*src
, void *void_reg
)
1728 nir_register
*reg
= void_reg
;
1733 return src
->reg
.reg
!= reg
;
1738 nir_instr_rewrite_src(nir_instr
*instr
, nir_src
*src
, nir_src new_src
)
1741 nir_ssa_def
*old_ssa
= src
->ssa
;
1743 if (old_ssa
&& nir_foreach_src(instr
, src_does_not_use_def
, old_ssa
)) {
1744 struct set_entry
*entry
= _mesa_set_search(old_ssa
->uses
,
1745 _mesa_hash_pointer(instr
),
1748 _mesa_set_remove(old_ssa
->uses
, entry
);
1751 if (src
->reg
.indirect
)
1752 nir_instr_rewrite_src(instr
, src
->reg
.indirect
, new_src
);
1754 nir_register
*old_reg
= src
->reg
.reg
;
1756 if (old_reg
&& nir_foreach_src(instr
, src_does_not_use_reg
, old_reg
)) {
1757 struct set_entry
*entry
= _mesa_set_search(old_reg
->uses
,
1758 _mesa_hash_pointer(instr
),
1761 _mesa_set_remove(old_reg
->uses
, entry
);
1765 if (new_src
.is_ssa
) {
1767 _mesa_set_add(new_src
.ssa
->uses
, _mesa_hash_pointer(instr
), instr
);
1769 if (new_src
.reg
.reg
)
1770 _mesa_set_add(new_src
.reg
.reg
->uses
, _mesa_hash_pointer(instr
), instr
);
1775 nir_ssa_def_init(nir_instr
*instr
, nir_ssa_def
*def
,
1776 unsigned num_components
, const char *name
)
1778 void *mem_ctx
= ralloc_parent(instr
);
1781 def
->parent_instr
= instr
;
1782 def
->uses
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
1783 def
->if_uses
= _mesa_set_create(mem_ctx
, _mesa_key_pointer_equal
);
1784 def
->num_components
= num_components
;
1787 nir_function_impl
*impl
=
1788 nir_cf_node_get_function(&instr
->block
->cf_node
);
1790 def
->index
= impl
->ssa_alloc
++;
1792 def
->index
= UINT_MAX
;
1796 struct ssa_def_rewrite_state
{
1803 ssa_def_rewrite_uses_src(nir_src
*src
, void *void_state
)
1805 struct ssa_def_rewrite_state
*state
= void_state
;
1807 if (src
->is_ssa
&& src
->ssa
== state
->old
)
1808 *src
= nir_src_copy(state
->new_src
, state
->mem_ctx
);
1814 nir_ssa_def_rewrite_uses(nir_ssa_def
*def
, nir_src new_src
, void *mem_ctx
)
1816 struct ssa_def_rewrite_state state
;
1817 state
.mem_ctx
= mem_ctx
;
1819 state
.new_src
= new_src
;
1821 assert(!new_src
.is_ssa
|| def
!= new_src
.ssa
);
1823 struct set
*new_uses
, *new_if_uses
;
1824 if (new_src
.is_ssa
) {
1825 new_uses
= new_src
.ssa
->uses
;
1826 new_if_uses
= new_src
.ssa
->if_uses
;
1828 new_uses
= new_src
.reg
.reg
->uses
;
1829 new_if_uses
= new_src
.reg
.reg
->if_uses
;
1832 struct set_entry
*entry
;
1833 set_foreach(def
->uses
, entry
) {
1834 nir_instr
*instr
= (nir_instr
*)entry
->key
;
1836 _mesa_set_remove(def
->uses
, entry
);
1837 nir_foreach_src(instr
, ssa_def_rewrite_uses_src
, &state
);
1838 _mesa_set_add(new_uses
, _mesa_hash_pointer(instr
), instr
);
1841 set_foreach(def
->if_uses
, entry
) {
1842 nir_if
*if_use
= (nir_if
*)entry
->key
;
1844 _mesa_set_remove(def
->if_uses
, entry
);
1845 if_use
->condition
= nir_src_copy(new_src
, mem_ctx
);
1846 _mesa_set_add(new_if_uses
, _mesa_hash_pointer(if_use
), if_use
);
1851 static bool foreach_cf_node(nir_cf_node
*node
, nir_foreach_block_cb cb
,
1852 bool reverse
, void *state
);
1855 foreach_if(nir_if
*if_stmt
, nir_foreach_block_cb cb
, bool reverse
, void *state
)
1858 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
,
1859 &if_stmt
->else_list
) {
1860 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1864 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
,
1865 &if_stmt
->then_list
) {
1866 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1870 foreach_list_typed_safe(nir_cf_node
, node
, node
, &if_stmt
->then_list
) {
1871 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1875 foreach_list_typed_safe(nir_cf_node
, node
, node
, &if_stmt
->else_list
) {
1876 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1885 foreach_loop(nir_loop
*loop
, nir_foreach_block_cb cb
, bool reverse
, void *state
)
1888 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
, &loop
->body
) {
1889 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1893 foreach_list_typed_safe(nir_cf_node
, node
, node
, &loop
->body
) {
1894 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1903 foreach_cf_node(nir_cf_node
*node
, nir_foreach_block_cb cb
,
1904 bool reverse
, void *state
)
1906 switch (node
->type
) {
1907 case nir_cf_node_block
:
1908 return cb(nir_cf_node_as_block(node
), state
);
1909 case nir_cf_node_if
:
1910 return foreach_if(nir_cf_node_as_if(node
), cb
, reverse
, state
);
1911 case nir_cf_node_loop
:
1912 return foreach_loop(nir_cf_node_as_loop(node
), cb
, reverse
, state
);
1916 unreachable("Invalid CFG node type");
1924 nir_foreach_block(nir_function_impl
*impl
, nir_foreach_block_cb cb
, void *state
)
1926 foreach_list_typed_safe(nir_cf_node
, node
, node
, &impl
->body
) {
1927 if (!foreach_cf_node(node
, cb
, false, state
))
1931 return cb(impl
->end_block
, state
);
1935 nir_foreach_block_reverse(nir_function_impl
*impl
, nir_foreach_block_cb cb
,
1938 if (!cb(impl
->end_block
, state
))
1941 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
, &impl
->body
) {
1942 if (!foreach_cf_node(node
, cb
, true, state
))
1950 nir_block_get_following_if(nir_block
*block
)
1952 if (exec_node_is_tail_sentinel(&block
->cf_node
.node
))
1955 if (nir_cf_node_is_last(&block
->cf_node
))
1958 nir_cf_node
*next_node
= nir_cf_node_next(&block
->cf_node
);
1960 if (next_node
->type
!= nir_cf_node_if
)
1963 return nir_cf_node_as_if(next_node
);
1967 index_block(nir_block
*block
, void *state
)
1969 unsigned *index
= (unsigned *) state
;
1970 block
->index
= (*index
)++;
1975 nir_index_blocks(nir_function_impl
*impl
)
1979 if (impl
->valid_metadata
& nir_metadata_block_index
)
1982 nir_foreach_block(impl
, index_block
, &index
);
1984 impl
->num_blocks
= index
;
1988 index_ssa_def(nir_ssa_def
*def
, unsigned *index
)
1990 def
->index
= (*index
)++;
1994 index_ssa_def_cb(nir_dest
*dest
, void *state
)
1996 unsigned *index
= (unsigned *) state
;
1998 index_ssa_def(&dest
->ssa
, index
);
2003 index_ssa_undef(nir_ssa_undef_instr
*instr
, unsigned *index
)
2005 index_ssa_def(&instr
->def
, index
);
2009 index_ssa_block(nir_block
*block
, void *state
)
2011 unsigned *index
= (unsigned *) state
;
2013 nir_foreach_instr(block
, instr
) {
2014 if (instr
->type
== nir_instr_type_ssa_undef
)
2015 index_ssa_undef(nir_instr_as_ssa_undef(instr
), index
);
2017 nir_foreach_dest(instr
, index_ssa_def_cb
, state
);
2024 nir_index_ssa_defs(nir_function_impl
*impl
)
2027 nir_foreach_block(impl
, index_ssa_block
, &index
);
2028 impl
->ssa_alloc
= index
;