2 * Copyright © 2014 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 * Connor Abbott (cwabbott0@gmail.com)
32 nir_shader_create(void *mem_ctx
, const nir_shader_compiler_options
*options
)
34 nir_shader
*shader
= ralloc(mem_ctx
, nir_shader
);
36 shader
->uniforms
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
37 _mesa_key_string_equal
);
38 shader
->inputs
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
39 _mesa_key_string_equal
);
40 shader
->outputs
= _mesa_hash_table_create(shader
, _mesa_key_hash_string
,
41 _mesa_key_string_equal
);
43 shader
->options
= options
;
45 shader
->num_user_structures
= 0;
46 shader
->user_structures
= NULL
;
48 exec_list_make_empty(&shader
->functions
);
49 exec_list_make_empty(&shader
->registers
);
50 exec_list_make_empty(&shader
->globals
);
51 exec_list_make_empty(&shader
->system_values
);
52 shader
->reg_alloc
= 0;
54 shader
->num_inputs
= 0;
55 shader
->num_outputs
= 0;
56 shader
->num_uniforms
= 0;
62 reg_create(void *mem_ctx
, struct exec_list
*list
)
64 nir_register
*reg
= ralloc(mem_ctx
, nir_register
);
66 reg
->uses
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
67 _mesa_key_pointer_equal
);
68 reg
->defs
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
69 _mesa_key_pointer_equal
);
70 reg
->if_uses
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
71 _mesa_key_pointer_equal
);
73 reg
->num_components
= 0;
74 reg
->num_array_elems
= 0;
75 reg
->is_packed
= false;
78 exec_list_push_tail(list
, ®
->node
);
84 nir_global_reg_create(nir_shader
*shader
)
86 nir_register
*reg
= reg_create(shader
, &shader
->registers
);
87 reg
->index
= shader
->reg_alloc
++;
88 reg
->is_global
= true;
94 nir_local_reg_create(nir_function_impl
*impl
)
96 nir_register
*reg
= reg_create(ralloc_parent(impl
), &impl
->registers
);
97 reg
->index
= impl
->reg_alloc
++;
98 reg
->is_global
= false;
104 nir_reg_remove(nir_register
*reg
)
106 exec_node_remove(®
->node
);
110 nir_function_create(nir_shader
*shader
, const char *name
)
112 nir_function
*func
= ralloc(shader
, nir_function
);
114 exec_list_push_tail(&shader
->functions
, &func
->node
);
115 exec_list_make_empty(&func
->overload_list
);
117 func
->shader
= shader
;
122 nir_function_overload
*
123 nir_function_overload_create(nir_function
*func
)
125 void *mem_ctx
= ralloc_parent(func
);
127 nir_function_overload
*overload
= ralloc(mem_ctx
, nir_function_overload
);
129 overload
->num_params
= 0;
130 overload
->params
= NULL
;
131 overload
->return_type
= glsl_void_type();
132 overload
->impl
= NULL
;
134 exec_list_push_tail(&func
->overload_list
, &overload
->node
);
135 overload
->function
= func
;
140 void nir_src_copy(nir_src
*dest
, const nir_src
*src
, void *mem_ctx
)
142 dest
->is_ssa
= src
->is_ssa
;
144 dest
->ssa
= src
->ssa
;
146 dest
->reg
.base_offset
= src
->reg
.base_offset
;
147 dest
->reg
.reg
= src
->reg
.reg
;
148 if (src
->reg
.indirect
) {
149 dest
->reg
.indirect
= ralloc(mem_ctx
, nir_src
);
150 nir_src_copy(dest
->reg
.indirect
, src
->reg
.indirect
, mem_ctx
);
152 dest
->reg
.indirect
= NULL
;
157 void nir_dest_copy(nir_dest
*dest
, const nir_dest
*src
, void *mem_ctx
)
159 dest
->is_ssa
= src
->is_ssa
;
161 dest
->ssa
= src
->ssa
;
163 dest
->reg
.base_offset
= src
->reg
.base_offset
;
164 dest
->reg
.reg
= src
->reg
.reg
;
165 if (src
->reg
.indirect
) {
166 dest
->reg
.indirect
= ralloc(mem_ctx
, nir_src
);
167 nir_src_copy(dest
->reg
.indirect
, src
->reg
.indirect
, mem_ctx
);
169 dest
->reg
.indirect
= NULL
;
175 nir_alu_src_copy(nir_alu_src
*dest
, const nir_alu_src
*src
, void *mem_ctx
)
177 nir_src_copy(&dest
->src
, &src
->src
, mem_ctx
);
178 dest
->abs
= src
->abs
;
179 dest
->negate
= src
->negate
;
180 for (unsigned i
= 0; i
< 4; i
++)
181 dest
->swizzle
[i
] = src
->swizzle
[i
];
185 nir_alu_dest_copy(nir_alu_dest
*dest
, const nir_alu_dest
*src
, void *mem_ctx
)
187 nir_dest_copy(&dest
->dest
, &src
->dest
, mem_ctx
);
188 dest
->write_mask
= src
->write_mask
;
189 dest
->saturate
= src
->saturate
;
193 block_add_pred(nir_block
*block
, nir_block
*pred
)
195 _mesa_set_add(block
->predecessors
, pred
);
199 cf_init(nir_cf_node
*node
, nir_cf_node_type type
)
201 exec_node_init(&node
->node
);
207 link_blocks(nir_block
*pred
, nir_block
*succ1
, nir_block
*succ2
)
209 pred
->successors
[0] = succ1
;
210 block_add_pred(succ1
, pred
);
212 pred
->successors
[1] = succ2
;
214 block_add_pred(succ2
, pred
);
218 unlink_blocks(nir_block
*pred
, nir_block
*succ
)
220 if (pred
->successors
[0] == succ
) {
221 pred
->successors
[0] = pred
->successors
[1];
222 pred
->successors
[1] = NULL
;
224 assert(pred
->successors
[1] == succ
);
225 pred
->successors
[1] = NULL
;
228 struct set_entry
*entry
= _mesa_set_search(succ
->predecessors
, pred
);
232 _mesa_set_remove(succ
->predecessors
, entry
);
236 unlink_block_successors(nir_block
*block
)
238 if (block
->successors
[0] != NULL
)
239 unlink_blocks(block
, block
->successors
[0]);
240 if (block
->successors
[1] != NULL
)
241 unlink_blocks(block
, block
->successors
[1]);
246 nir_function_impl_create(nir_function_overload
*overload
)
248 assert(overload
->impl
== NULL
);
250 void *mem_ctx
= ralloc_parent(overload
);
252 nir_function_impl
*impl
= ralloc(mem_ctx
, nir_function_impl
);
254 overload
->impl
= impl
;
255 impl
->overload
= overload
;
257 cf_init(&impl
->cf_node
, nir_cf_node_function
);
259 exec_list_make_empty(&impl
->body
);
260 exec_list_make_empty(&impl
->registers
);
261 exec_list_make_empty(&impl
->locals
);
262 impl
->num_params
= 0;
264 impl
->return_var
= NULL
;
267 impl
->valid_metadata
= nir_metadata_none
;
269 /* create start & end blocks */
270 nir_block
*start_block
= nir_block_create(mem_ctx
);
271 nir_block
*end_block
= nir_block_create(mem_ctx
);
272 start_block
->cf_node
.parent
= &impl
->cf_node
;
273 end_block
->cf_node
.parent
= &impl
->cf_node
;
274 impl
->start_block
= start_block
;
275 impl
->end_block
= end_block
;
277 exec_list_push_tail(&impl
->body
, &start_block
->cf_node
.node
);
279 start_block
->successors
[0] = end_block
;
280 block_add_pred(end_block
, start_block
);
286 nir_block_create(void *mem_ctx
)
288 nir_block
*block
= ralloc(mem_ctx
, nir_block
);
290 cf_init(&block
->cf_node
, nir_cf_node_block
);
292 block
->successors
[0] = block
->successors
[1] = NULL
;
293 block
->predecessors
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
294 _mesa_key_pointer_equal
);
295 block
->imm_dom
= NULL
;
296 block
->dom_frontier
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
297 _mesa_key_pointer_equal
);
299 exec_list_make_empty(&block
->instr_list
);
305 src_init(nir_src
*src
)
309 src
->reg
.indirect
= NULL
;
310 src
->reg
.base_offset
= 0;
314 nir_if_create(void *mem_ctx
)
316 nir_if
*if_stmt
= ralloc(mem_ctx
, nir_if
);
318 cf_init(&if_stmt
->cf_node
, nir_cf_node_if
);
319 src_init(&if_stmt
->condition
);
321 nir_block
*then
= nir_block_create(mem_ctx
);
322 exec_list_make_empty(&if_stmt
->then_list
);
323 exec_list_push_tail(&if_stmt
->then_list
, &then
->cf_node
.node
);
324 then
->cf_node
.parent
= &if_stmt
->cf_node
;
326 nir_block
*else_stmt
= nir_block_create(mem_ctx
);
327 exec_list_make_empty(&if_stmt
->else_list
);
328 exec_list_push_tail(&if_stmt
->else_list
, &else_stmt
->cf_node
.node
);
329 else_stmt
->cf_node
.parent
= &if_stmt
->cf_node
;
335 nir_loop_create(void *mem_ctx
)
337 nir_loop
*loop
= ralloc(mem_ctx
, nir_loop
);
339 cf_init(&loop
->cf_node
, nir_cf_node_loop
);
341 nir_block
*body
= nir_block_create(mem_ctx
);
342 exec_list_make_empty(&loop
->body
);
343 exec_list_push_tail(&loop
->body
, &body
->cf_node
.node
);
344 body
->cf_node
.parent
= &loop
->cf_node
;
346 body
->successors
[0] = body
;
347 block_add_pred(body
, body
);
353 instr_init(nir_instr
*instr
, nir_instr_type type
)
357 exec_node_init(&instr
->node
);
361 dest_init(nir_dest
*dest
)
363 dest
->is_ssa
= false;
364 dest
->reg
.reg
= NULL
;
365 dest
->reg
.indirect
= NULL
;
366 dest
->reg
.base_offset
= 0;
370 alu_dest_init(nir_alu_dest
*dest
)
372 dest_init(&dest
->dest
);
373 dest
->saturate
= false;
374 dest
->write_mask
= 0xf;
378 alu_src_init(nir_alu_src
*src
)
381 src
->abs
= src
->negate
= false;
389 nir_alu_instr_create(void *mem_ctx
, nir_op op
)
391 unsigned num_srcs
= nir_op_infos
[op
].num_inputs
;
392 nir_alu_instr
*instr
=
394 sizeof(nir_alu_instr
) + num_srcs
* sizeof(nir_alu_src
));
396 instr_init(&instr
->instr
, nir_instr_type_alu
);
398 alu_dest_init(&instr
->dest
);
399 for (unsigned i
= 0; i
< num_srcs
; i
++)
400 alu_src_init(&instr
->src
[i
]);
406 nir_jump_instr_create(void *mem_ctx
, nir_jump_type type
)
408 nir_jump_instr
*instr
= ralloc(mem_ctx
, nir_jump_instr
);
409 instr_init(&instr
->instr
, nir_instr_type_jump
);
414 nir_load_const_instr
*
415 nir_load_const_instr_create(void *mem_ctx
, unsigned num_components
)
417 nir_load_const_instr
*instr
= ralloc(mem_ctx
, nir_load_const_instr
);
418 instr_init(&instr
->instr
, nir_instr_type_load_const
);
420 nir_ssa_def_init(&instr
->instr
, &instr
->def
, num_components
, NULL
);
425 nir_intrinsic_instr
*
426 nir_intrinsic_instr_create(void *mem_ctx
, nir_intrinsic_op op
)
428 unsigned num_srcs
= nir_intrinsic_infos
[op
].num_srcs
;
429 nir_intrinsic_instr
*instr
=
431 sizeof(nir_intrinsic_instr
) + num_srcs
* sizeof(nir_src
));
433 instr_init(&instr
->instr
, nir_instr_type_intrinsic
);
434 instr
->intrinsic
= op
;
436 if (nir_intrinsic_infos
[op
].has_dest
)
437 dest_init(&instr
->dest
);
439 for (unsigned i
= 0; i
< num_srcs
; i
++)
440 src_init(&instr
->src
[i
]);
446 nir_call_instr_create(void *mem_ctx
, nir_function_overload
*callee
)
448 nir_call_instr
*instr
= ralloc(mem_ctx
, nir_call_instr
);
449 instr_init(&instr
->instr
, nir_instr_type_call
);
451 instr
->callee
= callee
;
452 instr
->num_params
= callee
->num_params
;
453 instr
->params
= ralloc_array(mem_ctx
, nir_deref_var
*, instr
->num_params
);
454 instr
->return_deref
= NULL
;
460 nir_tex_instr_create(void *mem_ctx
, unsigned num_srcs
)
462 nir_tex_instr
*instr
= ralloc(mem_ctx
, nir_tex_instr
);
463 instr_init(&instr
->instr
, nir_instr_type_tex
);
465 dest_init(&instr
->dest
);
467 instr
->num_srcs
= num_srcs
;
468 instr
->src
= ralloc_array(mem_ctx
, nir_tex_src
, num_srcs
);
469 for (unsigned i
= 0; i
< num_srcs
; i
++)
470 src_init(&instr
->src
[i
].src
);
472 instr
->sampler_index
= 0;
473 instr
->sampler_array_size
= 0;
474 instr
->sampler
= NULL
;
480 nir_phi_instr_create(void *mem_ctx
)
482 nir_phi_instr
*instr
= ralloc(mem_ctx
, nir_phi_instr
);
483 instr_init(&instr
->instr
, nir_instr_type_phi
);
485 dest_init(&instr
->dest
);
486 exec_list_make_empty(&instr
->srcs
);
490 nir_parallel_copy_instr
*
491 nir_parallel_copy_instr_create(void *mem_ctx
)
493 nir_parallel_copy_instr
*instr
= ralloc(mem_ctx
, nir_parallel_copy_instr
);
494 instr_init(&instr
->instr
, nir_instr_type_parallel_copy
);
496 exec_list_make_empty(&instr
->entries
);
501 nir_ssa_undef_instr
*
502 nir_ssa_undef_instr_create(void *mem_ctx
, unsigned num_components
)
504 nir_ssa_undef_instr
*instr
= ralloc(mem_ctx
, nir_ssa_undef_instr
);
505 instr_init(&instr
->instr
, nir_instr_type_ssa_undef
);
507 nir_ssa_def_init(&instr
->instr
, &instr
->def
, num_components
, NULL
);
513 nir_deref_var_create(void *mem_ctx
, nir_variable
*var
)
515 nir_deref_var
*deref
= ralloc(mem_ctx
, nir_deref_var
);
516 deref
->deref
.deref_type
= nir_deref_type_var
;
517 deref
->deref
.child
= NULL
;
518 deref
->deref
.type
= var
->type
;
524 nir_deref_array_create(void *mem_ctx
)
526 nir_deref_array
*deref
= ralloc(mem_ctx
, nir_deref_array
);
527 deref
->deref
.deref_type
= nir_deref_type_array
;
528 deref
->deref
.child
= NULL
;
529 deref
->deref_array_type
= nir_deref_array_type_direct
;
530 src_init(&deref
->indirect
);
531 deref
->base_offset
= 0;
536 nir_deref_struct_create(void *mem_ctx
, unsigned field_index
)
538 nir_deref_struct
*deref
= ralloc(mem_ctx
, nir_deref_struct
);
539 deref
->deref
.deref_type
= nir_deref_type_struct
;
540 deref
->deref
.child
= NULL
;
541 deref
->index
= field_index
;
545 static nir_deref_var
*
546 copy_deref_var(void *mem_ctx
, nir_deref_var
*deref
)
548 nir_deref_var
*ret
= nir_deref_var_create(mem_ctx
, deref
->var
);
549 ret
->deref
.type
= deref
->deref
.type
;
550 if (deref
->deref
.child
)
551 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
555 static nir_deref_array
*
556 copy_deref_array(void *mem_ctx
, nir_deref_array
*deref
)
558 nir_deref_array
*ret
= nir_deref_array_create(mem_ctx
);
559 ret
->base_offset
= deref
->base_offset
;
560 ret
->deref_array_type
= deref
->deref_array_type
;
561 if (deref
->deref_array_type
== nir_deref_array_type_indirect
) {
562 nir_src_copy(&ret
->indirect
, &deref
->indirect
, mem_ctx
);
564 ret
->deref
.type
= deref
->deref
.type
;
565 if (deref
->deref
.child
)
566 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
570 static nir_deref_struct
*
571 copy_deref_struct(void *mem_ctx
, nir_deref_struct
*deref
)
573 nir_deref_struct
*ret
= nir_deref_struct_create(mem_ctx
, deref
->index
);
574 ret
->deref
.type
= deref
->deref
.type
;
575 if (deref
->deref
.child
)
576 ret
->deref
.child
= nir_copy_deref(mem_ctx
, deref
->deref
.child
);
581 nir_copy_deref(void *mem_ctx
, nir_deref
*deref
)
583 switch (deref
->deref_type
) {
584 case nir_deref_type_var
:
585 return ©_deref_var(mem_ctx
, nir_deref_as_var(deref
))->deref
;
586 case nir_deref_type_array
:
587 return ©_deref_array(mem_ctx
, nir_deref_as_array(deref
))->deref
;
588 case nir_deref_type_struct
:
589 return ©_deref_struct(mem_ctx
, nir_deref_as_struct(deref
))->deref
;
591 unreachable("Invalid dereference type");
/**
 * \name Control flow modification
 *
 * These functions modify the control flow tree while keeping the control flow
 * graph up-to-date. The invariants respected are:
 * 1. Each then statement, else statement, or loop body must have at least one
 *    basic block.
 * 2. Each if-statement and loop must have one basic block before it and one
 *    after.
 * 3. Two basic blocks cannot be directly next to each other.
 * 4. If a basic block has a jump instruction, there must be only one and it
 *    must be at the end of the block.
 * 5. The CFG must always be connected - this means that we must insert a fake
 *    CFG edge for loops with no break statement.
 *
 * The purpose of the second one is so that we have places to insert code during
 * GCM, as well as eliminating the possibility of critical edges.
 */
/*@{*/
619 link_non_block_to_block(nir_cf_node
*node
, nir_block
*block
)
621 if (node
->type
== nir_cf_node_if
) {
623 * We're trying to link an if to a block after it; this just means linking
624 * the last block of the then and else branches.
627 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
629 nir_cf_node
*last_then
= nir_if_last_then_node(if_stmt
);
630 assert(last_then
->type
== nir_cf_node_block
);
631 nir_block
*last_then_block
= nir_cf_node_as_block(last_then
);
633 nir_cf_node
*last_else
= nir_if_last_else_node(if_stmt
);
634 assert(last_else
->type
== nir_cf_node_block
);
635 nir_block
*last_else_block
= nir_cf_node_as_block(last_else
);
637 if (exec_list_is_empty(&last_then_block
->instr_list
) ||
638 nir_block_last_instr(last_then_block
)->type
!= nir_instr_type_jump
) {
639 unlink_block_successors(last_then_block
);
640 link_blocks(last_then_block
, block
, NULL
);
643 if (exec_list_is_empty(&last_else_block
->instr_list
) ||
644 nir_block_last_instr(last_else_block
)->type
!= nir_instr_type_jump
) {
645 unlink_block_successors(last_else_block
);
646 link_blocks(last_else_block
, block
, NULL
);
649 assert(node
->type
== nir_cf_node_loop
);
652 * We can only get to this codepath if we're inserting a new loop, or
653 * at least a loop with no break statements; we can't insert break
654 * statements into a loop when we haven't inserted it into the CFG
655 * because we wouldn't know which block comes after the loop
656 * and therefore, which block should be the successor of the block with
657 * the break). Therefore, we need to insert a fake edge (see invariant
661 nir_loop
*loop
= nir_cf_node_as_loop(node
);
663 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
664 assert(last
->type
== nir_cf_node_block
);
665 nir_block
*last_block
= nir_cf_node_as_block(last
);
667 last_block
->successors
[1] = block
;
668 block_add_pred(block
, last_block
);
673 link_block_to_non_block(nir_block
*block
, nir_cf_node
*node
)
675 if (node
->type
== nir_cf_node_if
) {
677 * We're trying to link a block to an if after it; this just means linking
678 * the block to the first block of the then and else branches.
681 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
683 nir_cf_node
*first_then
= nir_if_first_then_node(if_stmt
);
684 assert(first_then
->type
== nir_cf_node_block
);
685 nir_block
*first_then_block
= nir_cf_node_as_block(first_then
);
687 nir_cf_node
*first_else
= nir_if_first_else_node(if_stmt
);
688 assert(first_else
->type
== nir_cf_node_block
);
689 nir_block
*first_else_block
= nir_cf_node_as_block(first_else
);
691 unlink_block_successors(block
);
692 link_blocks(block
, first_then_block
, first_else_block
);
695 * For similar reasons as the corresponding case in
696 * link_non_block_to_block(), don't worry about if the loop header has
697 * any predecessors that need to be unlinked.
700 assert(node
->type
== nir_cf_node_loop
);
702 nir_loop
*loop
= nir_cf_node_as_loop(node
);
704 nir_cf_node
*loop_header
= nir_loop_first_cf_node(loop
);
705 assert(loop_header
->type
== nir_cf_node_block
);
706 nir_block
*loop_header_block
= nir_cf_node_as_block(loop_header
);
708 unlink_block_successors(block
);
709 link_blocks(block
, loop_header_block
, NULL
);
715 * Takes a basic block and inserts a new empty basic block before it, making its
716 * predecessors point to the new block. This essentially splits the block into
717 * an empty header and a body so that another non-block CF node can be inserted
718 * between the two. Note that this does *not* link the two basic blocks, so
719 * some kind of cleanup *must* be performed after this call.
723 split_block_beginning(nir_block
*block
)
725 nir_block
*new_block
= nir_block_create(ralloc_parent(block
));
726 new_block
->cf_node
.parent
= block
->cf_node
.parent
;
727 exec_node_insert_node_before(&block
->cf_node
.node
, &new_block
->cf_node
.node
);
729 struct set_entry
*entry
;
730 set_foreach(block
->predecessors
, entry
) {
731 nir_block
*pred
= (nir_block
*) entry
->key
;
733 unlink_blocks(pred
, block
);
734 link_blocks(pred
, new_block
, NULL
);
741 rewrite_phi_preds(nir_block
*block
, nir_block
*old_pred
, nir_block
*new_pred
)
743 nir_foreach_instr_safe(block
, instr
) {
744 if (instr
->type
!= nir_instr_type_phi
)
747 nir_phi_instr
*phi
= nir_instr_as_phi(instr
);
748 nir_foreach_phi_src(phi
, src
) {
749 if (src
->pred
== old_pred
) {
750 src
->pred
= new_pred
;
758 * Moves the successors of source to the successors of dest, leaving both
759 * successors of source NULL.
763 move_successors(nir_block
*source
, nir_block
*dest
)
765 nir_block
*succ1
= source
->successors
[0];
766 nir_block
*succ2
= source
->successors
[1];
769 unlink_blocks(source
, succ1
);
770 rewrite_phi_preds(succ1
, source
, dest
);
774 unlink_blocks(source
, succ2
);
775 rewrite_phi_preds(succ2
, source
, dest
);
778 unlink_block_successors(dest
);
779 link_blocks(dest
, succ1
, succ2
);
783 split_block_end(nir_block
*block
)
785 nir_block
*new_block
= nir_block_create(ralloc_parent(block
));
786 new_block
->cf_node
.parent
= block
->cf_node
.parent
;
787 exec_node_insert_after(&block
->cf_node
.node
, &new_block
->cf_node
.node
);
789 move_successors(block
, new_block
);
795 * Inserts a non-basic block between two basic blocks and links them together.
799 insert_non_block(nir_block
*before
, nir_cf_node
*node
, nir_block
*after
)
801 node
->parent
= before
->cf_node
.parent
;
802 exec_node_insert_after(&before
->cf_node
.node
, &node
->node
);
803 link_block_to_non_block(before
, node
);
804 link_non_block_to_block(node
, after
);
808 * Inserts a non-basic block before a basic block.
812 insert_non_block_before_block(nir_cf_node
*node
, nir_block
*block
)
814 /* split off the beginning of block into new_block */
815 nir_block
*new_block
= split_block_beginning(block
);
817 /* insert our node in between new_block and block */
818 insert_non_block(new_block
, node
, block
);
822 insert_non_block_after_block(nir_block
*block
, nir_cf_node
*node
)
824 /* split off the end of block into new_block */
825 nir_block
*new_block
= split_block_end(block
);
827 /* insert our node in between block and new_block */
828 insert_non_block(block
, node
, new_block
);
831 /* walk up the control flow tree to find the innermost enclosed loop */
833 nearest_loop(nir_cf_node
*node
)
835 while (node
->type
!= nir_cf_node_loop
) {
839 return nir_cf_node_as_loop(node
);
843 nir_cf_node_get_function(nir_cf_node
*node
)
845 while (node
->type
!= nir_cf_node_function
) {
849 return nir_cf_node_as_function(node
);
853 * update the CFG after a jump instruction has been added to the end of a block
857 handle_jump(nir_block
*block
)
859 nir_instr
*instr
= nir_block_last_instr(block
);
860 nir_jump_instr
*jump_instr
= nir_instr_as_jump(instr
);
862 unlink_block_successors(block
);
864 nir_function_impl
*impl
= nir_cf_node_get_function(&block
->cf_node
);
865 nir_metadata_preserve(impl
, nir_metadata_none
);
867 if (jump_instr
->type
== nir_jump_break
||
868 jump_instr
->type
== nir_jump_continue
) {
869 nir_loop
*loop
= nearest_loop(&block
->cf_node
);
871 if (jump_instr
->type
== nir_jump_continue
) {
872 nir_cf_node
*first_node
= nir_loop_first_cf_node(loop
);
873 assert(first_node
->type
== nir_cf_node_block
);
874 nir_block
*first_block
= nir_cf_node_as_block(first_node
);
875 link_blocks(block
, first_block
, NULL
);
877 nir_cf_node
*after
= nir_cf_node_next(&loop
->cf_node
);
878 assert(after
->type
== nir_cf_node_block
);
879 nir_block
*after_block
= nir_cf_node_as_block(after
);
880 link_blocks(block
, after_block
, NULL
);
882 /* If we inserted a fake link, remove it */
883 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
884 assert(last
->type
== nir_cf_node_block
);
885 nir_block
*last_block
= nir_cf_node_as_block(last
);
886 if (last_block
->successors
[1] != NULL
)
887 unlink_blocks(last_block
, after_block
);
890 assert(jump_instr
->type
== nir_jump_return
);
891 link_blocks(block
, impl
->end_block
, NULL
);
896 handle_remove_jump(nir_block
*block
, nir_jump_type type
)
898 unlink_block_successors(block
);
900 if (exec_node_is_tail_sentinel(block
->cf_node
.node
.next
)) {
901 nir_cf_node
*parent
= block
->cf_node
.parent
;
902 if (parent
->type
== nir_cf_node_if
) {
903 nir_cf_node
*next
= nir_cf_node_next(parent
);
904 assert(next
->type
== nir_cf_node_block
);
905 nir_block
*next_block
= nir_cf_node_as_block(next
);
907 link_blocks(block
, next_block
, NULL
);
909 assert(parent
->type
== nir_cf_node_loop
);
910 nir_loop
*loop
= nir_cf_node_as_loop(parent
);
912 nir_cf_node
*head
= nir_loop_first_cf_node(loop
);
913 assert(head
->type
== nir_cf_node_block
);
914 nir_block
*head_block
= nir_cf_node_as_block(head
);
916 link_blocks(block
, head_block
, NULL
);
919 nir_cf_node
*next
= nir_cf_node_next(&block
->cf_node
);
920 if (next
->type
== nir_cf_node_if
) {
921 nir_if
*next_if
= nir_cf_node_as_if(next
);
923 nir_cf_node
*first_then
= nir_if_first_then_node(next_if
);
924 assert(first_then
->type
== nir_cf_node_block
);
925 nir_block
*first_then_block
= nir_cf_node_as_block(first_then
);
927 nir_cf_node
*first_else
= nir_if_first_else_node(next_if
);
928 assert(first_else
->type
== nir_cf_node_block
);
929 nir_block
*first_else_block
= nir_cf_node_as_block(first_else
);
931 link_blocks(block
, first_then_block
, first_else_block
);
933 assert(next
->type
== nir_cf_node_loop
);
934 nir_loop
*next_loop
= nir_cf_node_as_loop(next
);
936 nir_cf_node
*first
= nir_loop_first_cf_node(next_loop
);
937 assert(first
->type
== nir_cf_node_block
);
938 nir_block
*first_block
= nir_cf_node_as_block(first
);
940 link_blocks(block
, first_block
, NULL
);
944 if (type
== nir_jump_break
) {
945 nir_loop
*loop
= nearest_loop(&block
->cf_node
);
947 nir_cf_node
*next
= nir_cf_node_next(&loop
->cf_node
);
948 assert(next
->type
== nir_cf_node_block
);
949 nir_block
*next_block
= nir_cf_node_as_block(next
);
951 if (next_block
->predecessors
->entries
== 0) {
952 /* insert fake link */
953 nir_cf_node
*last
= nir_loop_last_cf_node(loop
);
954 assert(last
->type
== nir_cf_node_block
);
955 nir_block
*last_block
= nir_cf_node_as_block(last
);
957 last_block
->successors
[1] = next_block
;
958 block_add_pred(next_block
, last_block
);
962 nir_function_impl
*impl
= nir_cf_node_get_function(&block
->cf_node
);
963 nir_metadata_preserve(impl
, nir_metadata_none
);
967 * Inserts a basic block before another by merging the instructions.
969 * @param block the target of the insertion
970 * @param before the block to be inserted - must not have been inserted before
971 * @param has_jump whether \before has a jump instruction at the end
975 insert_block_before_block(nir_block
*block
, nir_block
*before
, bool has_jump
)
977 assert(!has_jump
|| exec_list_is_empty(&block
->instr_list
));
979 foreach_list_typed(nir_instr
, instr
, node
, &before
->instr_list
) {
980 instr
->block
= block
;
983 exec_list_prepend(&block
->instr_list
, &before
->instr_list
);
990 * Inserts a basic block after another by merging the instructions.
992 * @param block the target of the insertion
993 * @param after the block to be inserted - must not have been inserted before
994 * @param has_jump whether \after has a jump instruction at the end
998 insert_block_after_block(nir_block
*block
, nir_block
*after
, bool has_jump
)
1000 foreach_list_typed(nir_instr
, instr
, node
, &after
->instr_list
) {
1001 instr
->block
= block
;
1004 exec_list_append(&block
->instr_list
, &after
->instr_list
);
1011 update_if_uses(nir_cf_node
*node
)
1013 if (node
->type
!= nir_cf_node_if
)
1016 nir_if
*if_stmt
= nir_cf_node_as_if(node
);
1018 struct set
*if_uses_set
= if_stmt
->condition
.is_ssa
?
1019 if_stmt
->condition
.ssa
->if_uses
:
1020 if_stmt
->condition
.reg
.reg
->uses
;
1022 _mesa_set_add(if_uses_set
, if_stmt
);
1026 nir_cf_node_insert_after(nir_cf_node
*node
, nir_cf_node
*after
)
1028 update_if_uses(after
);
1030 if (after
->type
== nir_cf_node_block
) {
1032 * either node or the one after it must be a basic block, by invariant #2;
1033 * in either case, just merge the blocks together.
1035 nir_block
*after_block
= nir_cf_node_as_block(after
);
1037 bool has_jump
= !exec_list_is_empty(&after_block
->instr_list
) &&
1038 nir_block_last_instr(after_block
)->type
== nir_instr_type_jump
;
1040 if (node
->type
== nir_cf_node_block
) {
1041 insert_block_after_block(nir_cf_node_as_block(node
), after_block
,
1044 nir_cf_node
*next
= nir_cf_node_next(node
);
1045 assert(next
->type
== nir_cf_node_block
);
1046 nir_block
*next_block
= nir_cf_node_as_block(next
);
1048 insert_block_before_block(next_block
, after_block
, has_jump
);
1051 if (node
->type
== nir_cf_node_block
) {
1052 insert_non_block_after_block(nir_cf_node_as_block(node
), after
);
1055 * We have to insert a non-basic block after a non-basic block. Since
1056 * every non-basic block has a basic block after it, this is equivalent
1057 * to inserting a non-basic block before a basic block.
1060 nir_cf_node
*next
= nir_cf_node_next(node
);
1061 assert(next
->type
== nir_cf_node_block
);
1062 nir_block
*next_block
= nir_cf_node_as_block(next
);
1064 insert_non_block_before_block(after
, next_block
);
1068 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1069 nir_metadata_preserve(impl
, nir_metadata_none
);
1073 nir_cf_node_insert_before(nir_cf_node
*node
, nir_cf_node
*before
)
1075 update_if_uses(before
);
1077 if (before
->type
== nir_cf_node_block
) {
1078 nir_block
*before_block
= nir_cf_node_as_block(before
);
1080 bool has_jump
= !exec_list_is_empty(&before_block
->instr_list
) &&
1081 nir_block_last_instr(before_block
)->type
== nir_instr_type_jump
;
1083 if (node
->type
== nir_cf_node_block
) {
1084 insert_block_before_block(nir_cf_node_as_block(node
), before_block
,
1087 nir_cf_node
*prev
= nir_cf_node_prev(node
);
1088 assert(prev
->type
== nir_cf_node_block
);
1089 nir_block
*prev_block
= nir_cf_node_as_block(prev
);
1091 insert_block_after_block(prev_block
, before_block
, has_jump
);
1094 if (node
->type
== nir_cf_node_block
) {
1095 insert_non_block_before_block(before
, nir_cf_node_as_block(node
));
1098 * We have to insert a non-basic block before a non-basic block. This
1099 * is equivalent to inserting a non-basic block after a basic block.
1102 nir_cf_node
*prev_node
= nir_cf_node_prev(node
);
1103 assert(prev_node
->type
== nir_cf_node_block
);
1104 nir_block
*prev_block
= nir_cf_node_as_block(prev_node
);
1106 insert_non_block_after_block(prev_block
, before
);
1110 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1111 nir_metadata_preserve(impl
, nir_metadata_none
);
1115 nir_cf_node_insert_begin(struct exec_list
*list
, nir_cf_node
*node
)
1117 nir_cf_node
*begin
= exec_node_data(nir_cf_node
, list
->head
, node
);
1118 nir_cf_node_insert_before(begin
, node
);
1122 nir_cf_node_insert_end(struct exec_list
*list
, nir_cf_node
*node
)
1124 nir_cf_node
*end
= exec_node_data(nir_cf_node
, list
->tail_pred
, node
);
1125 nir_cf_node_insert_after(end
, node
);
1129 * Stitch two basic blocks together into one. The aggregate must have the same
1130 * predecessors as the first and the same successors as the second.
1134 stitch_blocks(nir_block
*before
, nir_block
*after
)
1137 * We move after into before, so we have to deal with up to 2 successors vs.
1138 * possibly a large number of predecessors.
1140 * TODO: special case when before is empty and after isn't?
1143 move_successors(after
, before
);
1145 foreach_list_typed(nir_instr
, instr
, node
, &after
->instr_list
) {
1146 instr
->block
= before
;
1149 exec_list_append(&before
->instr_list
, &after
->instr_list
);
1150 exec_node_remove(&after
->cf_node
.node
);
1154 nir_cf_node_remove(nir_cf_node
*node
)
1156 nir_function_impl
*impl
= nir_cf_node_get_function(node
);
1157 nir_metadata_preserve(impl
, nir_metadata_none
);
1159 if (node
->type
== nir_cf_node_block
) {
1161 * Basic blocks can't really be removed by themselves, since they act as
1162 * padding between the non-basic blocks. So all we do here is empty the
1163 * block of instructions.
1165 * TODO: could we assert here?
1167 exec_list_make_empty(&nir_cf_node_as_block(node
)->instr_list
);
1169 nir_cf_node
*before
= nir_cf_node_prev(node
);
1170 assert(before
->type
== nir_cf_node_block
);
1171 nir_block
*before_block
= nir_cf_node_as_block(before
);
1173 nir_cf_node
*after
= nir_cf_node_next(node
);
1174 assert(after
->type
== nir_cf_node_block
);
1175 nir_block
*after_block
= nir_cf_node_as_block(after
);
1177 exec_node_remove(&node
->node
);
1178 stitch_blocks(before_block
, after_block
);
1183 add_use_cb(nir_src
*src
, void *state
)
1185 nir_instr
*instr
= state
;
1187 struct set
*uses_set
= src
->is_ssa
? src
->ssa
->uses
: src
->reg
.reg
->uses
;
1189 _mesa_set_add(uses_set
, instr
);
1195 add_ssa_def_cb(nir_ssa_def
*def
, void *state
)
1197 nir_instr
*instr
= state
;
1199 if (instr
->block
&& def
->index
== UINT_MAX
) {
1200 nir_function_impl
*impl
=
1201 nir_cf_node_get_function(&instr
->block
->cf_node
);
1203 def
->index
= impl
->ssa_alloc
++;
1210 add_reg_def_cb(nir_dest
*dest
, void *state
)
1212 nir_instr
*instr
= state
;
1215 _mesa_set_add(dest
->reg
.reg
->defs
, instr
);
1221 add_defs_uses(nir_instr
*instr
)
1223 nir_foreach_src(instr
, add_use_cb
, instr
);
1224 nir_foreach_dest(instr
, add_reg_def_cb
, instr
);
1225 nir_foreach_ssa_def(instr
, add_ssa_def_cb
, instr
);
1229 nir_instr_insert_before(nir_instr
*instr
, nir_instr
*before
)
1231 assert(before
->type
!= nir_instr_type_jump
);
1232 before
->block
= instr
->block
;
1233 add_defs_uses(before
);
1234 exec_node_insert_node_before(&instr
->node
, &before
->node
);
1238 nir_instr_insert_after(nir_instr
*instr
, nir_instr
*after
)
1240 if (after
->type
== nir_instr_type_jump
) {
1241 assert(instr
== nir_block_last_instr(instr
->block
));
1242 assert(instr
->type
!= nir_instr_type_jump
);
1245 after
->block
= instr
->block
;
1246 add_defs_uses(after
);
1247 exec_node_insert_after(&instr
->node
, &after
->node
);
1249 if (after
->type
== nir_instr_type_jump
)
1250 handle_jump(after
->block
);
1254 nir_instr_insert_before_block(nir_block
*block
, nir_instr
*before
)
1256 if (before
->type
== nir_instr_type_jump
)
1257 assert(exec_list_is_empty(&block
->instr_list
));
1259 before
->block
= block
;
1260 add_defs_uses(before
);
1261 exec_list_push_head(&block
->instr_list
, &before
->node
);
1263 if (before
->type
== nir_instr_type_jump
)
1268 nir_instr_insert_after_block(nir_block
*block
, nir_instr
*after
)
1270 if (after
->type
== nir_instr_type_jump
) {
1271 assert(exec_list_is_empty(&block
->instr_list
) ||
1272 nir_block_last_instr(block
)->type
!= nir_instr_type_jump
);
1275 after
->block
= block
;
1276 add_defs_uses(after
);
1277 exec_list_push_tail(&block
->instr_list
, &after
->node
);
1279 if (after
->type
== nir_instr_type_jump
)
1284 nir_instr_insert_before_cf(nir_cf_node
*node
, nir_instr
*before
)
1286 if (node
->type
== nir_cf_node_block
) {
1287 nir_instr_insert_before_block(nir_cf_node_as_block(node
), before
);
1289 nir_cf_node
*prev
= nir_cf_node_prev(node
);
1290 assert(prev
->type
== nir_cf_node_block
);
1291 nir_block
*prev_block
= nir_cf_node_as_block(prev
);
1293 nir_instr_insert_before_block(prev_block
, before
);
1298 nir_instr_insert_after_cf(nir_cf_node
*node
, nir_instr
*after
)
1300 if (node
->type
== nir_cf_node_block
) {
1301 nir_instr_insert_after_block(nir_cf_node_as_block(node
), after
);
1303 nir_cf_node
*next
= nir_cf_node_next(node
);
1304 assert(next
->type
== nir_cf_node_block
);
1305 nir_block
*next_block
= nir_cf_node_as_block(next
);
1307 nir_instr_insert_before_block(next_block
, after
);
1312 nir_instr_insert_before_cf_list(struct exec_list
*list
, nir_instr
*before
)
1314 nir_cf_node
*first_node
= exec_node_data(nir_cf_node
,
1315 exec_list_get_head(list
), node
);
1316 nir_instr_insert_before_cf(first_node
, before
);
1320 nir_instr_insert_after_cf_list(struct exec_list
*list
, nir_instr
*after
)
1322 nir_cf_node
*last_node
= exec_node_data(nir_cf_node
,
1323 exec_list_get_tail(list
), node
);
1324 nir_instr_insert_after_cf(last_node
, after
);
1328 remove_use_cb(nir_src
*src
, void *state
)
1330 nir_instr
*instr
= state
;
1332 struct set
*uses_set
= src
->is_ssa
? src
->ssa
->uses
: src
->reg
.reg
->uses
;
1334 struct set_entry
*entry
= _mesa_set_search(uses_set
, instr
);
1336 _mesa_set_remove(uses_set
, entry
);
1342 remove_def_cb(nir_dest
*dest
, void *state
)
1344 nir_instr
*instr
= state
;
1349 nir_register
*reg
= dest
->reg
.reg
;
1351 struct set_entry
*entry
= _mesa_set_search(reg
->defs
, instr
);
1353 _mesa_set_remove(reg
->defs
, entry
);
1359 remove_defs_uses(nir_instr
*instr
)
1361 nir_foreach_dest(instr
, remove_def_cb
, instr
);
1362 nir_foreach_src(instr
, remove_use_cb
, instr
);
1365 void nir_instr_remove(nir_instr
*instr
)
1367 remove_defs_uses(instr
);
1368 exec_node_remove(&instr
->node
);
1370 if (instr
->type
== nir_instr_type_jump
) {
1371 nir_jump_instr
*jump_instr
= nir_instr_as_jump(instr
);
1372 handle_remove_jump(instr
->block
, jump_instr
->type
);
1379 nir_index_local_regs(nir_function_impl
*impl
)
1382 foreach_list_typed(nir_register
, reg
, node
, &impl
->registers
) {
1383 reg
->index
= index
++;
1385 impl
->reg_alloc
= index
;
1389 nir_index_global_regs(nir_shader
*shader
)
1392 foreach_list_typed(nir_register
, reg
, node
, &shader
->registers
) {
1393 reg
->index
= index
++;
1395 shader
->reg_alloc
= index
;
1399 visit_alu_dest(nir_alu_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1401 return cb(&instr
->dest
.dest
, state
);
1405 visit_intrinsic_dest(nir_intrinsic_instr
*instr
, nir_foreach_dest_cb cb
,
1408 if (nir_intrinsic_infos
[instr
->intrinsic
].has_dest
)
1409 return cb(&instr
->dest
, state
);
1415 visit_texture_dest(nir_tex_instr
*instr
, nir_foreach_dest_cb cb
,
1418 return cb(&instr
->dest
, state
);
1422 visit_phi_dest(nir_phi_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1424 return cb(&instr
->dest
, state
);
1428 visit_parallel_copy_dest(nir_parallel_copy_instr
*instr
,
1429 nir_foreach_dest_cb cb
, void *state
)
1431 nir_foreach_parallel_copy_entry(instr
, entry
) {
1432 if (!cb(&entry
->dest
, state
))
1440 nir_foreach_dest(nir_instr
*instr
, nir_foreach_dest_cb cb
, void *state
)
1442 switch (instr
->type
) {
1443 case nir_instr_type_alu
:
1444 return visit_alu_dest(nir_instr_as_alu(instr
), cb
, state
);
1445 case nir_instr_type_intrinsic
:
1446 return visit_intrinsic_dest(nir_instr_as_intrinsic(instr
), cb
, state
);
1447 case nir_instr_type_tex
:
1448 return visit_texture_dest(nir_instr_as_tex(instr
), cb
, state
);
1449 case nir_instr_type_phi
:
1450 return visit_phi_dest(nir_instr_as_phi(instr
), cb
, state
);
1451 case nir_instr_type_parallel_copy
:
1452 return visit_parallel_copy_dest(nir_instr_as_parallel_copy(instr
),
1455 case nir_instr_type_load_const
:
1456 case nir_instr_type_ssa_undef
:
1457 case nir_instr_type_call
:
1458 case nir_instr_type_jump
:
1462 unreachable("Invalid instruction type");
1469 struct foreach_ssa_def_state
{
1470 nir_foreach_ssa_def_cb cb
;
1475 nir_ssa_def_visitor(nir_dest
*dest
, void *void_state
)
1477 struct foreach_ssa_def_state
*state
= void_state
;
1480 return state
->cb(&dest
->ssa
, state
->client_state
);
1486 nir_foreach_ssa_def(nir_instr
*instr
, nir_foreach_ssa_def_cb cb
, void *state
)
1488 switch (instr
->type
) {
1489 case nir_instr_type_alu
:
1490 case nir_instr_type_tex
:
1491 case nir_instr_type_intrinsic
:
1492 case nir_instr_type_phi
:
1493 case nir_instr_type_parallel_copy
: {
1494 struct foreach_ssa_def_state foreach_state
= {cb
, state
};
1495 return nir_foreach_dest(instr
, nir_ssa_def_visitor
, &foreach_state
);
1498 case nir_instr_type_load_const
:
1499 return cb(&nir_instr_as_load_const(instr
)->def
, state
);
1500 case nir_instr_type_ssa_undef
:
1501 return cb(&nir_instr_as_ssa_undef(instr
)->def
, state
);
1502 case nir_instr_type_call
:
1503 case nir_instr_type_jump
:
1506 unreachable("Invalid instruction type");
1511 visit_src(nir_src
*src
, nir_foreach_src_cb cb
, void *state
)
1513 if (!cb(src
, state
))
1515 if (!src
->is_ssa
&& src
->reg
.indirect
)
1516 return cb(src
->reg
.indirect
, state
);
1521 visit_deref_array_src(nir_deref_array
*deref
, nir_foreach_src_cb cb
,
1524 if (deref
->deref_array_type
== nir_deref_array_type_indirect
)
1525 return visit_src(&deref
->indirect
, cb
, state
);
1530 visit_deref_src(nir_deref_var
*deref
, nir_foreach_src_cb cb
, void *state
)
1532 nir_deref
*cur
= &deref
->deref
;
1533 while (cur
!= NULL
) {
1534 if (cur
->deref_type
== nir_deref_type_array
)
1535 if (!visit_deref_array_src(nir_deref_as_array(cur
), cb
, state
))
1545 visit_alu_src(nir_alu_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1547 for (unsigned i
= 0; i
< nir_op_infos
[instr
->op
].num_inputs
; i
++)
1548 if (!visit_src(&instr
->src
[i
].src
, cb
, state
))
1555 visit_tex_src(nir_tex_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1557 for (unsigned i
= 0; i
< instr
->num_srcs
; i
++)
1558 if (!visit_src(&instr
->src
[i
].src
, cb
, state
))
1561 if (instr
->sampler
!= NULL
)
1562 if (!visit_deref_src(instr
->sampler
, cb
, state
))
1569 visit_intrinsic_src(nir_intrinsic_instr
*instr
, nir_foreach_src_cb cb
,
1572 unsigned num_srcs
= nir_intrinsic_infos
[instr
->intrinsic
].num_srcs
;
1573 for (unsigned i
= 0; i
< num_srcs
; i
++)
1574 if (!visit_src(&instr
->src
[i
], cb
, state
))
1578 nir_intrinsic_infos
[instr
->intrinsic
].num_variables
;
1579 for (unsigned i
= 0; i
< num_vars
; i
++)
1580 if (!visit_deref_src(instr
->variables
[i
], cb
, state
))
1587 visit_call_src(nir_call_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1593 visit_load_const_src(nir_load_const_instr
*instr
, nir_foreach_src_cb cb
,
1600 visit_phi_src(nir_phi_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1602 nir_foreach_phi_src(instr
, src
) {
1603 if (!visit_src(&src
->src
, cb
, state
))
1611 visit_parallel_copy_src(nir_parallel_copy_instr
*instr
,
1612 nir_foreach_src_cb cb
, void *state
)
1614 nir_foreach_parallel_copy_entry(instr
, entry
) {
1615 if (!visit_src(&entry
->src
, cb
, state
))
1624 nir_foreach_src_cb cb
;
1625 } visit_dest_indirect_state
;
1628 visit_dest_indirect(nir_dest
*dest
, void *_state
)
1630 visit_dest_indirect_state
*state
= (visit_dest_indirect_state
*) _state
;
1632 if (!dest
->is_ssa
&& dest
->reg
.indirect
)
1633 return state
->cb(dest
->reg
.indirect
, state
->state
);
1639 nir_foreach_src(nir_instr
*instr
, nir_foreach_src_cb cb
, void *state
)
1641 switch (instr
->type
) {
1642 case nir_instr_type_alu
:
1643 if (!visit_alu_src(nir_instr_as_alu(instr
), cb
, state
))
1646 case nir_instr_type_intrinsic
:
1647 if (!visit_intrinsic_src(nir_instr_as_intrinsic(instr
), cb
, state
))
1650 case nir_instr_type_tex
:
1651 if (!visit_tex_src(nir_instr_as_tex(instr
), cb
, state
))
1654 case nir_instr_type_call
:
1655 if (!visit_call_src(nir_instr_as_call(instr
), cb
, state
))
1658 case nir_instr_type_load_const
:
1659 if (!visit_load_const_src(nir_instr_as_load_const(instr
), cb
, state
))
1662 case nir_instr_type_phi
:
1663 if (!visit_phi_src(nir_instr_as_phi(instr
), cb
, state
))
1666 case nir_instr_type_parallel_copy
:
1667 if (!visit_parallel_copy_src(nir_instr_as_parallel_copy(instr
),
1671 case nir_instr_type_jump
:
1672 case nir_instr_type_ssa_undef
:
1676 unreachable("Invalid instruction type");
1680 visit_dest_indirect_state dest_state
;
1681 dest_state
.state
= state
;
1683 return nir_foreach_dest(instr
, visit_dest_indirect
, &dest_state
);
1687 nir_src_as_const_value(nir_src src
)
1692 if (src
.ssa
->parent_instr
->type
!= nir_instr_type_load_const
)
1695 nir_load_const_instr
*load
= nir_instr_as_load_const(src
.ssa
->parent_instr
);
1697 return &load
->value
;
1701 nir_srcs_equal(nir_src src1
, nir_src src2
)
1705 return src1
.ssa
== src2
.ssa
;
1713 if ((src1
.reg
.indirect
== NULL
) != (src2
.reg
.indirect
== NULL
))
1716 if (src1
.reg
.indirect
) {
1717 if (!nir_srcs_equal(*src1
.reg
.indirect
, *src2
.reg
.indirect
))
1721 return src1
.reg
.reg
== src2
.reg
.reg
&&
1722 src1
.reg
.base_offset
== src2
.reg
.base_offset
;
1728 src_does_not_use_def(nir_src
*src
, void *void_def
)
1730 nir_ssa_def
*def
= void_def
;
1733 return src
->ssa
!= def
;
1740 src_does_not_use_reg(nir_src
*src
, void *void_reg
)
1742 nir_register
*reg
= void_reg
;
1747 return src
->reg
.reg
!= reg
;
1752 nir_instr_rewrite_src(nir_instr
*instr
, nir_src
*src
, nir_src new_src
)
1755 nir_ssa_def
*old_ssa
= src
->ssa
;
1757 if (old_ssa
&& nir_foreach_src(instr
, src_does_not_use_def
, old_ssa
)) {
1758 struct set_entry
*entry
= _mesa_set_search(old_ssa
->uses
, instr
);
1760 _mesa_set_remove(old_ssa
->uses
, entry
);
1763 if (src
->reg
.indirect
)
1764 nir_instr_rewrite_src(instr
, src
->reg
.indirect
, new_src
);
1766 nir_register
*old_reg
= src
->reg
.reg
;
1768 if (old_reg
&& nir_foreach_src(instr
, src_does_not_use_reg
, old_reg
)) {
1769 struct set_entry
*entry
= _mesa_set_search(old_reg
->uses
, instr
);
1771 _mesa_set_remove(old_reg
->uses
, entry
);
1775 if (new_src
.is_ssa
) {
1777 _mesa_set_add(new_src
.ssa
->uses
, instr
);
1779 if (new_src
.reg
.reg
)
1780 _mesa_set_add(new_src
.reg
.reg
->uses
, instr
);
1785 nir_ssa_def_init(nir_instr
*instr
, nir_ssa_def
*def
,
1786 unsigned num_components
, const char *name
)
1788 void *mem_ctx
= ralloc_parent(instr
);
1791 def
->parent_instr
= instr
;
1792 def
->uses
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
1793 _mesa_key_pointer_equal
);
1794 def
->if_uses
= _mesa_set_create(mem_ctx
, _mesa_hash_pointer
,
1795 _mesa_key_pointer_equal
);
1796 def
->num_components
= num_components
;
1799 nir_function_impl
*impl
=
1800 nir_cf_node_get_function(&instr
->block
->cf_node
);
1802 def
->index
= impl
->ssa_alloc
++;
1804 def
->index
= UINT_MAX
;
1809 nir_ssa_dest_init(nir_instr
*instr
, nir_dest
*dest
,
1810 unsigned num_components
, const char *name
)
1812 dest
->is_ssa
= true;
1813 nir_ssa_def_init(instr
, &dest
->ssa
, num_components
, name
);
1816 struct ssa_def_rewrite_state
{
1823 ssa_def_rewrite_uses_src(nir_src
*src
, void *void_state
)
1825 struct ssa_def_rewrite_state
*state
= void_state
;
1827 if (src
->is_ssa
&& src
->ssa
== state
->old
)
1828 nir_src_copy(src
, &state
->new_src
, state
->mem_ctx
);
1834 nir_ssa_def_rewrite_uses(nir_ssa_def
*def
, nir_src new_src
, void *mem_ctx
)
1836 struct ssa_def_rewrite_state state
;
1837 state
.mem_ctx
= mem_ctx
;
1839 state
.new_src
= new_src
;
1841 assert(!new_src
.is_ssa
|| def
!= new_src
.ssa
);
1843 struct set
*new_uses
, *new_if_uses
;
1844 if (new_src
.is_ssa
) {
1845 new_uses
= new_src
.ssa
->uses
;
1846 new_if_uses
= new_src
.ssa
->if_uses
;
1848 new_uses
= new_src
.reg
.reg
->uses
;
1849 new_if_uses
= new_src
.reg
.reg
->if_uses
;
1852 struct set_entry
*entry
;
1853 set_foreach(def
->uses
, entry
) {
1854 nir_instr
*instr
= (nir_instr
*)entry
->key
;
1856 _mesa_set_remove(def
->uses
, entry
);
1857 nir_foreach_src(instr
, ssa_def_rewrite_uses_src
, &state
);
1858 _mesa_set_add(new_uses
, instr
);
1861 set_foreach(def
->if_uses
, entry
) {
1862 nir_if
*if_use
= (nir_if
*)entry
->key
;
1864 _mesa_set_remove(def
->if_uses
, entry
);
1865 nir_src_copy(&if_use
->condition
, &new_src
, mem_ctx
);
1866 _mesa_set_add(new_if_uses
, if_use
);
1871 static bool foreach_cf_node(nir_cf_node
*node
, nir_foreach_block_cb cb
,
1872 bool reverse
, void *state
);
1875 foreach_if(nir_if
*if_stmt
, nir_foreach_block_cb cb
, bool reverse
, void *state
)
1878 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
,
1879 &if_stmt
->else_list
) {
1880 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1884 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
,
1885 &if_stmt
->then_list
) {
1886 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1890 foreach_list_typed_safe(nir_cf_node
, node
, node
, &if_stmt
->then_list
) {
1891 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1895 foreach_list_typed_safe(nir_cf_node
, node
, node
, &if_stmt
->else_list
) {
1896 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1905 foreach_loop(nir_loop
*loop
, nir_foreach_block_cb cb
, bool reverse
, void *state
)
1908 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
, &loop
->body
) {
1909 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1913 foreach_list_typed_safe(nir_cf_node
, node
, node
, &loop
->body
) {
1914 if (!foreach_cf_node(node
, cb
, reverse
, state
))
1923 foreach_cf_node(nir_cf_node
*node
, nir_foreach_block_cb cb
,
1924 bool reverse
, void *state
)
1926 switch (node
->type
) {
1927 case nir_cf_node_block
:
1928 return cb(nir_cf_node_as_block(node
), state
);
1929 case nir_cf_node_if
:
1930 return foreach_if(nir_cf_node_as_if(node
), cb
, reverse
, state
);
1931 case nir_cf_node_loop
:
1932 return foreach_loop(nir_cf_node_as_loop(node
), cb
, reverse
, state
);
1936 unreachable("Invalid CFG node type");
1944 nir_foreach_block(nir_function_impl
*impl
, nir_foreach_block_cb cb
, void *state
)
1946 foreach_list_typed_safe(nir_cf_node
, node
, node
, &impl
->body
) {
1947 if (!foreach_cf_node(node
, cb
, false, state
))
1951 return cb(impl
->end_block
, state
);
1955 nir_foreach_block_reverse(nir_function_impl
*impl
, nir_foreach_block_cb cb
,
1958 if (!cb(impl
->end_block
, state
))
1961 foreach_list_typed_safe_reverse(nir_cf_node
, node
, node
, &impl
->body
) {
1962 if (!foreach_cf_node(node
, cb
, true, state
))
1970 nir_block_get_following_if(nir_block
*block
)
1972 if (exec_node_is_tail_sentinel(&block
->cf_node
.node
))
1975 if (nir_cf_node_is_last(&block
->cf_node
))
1978 nir_cf_node
*next_node
= nir_cf_node_next(&block
->cf_node
);
1980 if (next_node
->type
!= nir_cf_node_if
)
1983 return nir_cf_node_as_if(next_node
);
1987 index_block(nir_block
*block
, void *state
)
1989 unsigned *index
= state
;
1990 block
->index
= (*index
)++;
1995 nir_index_blocks(nir_function_impl
*impl
)
1999 if (impl
->valid_metadata
& nir_metadata_block_index
)
2002 nir_foreach_block(impl
, index_block
, &index
);
2004 impl
->num_blocks
= index
;
2008 index_ssa_def(nir_ssa_def
*def
, unsigned *index
)
2010 def
->index
= (*index
)++;
2014 index_ssa_def_cb(nir_dest
*dest
, void *state
)
2016 unsigned *index
= state
;
2018 index_ssa_def(&dest
->ssa
, index
);
2023 index_ssa_undef(nir_ssa_undef_instr
*instr
, unsigned *index
)
2025 index_ssa_def(&instr
->def
, index
);
2029 index_ssa_block(nir_block
*block
, void *state
)
2031 unsigned *index
= state
;
2033 nir_foreach_instr(block
, instr
) {
2034 if (instr
->type
== nir_instr_type_ssa_undef
)
2035 index_ssa_undef(nir_instr_as_ssa_undef(instr
), index
);
2037 nir_foreach_dest(instr
, index_ssa_def_cb
, state
);
2044 nir_index_ssa_defs(nir_function_impl
*impl
)
2047 nir_foreach_block(impl
, index_ssa_block
, &index
);
2048 impl
->ssa_alloc
= index
;