nir: Make some helpers for copying ALU src/dests.
[mesa.git] / src / glsl / nir / nir.c
1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Connor Abbott (cwabbott0@gmail.com)
25 *
26 */
27
28 #include "nir.h"
29 #include <assert.h>
30
31 nir_shader *
32 nir_shader_create(void *mem_ctx)
33 {
34 nir_shader *shader = ralloc(mem_ctx, nir_shader);
35
36 shader->uniforms = _mesa_hash_table_create(shader, _mesa_key_hash_string,
37 _mesa_key_string_equal);
38 shader->inputs = _mesa_hash_table_create(shader, _mesa_key_hash_string,
39 _mesa_key_string_equal);
40 shader->outputs = _mesa_hash_table_create(shader, _mesa_key_hash_string,
41 _mesa_key_string_equal);
42
43 shader->num_user_structures = 0;
44 shader->user_structures = NULL;
45
46 exec_list_make_empty(&shader->functions);
47 exec_list_make_empty(&shader->registers);
48 exec_list_make_empty(&shader->globals);
49 exec_list_make_empty(&shader->system_values);
50 shader->reg_alloc = 0;
51
52 shader->num_inputs = 0;
53 shader->num_outputs = 0;
54 shader->num_uniforms = 0;
55
56 return shader;
57 }
58
59 static nir_register *
60 reg_create(void *mem_ctx, struct exec_list *list)
61 {
62 nir_register *reg = ralloc(mem_ctx, nir_register);
63
64 reg->uses = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
65 _mesa_key_pointer_equal);
66 reg->defs = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
67 _mesa_key_pointer_equal);
68 reg->if_uses = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
69 _mesa_key_pointer_equal);
70
71 reg->num_components = 0;
72 reg->num_array_elems = 0;
73 reg->is_packed = false;
74 reg->name = NULL;
75
76 exec_list_push_tail(list, &reg->node);
77
78 return reg;
79 }
80
81 nir_register *
82 nir_global_reg_create(nir_shader *shader)
83 {
84 nir_register *reg = reg_create(shader, &shader->registers);
85 reg->index = shader->reg_alloc++;
86 reg->is_global = true;
87
88 return reg;
89 }
90
91 nir_register *
92 nir_local_reg_create(nir_function_impl *impl)
93 {
94 nir_register *reg = reg_create(ralloc_parent(impl), &impl->registers);
95 reg->index = impl->reg_alloc++;
96 reg->is_global = false;
97
98 return reg;
99 }
100
101 void
102 nir_reg_remove(nir_register *reg)
103 {
104 exec_node_remove(&reg->node);
105 }
106
107 nir_function *
108 nir_function_create(nir_shader *shader, const char *name)
109 {
110 nir_function *func = ralloc(shader, nir_function);
111
112 exec_list_push_tail(&shader->functions, &func->node);
113 exec_list_make_empty(&func->overload_list);
114 func->name = name;
115 func->shader = shader;
116
117 return func;
118 }
119
120 nir_function_overload *
121 nir_function_overload_create(nir_function *func)
122 {
123 void *mem_ctx = ralloc_parent(func);
124
125 nir_function_overload *overload = ralloc(mem_ctx, nir_function_overload);
126
127 overload->num_params = 0;
128 overload->params = NULL;
129 overload->return_type = glsl_void_type();
130 overload->impl = NULL;
131
132 exec_list_push_tail(&func->overload_list, &overload->node);
133 overload->function = func;
134
135 return overload;
136 }
137
138 nir_src nir_src_copy(nir_src src, void *mem_ctx)
139 {
140 nir_src ret;
141 ret.is_ssa = src.is_ssa;
142 if (ret.is_ssa) {
143 ret.ssa = src.ssa;
144 } else {
145 ret.reg.base_offset = src.reg.base_offset;
146 ret.reg.reg = src.reg.reg;
147 if (src.reg.indirect) {
148 ret.reg.indirect = ralloc(mem_ctx, nir_src);
149 *ret.reg.indirect = *src.reg.indirect;
150 } else {
151 ret.reg.indirect = NULL;
152 }
153 }
154
155 return ret;
156 }
157
158 nir_dest nir_dest_copy(nir_dest dest, void *mem_ctx)
159 {
160 nir_dest ret;
161 ret.is_ssa = dest.is_ssa;
162 if (ret.is_ssa) {
163 ret.ssa = dest.ssa;
164 } else {
165 ret.reg.base_offset = dest.reg.base_offset;
166 ret.reg.reg = dest.reg.reg;
167 if (dest.reg.indirect) {
168 ret.reg.indirect = ralloc(mem_ctx, nir_src);
169 *ret.reg.indirect = *dest.reg.indirect;
170 } else {
171 ret.reg.indirect = NULL;
172 }
173 }
174
175 return ret;
176 }
177
178 void
179 nir_alu_src_copy(nir_alu_src *dest, const nir_alu_src *src, void *mem_ctx)
180 {
181 dest->src = nir_src_copy(src->src, mem_ctx);
182 dest->abs = src->abs;
183 dest->negate = src->negate;
184 for (unsigned i = 0; i < 4; i++)
185 dest->swizzle[i] = src->swizzle[i];
186 }
187
188 void
189 nir_alu_dest_copy(nir_alu_dest *dest, const nir_alu_dest *src, void *mem_ctx)
190 {
191 dest->dest = nir_dest_copy(src->dest, mem_ctx);
192 dest->write_mask = src->write_mask;
193 dest->saturate = src->saturate;
194 }
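
/*
 * A minimal sketch of how the copy helpers above might be used to duplicate
 * an ALU instruction; clone_alu_instr is a hypothetical helper, not part of
 * this file.
 */
static nir_alu_instr *
clone_alu_instr(nir_alu_instr *alu, void *mem_ctx)
{
   nir_alu_instr *copy = nir_alu_instr_create(mem_ctx, alu->op);

   nir_alu_dest_copy(&copy->dest, &alu->dest, mem_ctx);
   for (unsigned i = 0; i < nir_op_infos[alu->op].num_inputs; i++)
      nir_alu_src_copy(&copy->src[i], &alu->src[i], mem_ctx);

   return copy;
}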
195
196 static inline void
197 block_add_pred(nir_block *block, nir_block *pred)
198 {
199 _mesa_set_add(block->predecessors, pred);
200 }
201
202 static void
203 cf_init(nir_cf_node *node, nir_cf_node_type type)
204 {
205 exec_node_init(&node->node);
206 node->parent = NULL;
207 node->type = type;
208 }
209
210 static void
211 link_blocks(nir_block *pred, nir_block *succ1, nir_block *succ2)
212 {
213 pred->successors[0] = succ1;
214 block_add_pred(succ1, pred);
215
216 pred->successors[1] = succ2;
217 if (succ2 != NULL)
218 block_add_pred(succ2, pred);
219 }
220
221 static void
222 unlink_blocks(nir_block *pred, nir_block *succ)
223 {
224 if (pred->successors[0] == succ) {
225 pred->successors[0] = pred->successors[1];
226 pred->successors[1] = NULL;
227 } else {
228 assert(pred->successors[1] == succ);
229 pred->successors[1] = NULL;
230 }
231
232 struct set_entry *entry = _mesa_set_search(succ->predecessors, pred);
233
234 assert(entry);
235
236 _mesa_set_remove(succ->predecessors, entry);
237 }
238
239 static void
240 unlink_block_successors(nir_block *block)
241 {
242 if (block->successors[0] != NULL)
243 unlink_blocks(block, block->successors[0]);
244 if (block->successors[1] != NULL)
245 unlink_blocks(block, block->successors[1]);
246 }
247
248
249 nir_function_impl *
250 nir_function_impl_create(nir_function_overload *overload)
251 {
252 assert(overload->impl == NULL);
253
254 void *mem_ctx = ralloc_parent(overload);
255
256 nir_function_impl *impl = ralloc(mem_ctx, nir_function_impl);
257
258 overload->impl = impl;
259 impl->overload = overload;
260
261 cf_init(&impl->cf_node, nir_cf_node_function);
262
263 exec_list_make_empty(&impl->body);
264 exec_list_make_empty(&impl->registers);
265 exec_list_make_empty(&impl->locals);
266 impl->num_params = 0;
267 impl->params = NULL;
268 impl->return_var = NULL;
269 impl->reg_alloc = 0;
270 impl->ssa_alloc = 0;
271 impl->valid_metadata = nir_metadata_none;
272
273 /* create start & end blocks */
274 nir_block *start_block = nir_block_create(mem_ctx);
275 nir_block *end_block = nir_block_create(mem_ctx);
276 start_block->cf_node.parent = &impl->cf_node;
277 end_block->cf_node.parent = &impl->cf_node;
278 impl->start_block = start_block;
279 impl->end_block = end_block;
280
281 exec_list_push_tail(&impl->body, &start_block->cf_node.node);
282
283 start_block->successors[0] = end_block;
284 block_add_pred(end_block, start_block);
285
286 return impl;
287 }
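
/*
 * A minimal sketch of how the constructors in this file fit together when
 * building a shader from scratch; build_empty_impl is a hypothetical helper,
 * not part of this file.
 */
static nir_function_impl *
build_empty_impl(void *mem_ctx)
{
   nir_shader *shader = nir_shader_create(mem_ctx);
   nir_function *func = nir_function_create(shader, "main");
   nir_function_overload *overload = nir_function_overload_create(func);

   /* The implementation starts out with only its start and end blocks. */
   return nir_function_impl_create(overload);
}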
288
289 nir_block *
290 nir_block_create(void *mem_ctx)
291 {
292 nir_block *block = ralloc(mem_ctx, nir_block);
293
294 cf_init(&block->cf_node, nir_cf_node_block);
295
296 block->successors[0] = block->successors[1] = NULL;
297 block->predecessors = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
298 _mesa_key_pointer_equal);
299 block->imm_dom = NULL;
300 block->dom_frontier = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
301 _mesa_key_pointer_equal);
302
303 exec_list_make_empty(&block->instr_list);
304
305 return block;
306 }
307
308 static inline void
309 src_init(nir_src *src)
310 {
311 src->is_ssa = false;
312 src->reg.reg = NULL;
313 src->reg.indirect = NULL;
314 src->reg.base_offset = 0;
315 }
316
317 nir_if *
318 nir_if_create(void *mem_ctx)
319 {
320 nir_if *if_stmt = ralloc(mem_ctx, nir_if);
321
322 cf_init(&if_stmt->cf_node, nir_cf_node_if);
323 src_init(&if_stmt->condition);
324
325 nir_block *then = nir_block_create(mem_ctx);
326 exec_list_make_empty(&if_stmt->then_list);
327 exec_list_push_tail(&if_stmt->then_list, &then->cf_node.node);
328 then->cf_node.parent = &if_stmt->cf_node;
329
330 nir_block *else_stmt = nir_block_create(mem_ctx);
331 exec_list_make_empty(&if_stmt->else_list);
332 exec_list_push_tail(&if_stmt->else_list, &else_stmt->cf_node.node);
333 else_stmt->cf_node.parent = &if_stmt->cf_node;
334
335 return if_stmt;
336 }
337
338 nir_loop *
339 nir_loop_create(void *mem_ctx)
340 {
341 nir_loop *loop = ralloc(mem_ctx, nir_loop);
342
343 cf_init(&loop->cf_node, nir_cf_node_loop);
344
345 nir_block *body = nir_block_create(mem_ctx);
346 exec_list_make_empty(&loop->body);
347 exec_list_push_tail(&loop->body, &body->cf_node.node);
348 body->cf_node.parent = &loop->cf_node;
349
350 body->successors[0] = body;
351 block_add_pred(body, body);
352
353 return loop;
354 }
355
356 static void
357 instr_init(nir_instr *instr, nir_instr_type type)
358 {
359 instr->type = type;
360 instr->block = NULL;
361 exec_node_init(&instr->node);
362 }
363
364 static void
365 dest_init(nir_dest *dest)
366 {
367 dest->is_ssa = false;
368 dest->reg.reg = NULL;
369 dest->reg.indirect = NULL;
370 dest->reg.base_offset = 0;
371 }
372
373 static void
374 alu_dest_init(nir_alu_dest *dest)
375 {
376 dest_init(&dest->dest);
377 dest->saturate = false;
378 dest->write_mask = 0xf;
379 }
380
381 static void
382 alu_src_init(nir_alu_src *src)
383 {
384 src_init(&src->src);
385 src->abs = src->negate = false;
386 src->swizzle[0] = 0;
387 src->swizzle[1] = 1;
388 src->swizzle[2] = 2;
389 src->swizzle[3] = 3;
390 }
391
392 nir_alu_instr *
393 nir_alu_instr_create(void *mem_ctx, nir_op op)
394 {
395 unsigned num_srcs = nir_op_infos[op].num_inputs;
396 nir_alu_instr *instr =
397 ralloc_size(mem_ctx,
398 sizeof(nir_alu_instr) + num_srcs * sizeof(nir_alu_src));
399
400 instr_init(&instr->instr, nir_instr_type_alu);
401 instr->op = op;
402 alu_dest_init(&instr->dest);
403 for (unsigned i = 0; i < num_srcs; i++)
404 alu_src_init(&instr->src[i]);
405
406 return instr;
407 }
408
409 nir_jump_instr *
410 nir_jump_instr_create(void *mem_ctx, nir_jump_type type)
411 {
412 nir_jump_instr *instr = ralloc(mem_ctx, nir_jump_instr);
413 instr_init(&instr->instr, nir_instr_type_jump);
414 instr->type = type;
415 return instr;
416 }
417
418 nir_load_const_instr *
419 nir_load_const_instr_create(void *mem_ctx, unsigned num_components)
420 {
421 nir_load_const_instr *instr = ralloc(mem_ctx, nir_load_const_instr);
422 instr_init(&instr->instr, nir_instr_type_load_const);
423
424 nir_ssa_def_init(&instr->instr, &instr->def, num_components, NULL);
425
426 return instr;
427 }
428
429 nir_intrinsic_instr *
430 nir_intrinsic_instr_create(void *mem_ctx, nir_intrinsic_op op)
431 {
432 unsigned num_srcs = nir_intrinsic_infos[op].num_srcs;
433 nir_intrinsic_instr *instr =
434 ralloc_size(mem_ctx,
435 sizeof(nir_intrinsic_instr) + num_srcs * sizeof(nir_src));
436
437 instr_init(&instr->instr, nir_instr_type_intrinsic);
438 instr->intrinsic = op;
439
440 if (nir_intrinsic_infos[op].has_dest)
441 dest_init(&instr->dest);
442
443 for (unsigned i = 0; i < num_srcs; i++)
444 src_init(&instr->src[i]);
445
446 return instr;
447 }
448
449 nir_call_instr *
450 nir_call_instr_create(void *mem_ctx, nir_function_overload *callee)
451 {
452 nir_call_instr *instr = ralloc(mem_ctx, nir_call_instr);
453 instr_init(&instr->instr, nir_instr_type_call);
454
455 instr->callee = callee;
456 instr->num_params = callee->num_params;
457 instr->params = ralloc_array(mem_ctx, nir_deref_var *, instr->num_params);
458 instr->return_deref = NULL;
459
460 return instr;
461 }
462
463 nir_tex_instr *
464 nir_tex_instr_create(void *mem_ctx, unsigned num_srcs)
465 {
466 nir_tex_instr *instr = ralloc(mem_ctx, nir_tex_instr);
467 instr_init(&instr->instr, nir_instr_type_tex);
468
469 dest_init(&instr->dest);
470
471 instr->num_srcs = num_srcs;
472 instr->src = ralloc_array(mem_ctx, nir_tex_src, num_srcs);
473 for (unsigned i = 0; i < num_srcs; i++)
474 src_init(&instr->src[i].src);
475
476 instr->sampler_index = 0;
477 instr->sampler_array_size = 0;
478 instr->sampler = NULL;
479
480 return instr;
481 }
482
483 nir_phi_instr *
484 nir_phi_instr_create(void *mem_ctx)
485 {
486 nir_phi_instr *instr = ralloc(mem_ctx, nir_phi_instr);
487 instr_init(&instr->instr, nir_instr_type_phi);
488
489 dest_init(&instr->dest);
490 exec_list_make_empty(&instr->srcs);
491 return instr;
492 }
493
494 nir_parallel_copy_instr *
495 nir_parallel_copy_instr_create(void *mem_ctx)
496 {
497 nir_parallel_copy_instr *instr = ralloc(mem_ctx, nir_parallel_copy_instr);
498 instr_init(&instr->instr, nir_instr_type_parallel_copy);
499
500 exec_list_make_empty(&instr->entries);
501
502 return instr;
503 }
504
505 nir_ssa_undef_instr *
506 nir_ssa_undef_instr_create(void *mem_ctx, unsigned num_components)
507 {
508 nir_ssa_undef_instr *instr = ralloc(mem_ctx, nir_ssa_undef_instr);
509 instr_init(&instr->instr, nir_instr_type_ssa_undef);
510
511 nir_ssa_def_init(&instr->instr, &instr->def, num_components, NULL);
512
513 return instr;
514 }
515
516 nir_deref_var *
517 nir_deref_var_create(void *mem_ctx, nir_variable *var)
518 {
519 nir_deref_var *deref = ralloc(mem_ctx, nir_deref_var);
520 deref->deref.deref_type = nir_deref_type_var;
521 deref->deref.child = NULL;
522 deref->deref.type = var->type;
523 deref->var = var;
524 return deref;
525 }
526
527 nir_deref_array *
528 nir_deref_array_create(void *mem_ctx)
529 {
530 nir_deref_array *deref = ralloc(mem_ctx, nir_deref_array);
531 deref->deref.deref_type = nir_deref_type_array;
532 deref->deref.child = NULL;
533 deref->deref_array_type = nir_deref_array_type_direct;
534 src_init(&deref->indirect);
535 deref->base_offset = 0;
536 return deref;
537 }
538
539 nir_deref_struct *
540 nir_deref_struct_create(void *mem_ctx, unsigned field_index)
541 {
542 nir_deref_struct *deref = ralloc(mem_ctx, nir_deref_struct);
543 deref->deref.deref_type = nir_deref_type_struct;
544 deref->deref.child = NULL;
545 deref->index = field_index;
546 return deref;
547 }
548
549 static nir_deref_var *
550 copy_deref_var(void *mem_ctx, nir_deref_var *deref)
551 {
552 nir_deref_var *ret = nir_deref_var_create(mem_ctx, deref->var);
553 ret->deref.type = deref->deref.type;
554 if (deref->deref.child)
555 ret->deref.child = nir_copy_deref(mem_ctx, deref->deref.child);
556 return ret;
557 }
558
559 static nir_deref_array *
560 copy_deref_array(void *mem_ctx, nir_deref_array *deref)
561 {
562 nir_deref_array *ret = nir_deref_array_create(mem_ctx);
563 ret->base_offset = deref->base_offset;
564 ret->deref_array_type = deref->deref_array_type;
565 if (deref->deref_array_type == nir_deref_array_type_indirect) {
566 ret->indirect = nir_src_copy(deref->indirect, mem_ctx);
567 }
568 ret->deref.type = deref->deref.type;
569 if (deref->deref.child)
570 ret->deref.child = nir_copy_deref(mem_ctx, deref->deref.child);
571 return ret;
572 }
573
574 static nir_deref_struct *
575 copy_deref_struct(void *mem_ctx, nir_deref_struct *deref)
576 {
577 nir_deref_struct *ret = nir_deref_struct_create(mem_ctx, deref->index);
578 ret->deref.type = deref->deref.type;
579 if (deref->deref.child)
580 ret->deref.child = nir_copy_deref(mem_ctx, deref->deref.child);
581 return ret;
582 }
583
584 nir_deref *
585 nir_copy_deref(void *mem_ctx, nir_deref *deref)
586 {
587 switch (deref->deref_type) {
588 case nir_deref_type_var:
589 return &copy_deref_var(mem_ctx, nir_deref_as_var(deref))->deref;
590 case nir_deref_type_array:
591 return &copy_deref_array(mem_ctx, nir_deref_as_array(deref))->deref;
592 case nir_deref_type_struct:
593 return &copy_deref_struct(mem_ctx, nir_deref_as_struct(deref))->deref;
594 default:
595 unreachable("Invalid dereference type");
596 }
597
598 return NULL;
599 }
600
601
602 /**
603 * \name Control flow modification
604 *
605 * These functions modify the control flow tree while keeping the control flow
606 * graph up-to-date. The invariants respected are:
607 * 1. Each then statement, else statement, or loop body must have at least one
608 * control flow node.
609 * 2. Each if-statement and loop must have one basic block before it and one
610 * after.
611 * 3. Two basic blocks cannot be directly next to each other.
612 * 4. If a basic block has a jump instruction, there must be only one and it
613 * must be at the end of the block.
614 * 5. The CFG must always be connected - this means that we must insert a fake
615 * CFG edge for loops with no break statement.
616 *
617 * The purpose of the second invariant is to give us places to insert code
618 * during GCM, as well as to eliminate the possibility of critical edges.
619 */
620 /*@{*/
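
/*
 * A minimal sketch of the typical usage pattern: create an if-statement,
 * give it a condition, and splice it in after an existing block.
 * insert_if_after_block is a hypothetical helper, not part of this file;
 * nir_cf_node_insert_after() is defined later in this section.
 */
static nir_if *
insert_if_after_block(nir_block *block, nir_src condition, void *mem_ctx)
{
   nir_if *if_stmt = nir_if_create(mem_ctx);

   /* The condition must be set before insertion so that update_if_uses()
    * can record the new use.
    */
   if_stmt->condition = nir_src_copy(condition, mem_ctx);

   nir_cf_node_insert_after(&block->cf_node, &if_stmt->cf_node);

   return if_stmt;
}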
621
622 static void
623 link_non_block_to_block(nir_cf_node *node, nir_block *block)
624 {
625 if (node->type == nir_cf_node_if) {
626 /*
627 * We're trying to link an if to a block after it; this just means linking
628 * the last block of the then and else branches.
629 */
630
631 nir_if *if_stmt = nir_cf_node_as_if(node);
632
633 nir_cf_node *last_then = nir_if_last_then_node(if_stmt);
634 assert(last_then->type == nir_cf_node_block);
635 nir_block *last_then_block = nir_cf_node_as_block(last_then);
636
637 nir_cf_node *last_else = nir_if_last_else_node(if_stmt);
638 assert(last_else->type == nir_cf_node_block);
639 nir_block *last_else_block = nir_cf_node_as_block(last_else);
640
641 if (exec_list_is_empty(&last_then_block->instr_list) ||
642 nir_block_last_instr(last_then_block)->type != nir_instr_type_jump) {
643 unlink_block_successors(last_then_block);
644 link_blocks(last_then_block, block, NULL);
645 }
646
647 if (exec_list_is_empty(&last_else_block->instr_list) ||
648 nir_block_last_instr(last_else_block)->type != nir_instr_type_jump) {
649 unlink_block_successors(last_else_block);
650 link_blocks(last_else_block, block, NULL);
651 }
652 } else {
653 assert(node->type == nir_cf_node_loop);
654
655 /*
656 * We can only get to this codepath if we're inserting a new loop, or
657 * at least a loop with no break statements; we can't insert break
658 * statements into a loop when we haven't inserted it into the CFG
659 * because we wouldn't know which block comes after the loop
660 * and therefore, which block should be the successor of the block with
661 * the break. Therefore, we need to insert a fake edge (see invariant
662 * #5).
663 */
664
665 nir_loop *loop = nir_cf_node_as_loop(node);
666
667 nir_cf_node *last = nir_loop_last_cf_node(loop);
668 assert(last->type == nir_cf_node_block);
669 nir_block *last_block = nir_cf_node_as_block(last);
670
671 last_block->successors[1] = block;
672 block_add_pred(block, last_block);
673 }
674 }
675
676 static void
677 link_block_to_non_block(nir_block *block, nir_cf_node *node)
678 {
679 if (node->type == nir_cf_node_if) {
680 /*
681 * We're trying to link a block to an if after it; this just means linking
682 * the block to the first block of the then and else branches.
683 */
684
685 nir_if *if_stmt = nir_cf_node_as_if(node);
686
687 nir_cf_node *first_then = nir_if_first_then_node(if_stmt);
688 assert(first_then->type == nir_cf_node_block);
689 nir_block *first_then_block = nir_cf_node_as_block(first_then);
690
691 nir_cf_node *first_else = nir_if_first_else_node(if_stmt);
692 assert(first_else->type == nir_cf_node_block);
693 nir_block *first_else_block = nir_cf_node_as_block(first_else);
694
695 unlink_block_successors(block);
696 link_blocks(block, first_then_block, first_else_block);
697 } else {
698 /*
699 * For similar reasons as the corresponding case in
700 * link_non_block_to_block(), don't worry about whether the loop header has
701 * any predecessors that need to be unlinked.
702 */
703
704 assert(node->type == nir_cf_node_loop);
705
706 nir_loop *loop = nir_cf_node_as_loop(node);
707
708 nir_cf_node *loop_header = nir_loop_first_cf_node(loop);
709 assert(loop_header->type == nir_cf_node_block);
710 nir_block *loop_header_block = nir_cf_node_as_block(loop_header);
711
712 unlink_block_successors(block);
713 link_blocks(block, loop_header_block, NULL);
714 }
715
716 }
717
718 /**
719 * Takes a basic block and inserts a new empty basic block before it, making its
720 * predecessors point to the new block. This essentially splits the block into
721 * an empty header and a body so that another non-block CF node can be inserted
722 * between the two. Note that this does *not* link the two basic blocks, so
723 * some kind of cleanup *must* be performed after this call.
724 */
725
726 static nir_block *
727 split_block_beginning(nir_block *block)
728 {
729 nir_block *new_block = nir_block_create(ralloc_parent(block));
730 new_block->cf_node.parent = block->cf_node.parent;
731 exec_node_insert_node_before(&block->cf_node.node, &new_block->cf_node.node);
732
733 struct set_entry *entry;
734 set_foreach(block->predecessors, entry) {
735 nir_block *pred = (nir_block *) entry->key;
736
737 unlink_blocks(pred, block);
738 link_blocks(pred, new_block, NULL);
739 }
740
741 return new_block;
742 }
743
744 static void
745 rewrite_phi_preds(nir_block *block, nir_block *old_pred, nir_block *new_pred)
746 {
747 nir_foreach_instr_safe(block, instr) {
748 if (instr->type != nir_instr_type_phi)
749 break;
750
751 nir_phi_instr *phi = nir_instr_as_phi(instr);
752 nir_foreach_phi_src(phi, src) {
753 if (src->pred == old_pred) {
754 src->pred = new_pred;
755 break;
756 }
757 }
758 }
759 }
760
761 /**
762 * Moves the successors of source to the successors of dest, leaving both
763 * successors of source NULL.
764 */
765
766 static void
767 move_successors(nir_block *source, nir_block *dest)
768 {
769 nir_block *succ1 = source->successors[0];
770 nir_block *succ2 = source->successors[1];
771
772 if (succ1) {
773 unlink_blocks(source, succ1);
774 rewrite_phi_preds(succ1, source, dest);
775 }
776
777 if (succ2) {
778 unlink_blocks(source, succ2);
779 rewrite_phi_preds(succ2, source, dest);
780 }
781
782 unlink_block_successors(dest);
783 link_blocks(dest, succ1, succ2);
784 }
785
786 static nir_block *
787 split_block_end(nir_block *block)
788 {
789 nir_block *new_block = nir_block_create(ralloc_parent(block));
790 new_block->cf_node.parent = block->cf_node.parent;
791 exec_node_insert_after(&block->cf_node.node, &new_block->cf_node.node);
792
793 move_successors(block, new_block);
794
795 return new_block;
796 }
797
798 /**
799 * Inserts a non-basic block between two basic blocks and links them together.
800 */
801
802 static void
803 insert_non_block(nir_block *before, nir_cf_node *node, nir_block *after)
804 {
805 node->parent = before->cf_node.parent;
806 exec_node_insert_after(&before->cf_node.node, &node->node);
807 link_block_to_non_block(before, node);
808 link_non_block_to_block(node, after);
809 }
810
811 /**
812 * Inserts a non-basic block before a basic block.
813 */
814
815 static void
816 insert_non_block_before_block(nir_cf_node *node, nir_block *block)
817 {
818 /* split off the beginning of block into new_block */
819 nir_block *new_block = split_block_beginning(block);
820
821 /* insert our node in between new_block and block */
822 insert_non_block(new_block, node, block);
823 }
824
825 static void
826 insert_non_block_after_block(nir_block *block, nir_cf_node *node)
827 {
828 /* split off the end of block into new_block */
829 nir_block *new_block = split_block_end(block);
830
831 /* insert our node in between block and new_block */
832 insert_non_block(block, node, new_block);
833 }
834
835 /* walk up the control flow tree to find the innermost enclosing loop */
836 static nir_loop *
837 nearest_loop(nir_cf_node *node)
838 {
839 while (node->type != nir_cf_node_loop) {
840 node = node->parent;
841 }
842
843 return nir_cf_node_as_loop(node);
844 }
845
846 nir_function_impl *
847 nir_cf_node_get_function(nir_cf_node *node)
848 {
849 while (node->type != nir_cf_node_function) {
850 node = node->parent;
851 }
852
853 return nir_cf_node_as_function(node);
854 }
855
856 /*
857 * update the CFG after a jump instruction has been added to the end of a block
858 */
859
860 static void
861 handle_jump(nir_block *block)
862 {
863 nir_instr *instr = nir_block_last_instr(block);
864 nir_jump_instr *jump_instr = nir_instr_as_jump(instr);
865
866 unlink_block_successors(block);
867
868 nir_function_impl *impl = nir_cf_node_get_function(&block->cf_node);
869 nir_metadata_preserve(impl, nir_metadata_none);
870
871 if (jump_instr->type == nir_jump_break ||
872 jump_instr->type == nir_jump_continue) {
873 nir_loop *loop = nearest_loop(&block->cf_node);
874
875 if (jump_instr->type == nir_jump_continue) {
876 nir_cf_node *first_node = nir_loop_first_cf_node(loop);
877 assert(first_node->type == nir_cf_node_block);
878 nir_block *first_block = nir_cf_node_as_block(first_node);
879 link_blocks(block, first_block, NULL);
880 } else {
881 nir_cf_node *after = nir_cf_node_next(&loop->cf_node);
882 assert(after->type == nir_cf_node_block);
883 nir_block *after_block = nir_cf_node_as_block(after);
884 link_blocks(block, after_block, NULL);
885
886 /* If we inserted a fake link, remove it */
887 nir_cf_node *last = nir_loop_last_cf_node(loop);
888 assert(last->type == nir_cf_node_block);
889 nir_block *last_block = nir_cf_node_as_block(last);
890 if (last_block->successors[1] != NULL)
891 unlink_blocks(last_block, after_block);
892 }
893 } else {
894 assert(jump_instr->type == nir_jump_return);
895 link_blocks(block, impl->end_block, NULL);
896 }
897 }
898
899 static void
900 handle_remove_jump(nir_block *block, nir_jump_type type)
901 {
902 unlink_block_successors(block);
903
904 if (exec_node_is_tail_sentinel(block->cf_node.node.next)) {
905 nir_cf_node *parent = block->cf_node.parent;
906 if (parent->type == nir_cf_node_if) {
907 nir_cf_node *next = nir_cf_node_next(parent);
908 assert(next->type == nir_cf_node_block);
909 nir_block *next_block = nir_cf_node_as_block(next);
910
911 link_blocks(block, next_block, NULL);
912 } else {
913 assert(parent->type == nir_cf_node_loop);
914 nir_loop *loop = nir_cf_node_as_loop(parent);
915
916 nir_cf_node *head = nir_loop_first_cf_node(loop);
917 assert(head->type == nir_cf_node_block);
918 nir_block *head_block = nir_cf_node_as_block(head);
919
920 link_blocks(block, head_block, NULL);
921 }
922 } else {
923 nir_cf_node *next = nir_cf_node_next(&block->cf_node);
924 if (next->type == nir_cf_node_if) {
925 nir_if *next_if = nir_cf_node_as_if(next);
926
927 nir_cf_node *first_then = nir_if_first_then_node(next_if);
928 assert(first_then->type == nir_cf_node_block);
929 nir_block *first_then_block = nir_cf_node_as_block(first_then);
930
931 nir_cf_node *first_else = nir_if_first_else_node(next_if);
932 assert(first_else->type == nir_cf_node_block);
933 nir_block *first_else_block = nir_cf_node_as_block(first_else);
934
935 link_blocks(block, first_then_block, first_else_block);
936 } else {
937 assert(next->type == nir_cf_node_loop);
938 nir_loop *next_loop = nir_cf_node_as_loop(next);
939
940 nir_cf_node *first = nir_loop_first_cf_node(next_loop);
941 assert(first->type == nir_cf_node_block);
942 nir_block *first_block = nir_cf_node_as_block(first);
943
944 link_blocks(block, first_block, NULL);
945 }
946 }
947
948 if (type == nir_jump_break) {
949 nir_loop *loop = nearest_loop(&block->cf_node);
950
951 nir_cf_node *next = nir_cf_node_next(&loop->cf_node);
952 assert(next->type == nir_cf_node_block);
953 nir_block *next_block = nir_cf_node_as_block(next);
954
955 if (next_block->predecessors->entries == 0) {
956 /* insert fake link */
957 nir_cf_node *last = nir_loop_last_cf_node(loop);
958 assert(last->type == nir_cf_node_block);
959 nir_block *last_block = nir_cf_node_as_block(last);
960
961 last_block->successors[1] = next_block;
962 block_add_pred(next_block, last_block);
963 }
964 }
965
966 nir_function_impl *impl = nir_cf_node_get_function(&block->cf_node);
967 nir_metadata_preserve(impl, nir_metadata_none);
968 }
969
970 /**
971 * Inserts a basic block before another by merging the instructions.
972 *
973 * @param block the target of the insertion
974 * @param before the block to be inserted - must not have been inserted before
975 * @param has_jump whether \p before has a jump instruction at the end
976 */
977
978 static void
979 insert_block_before_block(nir_block *block, nir_block *before, bool has_jump)
980 {
981 assert(!has_jump || exec_list_is_empty(&block->instr_list));
982
983 foreach_list_typed(nir_instr, instr, node, &before->instr_list) {
984 instr->block = block;
985 }
986
987 exec_list_prepend(&block->instr_list, &before->instr_list);
988
989 if (has_jump)
990 handle_jump(block);
991 }
992
993 /**
994 * Inserts a basic block after another by merging the instructions.
995 *
996 * @param block the target of the insertion
997 * @param after the block to be inserted - must not have been inserted before
998 * @param has_jump whether \p after has a jump instruction at the end
999 */
1000
1001 static void
1002 insert_block_after_block(nir_block *block, nir_block *after, bool has_jump)
1003 {
1004 foreach_list_typed(nir_instr, instr, node, &after->instr_list) {
1005 instr->block = block;
1006 }
1007
1008 exec_list_append(&block->instr_list, &after->instr_list);
1009
1010 if (has_jump)
1011 handle_jump(block);
1012 }
1013
1014 static void
1015 update_if_uses(nir_cf_node *node)
1016 {
1017 if (node->type != nir_cf_node_if)
1018 return;
1019
1020 nir_if *if_stmt = nir_cf_node_as_if(node);
1021
1022 struct set *if_uses_set = if_stmt->condition.is_ssa ?
1023 if_stmt->condition.ssa->if_uses :
1024 if_stmt->condition.reg.reg->uses;
1025
1026 _mesa_set_add(if_uses_set, if_stmt);
1027 }
1028
1029 void
1030 nir_cf_node_insert_after(nir_cf_node *node, nir_cf_node *after)
1031 {
1032 update_if_uses(after);
1033
1034 if (after->type == nir_cf_node_block) {
1035 /*
1036 * either node or the one after it must be a basic block, by invariant #2;
1037 * in either case, just merge the blocks together.
1038 */
1039 nir_block *after_block = nir_cf_node_as_block(after);
1040
1041 bool has_jump = !exec_list_is_empty(&after_block->instr_list) &&
1042 nir_block_last_instr(after_block)->type == nir_instr_type_jump;
1043
1044 if (node->type == nir_cf_node_block) {
1045 insert_block_after_block(nir_cf_node_as_block(node), after_block,
1046 has_jump);
1047 } else {
1048 nir_cf_node *next = nir_cf_node_next(node);
1049 assert(next->type == nir_cf_node_block);
1050 nir_block *next_block = nir_cf_node_as_block(next);
1051
1052 insert_block_before_block(next_block, after_block, has_jump);
1053 }
1054 } else {
1055 if (node->type == nir_cf_node_block) {
1056 insert_non_block_after_block(nir_cf_node_as_block(node), after);
1057 } else {
1058 /*
1059 * We have to insert a non-basic block after a non-basic block. Since
1060 * every non-basic block has a basic block after it, this is equivalent
1061 * to inserting a non-basic block before a basic block.
1062 */
1063
1064 nir_cf_node *next = nir_cf_node_next(node);
1065 assert(next->type == nir_cf_node_block);
1066 nir_block *next_block = nir_cf_node_as_block(next);
1067
1068 insert_non_block_before_block(after, next_block);
1069 }
1070 }
1071
1072 nir_function_impl *impl = nir_cf_node_get_function(node);
1073 nir_metadata_preserve(impl, nir_metadata_none);
1074 }
1075
1076 void
1077 nir_cf_node_insert_before(nir_cf_node *node, nir_cf_node *before)
1078 {
1079 update_if_uses(before);
1080
1081 if (before->type == nir_cf_node_block) {
1082 nir_block *before_block = nir_cf_node_as_block(before);
1083
1084 bool has_jump = !exec_list_is_empty(&before_block->instr_list) &&
1085 nir_block_last_instr(before_block)->type == nir_instr_type_jump;
1086
1087 if (node->type == nir_cf_node_block) {
1088 insert_block_before_block(nir_cf_node_as_block(node), before_block,
1089 has_jump);
1090 } else {
1091 nir_cf_node *prev = nir_cf_node_prev(node);
1092 assert(prev->type == nir_cf_node_block);
1093 nir_block *prev_block = nir_cf_node_as_block(prev);
1094
1095 insert_block_after_block(prev_block, before_block, has_jump);
1096 }
1097 } else {
1098 if (node->type == nir_cf_node_block) {
1099 insert_non_block_before_block(before, nir_cf_node_as_block(node));
1100 } else {
1101 /*
1102 * We have to insert a non-basic block before a non-basic block. This
1103 * is equivalent to inserting a non-basic block after a basic block.
1104 */
1105
1106 nir_cf_node *prev_node = nir_cf_node_prev(node);
1107 assert(prev_node->type == nir_cf_node_block);
1108 nir_block *prev_block = nir_cf_node_as_block(prev_node);
1109
1110 insert_non_block_after_block(prev_block, before);
1111 }
1112 }
1113
1114 nir_function_impl *impl = nir_cf_node_get_function(node);
1115 nir_metadata_preserve(impl, nir_metadata_none);
1116 }
1117
1118 void
1119 nir_cf_node_insert_begin(struct exec_list *list, nir_cf_node *node)
1120 {
1121 nir_cf_node *begin = exec_node_data(nir_cf_node, list->head, node);
1122 nir_cf_node_insert_before(begin, node);
1123 }
1124
1125 void
1126 nir_cf_node_insert_end(struct exec_list *list, nir_cf_node *node)
1127 {
1128 nir_cf_node *end = exec_node_data(nir_cf_node, list->tail_pred, node);
1129 nir_cf_node_insert_after(end, node);
1130 }
1131
1132 /**
1133 * Stitch two basic blocks together into one. The aggregate must have the same
1134 * predecessors as the first and the same successors as the second.
1135 */
1136
1137 static void
1138 stitch_blocks(nir_block *before, nir_block *after)
1139 {
1140 /*
1141 * We move after into before, so we have to deal with up to 2 successors vs.
1142 * possibly a large number of predecessors.
1143 *
1144 * TODO: special case when before is empty and after isn't?
1145 */
1146
1147 move_successors(after, before);
1148
1149 foreach_list_typed(nir_instr, instr, node, &after->instr_list) {
1150 instr->block = before;
1151 }
1152
1153 exec_list_append(&before->instr_list, &after->instr_list);
1154 exec_node_remove(&after->cf_node.node);
1155 }
1156
1157 void
1158 nir_cf_node_remove(nir_cf_node *node)
1159 {
1160 nir_function_impl *impl = nir_cf_node_get_function(node);
1161 nir_metadata_preserve(impl, nir_metadata_none);
1162
1163 if (node->type == nir_cf_node_block) {
1164 /*
1165 * Basic blocks can't really be removed by themselves, since they act as
1166 * padding between the non-basic blocks. So all we do here is empty the
1167 * block of instructions.
1168 *
1169 * TODO: could we assert here?
1170 */
1171 exec_list_make_empty(&nir_cf_node_as_block(node)->instr_list);
1172 } else {
1173 nir_cf_node *before = nir_cf_node_prev(node);
1174 assert(before->type == nir_cf_node_block);
1175 nir_block *before_block = nir_cf_node_as_block(before);
1176
1177 nir_cf_node *after = nir_cf_node_next(node);
1178 assert(after->type == nir_cf_node_block);
1179 nir_block *after_block = nir_cf_node_as_block(after);
1180
1181 exec_node_remove(&node->node);
1182 stitch_blocks(before_block, after_block);
1183 }
1184 }
1185
1186 static bool
1187 add_use_cb(nir_src *src, void *state)
1188 {
1189 nir_instr *instr = (nir_instr *) state;
1190
1191 struct set *uses_set = src->is_ssa ? src->ssa->uses : src->reg.reg->uses;
1192
1193 _mesa_set_add(uses_set, instr);
1194
1195 return true;
1196 }
1197
1198 static bool
1199 add_ssa_def_cb(nir_ssa_def *def, void *state)
1200 {
1201 nir_instr *instr = (nir_instr *) state;
1202
1203 if (instr->block && def->index == UINT_MAX) {
1204 nir_function_impl *impl =
1205 nir_cf_node_get_function(&instr->block->cf_node);
1206
1207 def->index = impl->ssa_alloc++;
1208 }
1209
1210 return true;
1211 }
1212
1213 static bool
1214 add_reg_def_cb(nir_dest *dest, void *state)
1215 {
1216 nir_instr *instr = (nir_instr *) state;
1217
1218 if (!dest->is_ssa)
1219 _mesa_set_add(dest->reg.reg->defs, instr);
1220
1221 return true;
1222 }
1223
1224 static void
1225 add_defs_uses(nir_instr *instr)
1226 {
1227 nir_foreach_src(instr, add_use_cb, instr);
1228 nir_foreach_dest(instr, add_reg_def_cb, instr);
1229 nir_foreach_ssa_def(instr, add_ssa_def_cb, instr);
1230 }
1231
1232 void
1233 nir_instr_insert_before(nir_instr *instr, nir_instr *before)
1234 {
1235 assert(before->type != nir_instr_type_jump);
1236 before->block = instr->block;
1237 add_defs_uses(before);
1238 exec_node_insert_node_before(&instr->node, &before->node);
1239 }
1240
1241 void
1242 nir_instr_insert_after(nir_instr *instr, nir_instr *after)
1243 {
1244 if (after->type == nir_instr_type_jump) {
1245 assert(instr == nir_block_last_instr(instr->block));
1246 assert(instr->type != nir_instr_type_jump);
1247 }
1248
1249 after->block = instr->block;
1250 add_defs_uses(after);
1251 exec_node_insert_after(&instr->node, &after->node);
1252
1253 if (after->type == nir_instr_type_jump)
1254 handle_jump(after->block);
1255 }
1256
1257 void
1258 nir_instr_insert_before_block(nir_block *block, nir_instr *before)
1259 {
1260 if (before->type == nir_instr_type_jump)
1261 assert(exec_list_is_empty(&block->instr_list));
1262
1263 before->block = block;
1264 add_defs_uses(before);
1265 exec_list_push_head(&block->instr_list, &before->node);
1266
1267 if (before->type == nir_instr_type_jump)
1268 handle_jump(block);
1269 }
1270
1271 void
1272 nir_instr_insert_after_block(nir_block *block, nir_instr *after)
1273 {
1274 if (after->type == nir_instr_type_jump) {
1275 assert(exec_list_is_empty(&block->instr_list) ||
1276 nir_block_last_instr(block)->type != nir_instr_type_jump);
1277 }
1278
1279 after->block = block;
1280 add_defs_uses(after);
1281 exec_list_push_tail(&block->instr_list, &after->node);
1282
1283 if (after->type == nir_instr_type_jump)
1284 handle_jump(block);
1285 }
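
/*
 * A minimal sketch of appending a jump to a block; add_break is a
 * hypothetical helper, not part of this file.  Because the inserted
 * instruction is a jump, the insertion routine ends up in handle_jump(),
 * which rewires the block's successors (and any fake loop edge) accordingly.
 */
static void
add_break(nir_block *block, void *mem_ctx)
{
   nir_jump_instr *jump = nir_jump_instr_create(mem_ctx, nir_jump_break);
   nir_instr_insert_after_block(block, &jump->instr);
}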
1286
1287 void
1288 nir_instr_insert_before_cf(nir_cf_node *node, nir_instr *before)
1289 {
1290 if (node->type == nir_cf_node_block) {
1291 nir_instr_insert_before_block(nir_cf_node_as_block(node), before);
1292 } else {
1293 nir_cf_node *prev = nir_cf_node_prev(node);
1294 assert(prev->type == nir_cf_node_block);
1295 nir_block *prev_block = nir_cf_node_as_block(prev);
1296
1297 nir_instr_insert_before_block(prev_block, before);
1298 }
1299 }
1300
1301 void
1302 nir_instr_insert_after_cf(nir_cf_node *node, nir_instr *after)
1303 {
1304 if (node->type == nir_cf_node_block) {
1305 nir_instr_insert_after_block(nir_cf_node_as_block(node), after);
1306 } else {
1307 nir_cf_node *next = nir_cf_node_next(node);
1308 assert(next->type == nir_cf_node_block);
1309 nir_block *next_block = nir_cf_node_as_block(next);
1310
1311 nir_instr_insert_before_block(next_block, after);
1312 }
1313 }
1314
1315 void
1316 nir_instr_insert_before_cf_list(struct exec_list *list, nir_instr *before)
1317 {
1318 nir_cf_node *first_node = exec_node_data(nir_cf_node,
1319 exec_list_get_head(list), node);
1320 nir_instr_insert_before_cf(first_node, before);
1321 }
1322
1323 void
1324 nir_instr_insert_after_cf_list(struct exec_list *list, nir_instr *after)
1325 {
1326 nir_cf_node *last_node = exec_node_data(nir_cf_node,
1327 exec_list_get_tail(list), node);
1328 nir_instr_insert_after_cf(last_node, after);
1329 }
1330
1331 static bool
1332 remove_use_cb(nir_src *src, void *state)
1333 {
1334 nir_instr *instr = (nir_instr *) state;
1335
1336 struct set *uses_set = src->is_ssa ? src->ssa->uses : src->reg.reg->uses;
1337
1338 struct set_entry *entry = _mesa_set_search(uses_set, instr);
1339 if (entry)
1340 _mesa_set_remove(uses_set, entry);
1341
1342 return true;
1343 }
1344
1345 static bool
1346 remove_def_cb(nir_dest *dest, void *state)
1347 {
1348 nir_instr *instr = (nir_instr *) state;
1349
1350 if (dest->is_ssa)
1351 return true;
1352
1353 nir_register *reg = dest->reg.reg;
1354
1355 struct set_entry *entry = _mesa_set_search(reg->defs, instr);
1356 if (entry)
1357 _mesa_set_remove(reg->defs, entry);
1358
1359 return true;
1360 }
1361
1362 static void
1363 remove_defs_uses(nir_instr *instr)
1364 {
1365 nir_foreach_dest(instr, remove_def_cb, instr);
1366 nir_foreach_src(instr, remove_use_cb, instr);
1367 }
1368
1369 void nir_instr_remove(nir_instr *instr)
1370 {
1371 remove_defs_uses(instr);
1372 exec_node_remove(&instr->node);
1373
1374 if (instr->type == nir_instr_type_jump) {
1375 nir_jump_instr *jump_instr = nir_instr_as_jump(instr);
1376 handle_remove_jump(instr->block, jump_instr->type);
1377 }
1378 }
1379
1380 /*@}*/
1381
1382 void
1383 nir_index_local_regs(nir_function_impl *impl)
1384 {
1385 unsigned index = 0;
1386 foreach_list_typed(nir_register, reg, node, &impl->registers) {
1387 reg->index = index++;
1388 }
1389 impl->reg_alloc = index;
1390 }
1391
1392 void
1393 nir_index_global_regs(nir_shader *shader)
1394 {
1395 unsigned index = 0;
1396 foreach_list_typed(nir_register, reg, node, &shader->registers) {
1397 reg->index = index++;
1398 }
1399 shader->reg_alloc = index;
1400 }
1401
1402 static bool
1403 visit_alu_dest(nir_alu_instr *instr, nir_foreach_dest_cb cb, void *state)
1404 {
1405 return cb(&instr->dest.dest, state);
1406 }
1407
1408 static bool
1409 visit_intrinsic_dest(nir_intrinsic_instr *instr, nir_foreach_dest_cb cb,
1410 void *state)
1411 {
1412 if (nir_intrinsic_infos[instr->intrinsic].has_dest)
1413 return cb(&instr->dest, state);
1414
1415 return true;
1416 }
1417
1418 static bool
1419 visit_texture_dest(nir_tex_instr *instr, nir_foreach_dest_cb cb,
1420 void *state)
1421 {
1422 return cb(&instr->dest, state);
1423 }
1424
1425 static bool
1426 visit_phi_dest(nir_phi_instr *instr, nir_foreach_dest_cb cb, void *state)
1427 {
1428 return cb(&instr->dest, state);
1429 }
1430
1431 static bool
1432 visit_parallel_copy_dest(nir_parallel_copy_instr *instr,
1433 nir_foreach_dest_cb cb, void *state)
1434 {
1435 nir_foreach_parallel_copy_entry(instr, entry) {
1436 if (!cb(&entry->dest, state))
1437 return false;
1438 }
1439
1440 return true;
1441 }
1442
1443 bool
1444 nir_foreach_dest(nir_instr *instr, nir_foreach_dest_cb cb, void *state)
1445 {
1446 switch (instr->type) {
1447 case nir_instr_type_alu:
1448 return visit_alu_dest(nir_instr_as_alu(instr), cb, state);
1449 case nir_instr_type_intrinsic:
1450 return visit_intrinsic_dest(nir_instr_as_intrinsic(instr), cb, state);
1451 case nir_instr_type_tex:
1452 return visit_texture_dest(nir_instr_as_tex(instr), cb, state);
1453 case nir_instr_type_phi:
1454 return visit_phi_dest(nir_instr_as_phi(instr), cb, state);
1455 case nir_instr_type_parallel_copy:
1456 return visit_parallel_copy_dest(nir_instr_as_parallel_copy(instr),
1457 cb, state);
1458
1459 case nir_instr_type_load_const:
1460 case nir_instr_type_ssa_undef:
1461 case nir_instr_type_call:
1462 case nir_instr_type_jump:
1463 break;
1464
1465 default:
1466 unreachable("Invalid instruction type");
1467 break;
1468 }
1469
1470 return true;
1471 }
1472
1473 struct foreach_ssa_def_state {
1474 nir_foreach_ssa_def_cb cb;
1475 void *client_state;
1476 };
1477
1478 static inline bool
1479 nir_ssa_def_visitor(nir_dest *dest, void *void_state)
1480 {
1481 struct foreach_ssa_def_state *state = void_state;
1482
1483 if (dest->is_ssa)
1484 return state->cb(&dest->ssa, state->client_state);
1485 else
1486 return true;
1487 }
1488
1489 bool
1490 nir_foreach_ssa_def(nir_instr *instr, nir_foreach_ssa_def_cb cb, void *state)
1491 {
1492 switch (instr->type) {
1493 case nir_instr_type_alu:
1494 case nir_instr_type_tex:
1495 case nir_instr_type_intrinsic:
1496 case nir_instr_type_phi:
1497 case nir_instr_type_parallel_copy: {
1498 struct foreach_ssa_def_state foreach_state = {cb, state};
1499 return nir_foreach_dest(instr, nir_ssa_def_visitor, &foreach_state);
1500 }
1501
1502 case nir_instr_type_load_const:
1503 return cb(&nir_instr_as_load_const(instr)->def, state);
1504 case nir_instr_type_ssa_undef:
1505 return cb(&nir_instr_as_ssa_undef(instr)->def, state);
1506 case nir_instr_type_call:
1507 case nir_instr_type_jump:
1508 return true;
1509 default:
1510 unreachable("Invalid instruction type");
1511 }
1512 }
1513
1514 static bool
1515 visit_src(nir_src *src, nir_foreach_src_cb cb, void *state)
1516 {
1517 if (!cb(src, state))
1518 return false;
1519 if (!src->is_ssa && src->reg.indirect)
1520 return cb(src->reg.indirect, state);
1521 return true;
1522 }
1523
1524 static bool
1525 visit_deref_array_src(nir_deref_array *deref, nir_foreach_src_cb cb,
1526 void *state)
1527 {
1528 if (deref->deref_array_type == nir_deref_array_type_indirect)
1529 return visit_src(&deref->indirect, cb, state);
1530 return true;
1531 }
1532
1533 static bool
1534 visit_deref_src(nir_deref_var *deref, nir_foreach_src_cb cb, void *state)
1535 {
1536 nir_deref *cur = &deref->deref;
1537 while (cur != NULL) {
1538 if (cur->deref_type == nir_deref_type_array)
1539 if (!visit_deref_array_src(nir_deref_as_array(cur), cb, state))
1540 return false;
1541
1542 cur = cur->child;
1543 }
1544
1545 return true;
1546 }
1547
1548 static bool
1549 visit_alu_src(nir_alu_instr *instr, nir_foreach_src_cb cb, void *state)
1550 {
1551 for (unsigned i = 0; i < nir_op_infos[instr->op].num_inputs; i++)
1552 if (!visit_src(&instr->src[i].src, cb, state))
1553 return false;
1554
1555 return true;
1556 }
1557
1558 static bool
1559 visit_tex_src(nir_tex_instr *instr, nir_foreach_src_cb cb, void *state)
1560 {
1561 for (unsigned i = 0; i < instr->num_srcs; i++)
1562 if (!visit_src(&instr->src[i].src, cb, state))
1563 return false;
1564
1565 if (instr->sampler != NULL)
1566 if (!visit_deref_src(instr->sampler, cb, state))
1567 return false;
1568
1569 return true;
1570 }
1571
1572 static bool
1573 visit_intrinsic_src(nir_intrinsic_instr *instr, nir_foreach_src_cb cb,
1574 void *state)
1575 {
1576 unsigned num_srcs = nir_intrinsic_infos[instr->intrinsic].num_srcs;
1577 for (unsigned i = 0; i < num_srcs; i++)
1578 if (!visit_src(&instr->src[i], cb, state))
1579 return false;
1580
1581 unsigned num_vars =
1582 nir_intrinsic_infos[instr->intrinsic].num_variables;
1583 for (unsigned i = 0; i < num_vars; i++)
1584 if (!visit_deref_src(instr->variables[i], cb, state))
1585 return false;
1586
1587 return true;
1588 }
1589
1590 static bool
1591 visit_call_src(nir_call_instr *instr, nir_foreach_src_cb cb, void *state)
1592 {
1593 return true;
1594 }
1595
1596 static bool
1597 visit_load_const_src(nir_load_const_instr *instr, nir_foreach_src_cb cb,
1598 void *state)
1599 {
1600 return true;
1601 }
1602
1603 static bool
1604 visit_phi_src(nir_phi_instr *instr, nir_foreach_src_cb cb, void *state)
1605 {
1606 nir_foreach_phi_src(instr, src) {
1607 if (!visit_src(&src->src, cb, state))
1608 return false;
1609 }
1610
1611 return true;
1612 }
1613
1614 static bool
1615 visit_parallel_copy_src(nir_parallel_copy_instr *instr,
1616 nir_foreach_src_cb cb, void *state)
1617 {
1618 nir_foreach_parallel_copy_entry(instr, entry) {
1619 if (!visit_src(&entry->src, cb, state))
1620 return false;
1621 }
1622
1623 return true;
1624 }
1625
1626 typedef struct {
1627 void *state;
1628 nir_foreach_src_cb cb;
1629 } visit_dest_indirect_state;
1630
1631 static bool
1632 visit_dest_indirect(nir_dest *dest, void *_state)
1633 {
1634 visit_dest_indirect_state *state = (visit_dest_indirect_state *) _state;
1635
1636 if (!dest->is_ssa && dest->reg.indirect)
1637 return state->cb(dest->reg.indirect, state->state);
1638
1639 return true;
1640 }
1641
1642 bool
1643 nir_foreach_src(nir_instr *instr, nir_foreach_src_cb cb, void *state)
1644 {
1645 switch (instr->type) {
1646 case nir_instr_type_alu:
1647 if (!visit_alu_src(nir_instr_as_alu(instr), cb, state))
1648 return false;
1649 break;
1650 case nir_instr_type_intrinsic:
1651 if (!visit_intrinsic_src(nir_instr_as_intrinsic(instr), cb, state))
1652 return false;
1653 break;
1654 case nir_instr_type_tex:
1655 if (!visit_tex_src(nir_instr_as_tex(instr), cb, state))
1656 return false;
1657 break;
1658 case nir_instr_type_call:
1659 if (!visit_call_src(nir_instr_as_call(instr), cb, state))
1660 return false;
1661 break;
1662 case nir_instr_type_load_const:
1663 if (!visit_load_const_src(nir_instr_as_load_const(instr), cb, state))
1664 return false;
1665 break;
1666 case nir_instr_type_phi:
1667 if (!visit_phi_src(nir_instr_as_phi(instr), cb, state))
1668 return false;
1669 break;
1670 case nir_instr_type_parallel_copy:
1671 if (!visit_parallel_copy_src(nir_instr_as_parallel_copy(instr),
1672 cb, state))
1673 return false;
1674 break;
1675 case nir_instr_type_jump:
1676 case nir_instr_type_ssa_undef:
1677 return true;
1678
1679 default:
1680 unreachable("Invalid instruction type");
1681 break;
1682 }
1683
1684 visit_dest_indirect_state dest_state;
1685 dest_state.state = state;
1686 dest_state.cb = cb;
1687 return nir_foreach_dest(instr, visit_dest_indirect, &dest_state);
1688 }
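
/*
 * A minimal sketch of the callback pattern used with nir_foreach_src();
 * count_reg_reads is a hypothetical helper, not part of this file.  The
 * callback returns true to keep iterating over the remaining sources.
 */
static bool
count_reg_reads_cb(nir_src *src, void *state)
{
   unsigned *count = (unsigned *) state;
   if (!src->is_ssa)
      (*count)++;
   return true;
}

static unsigned
count_reg_reads(nir_instr *instr)
{
   unsigned count = 0;
   nir_foreach_src(instr, count_reg_reads_cb, &count);
   return count;
}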
1689
1690 nir_const_value *
1691 nir_src_as_const_value(nir_src src)
1692 {
1693 if (!src.is_ssa)
1694 return NULL;
1695
1696 if (src.ssa->parent_instr->type != nir_instr_type_load_const)
1697 return NULL;
1698
1699 nir_load_const_instr *load = nir_instr_as_load_const(src.ssa->parent_instr);
1700
1701 return &load->value;
1702 }
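
/*
 * A minimal sketch of using nir_src_as_const_value() to test whether an ALU
 * source is a compile-time constant; alu_src_is_const is a hypothetical
 * helper, not part of this file.
 */
static bool
alu_src_is_const(nir_alu_instr *alu, unsigned i)
{
   return nir_src_as_const_value(alu->src[i].src) != NULL;
}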
1703
1704 bool
1705 nir_srcs_equal(nir_src src1, nir_src src2)
1706 {
1707 if (src1.is_ssa) {
1708 if (src2.is_ssa) {
1709 return src1.ssa == src2.ssa;
1710 } else {
1711 return false;
1712 }
1713 } else {
1714 if (src2.is_ssa) {
1715 return false;
1716 } else {
1717 if ((src1.reg.indirect == NULL) != (src2.reg.indirect == NULL))
1718 return false;
1719
1720 if (src1.reg.indirect) {
1721 if (!nir_srcs_equal(*src1.reg.indirect, *src2.reg.indirect))
1722 return false;
1723 }
1724
1725 return src1.reg.reg == src2.reg.reg &&
1726 src1.reg.base_offset == src2.reg.base_offset;
1727 }
1728 }
1729 }
1730
1731 static bool
1732 src_does_not_use_def(nir_src *src, void *void_def)
1733 {
1734 nir_ssa_def *def = void_def;
1735
1736 if (src->is_ssa) {
1737 return src->ssa != def;
1738 } else {
1739 return true;
1740 }
1741 }
1742
1743 static bool
1744 src_does_not_use_reg(nir_src *src, void *void_reg)
1745 {
1746 nir_register *reg = void_reg;
1747
1748 if (src->is_ssa) {
1749 return true;
1750 } else {
1751 return src->reg.reg != reg;
1752 }
1753 }
1754
1755 void
1756 nir_instr_rewrite_src(nir_instr *instr, nir_src *src, nir_src new_src)
1757 {
1758 if (src->is_ssa) {
1759 nir_ssa_def *old_ssa = src->ssa;
1760 *src = new_src;
1761 if (old_ssa && nir_foreach_src(instr, src_does_not_use_def, old_ssa)) {
1762 struct set_entry *entry = _mesa_set_search(old_ssa->uses, instr);
1763 assert(entry);
1764 _mesa_set_remove(old_ssa->uses, entry);
1765 }
1766 } else {
1767 if (src->reg.indirect)
1768 nir_instr_rewrite_src(instr, src->reg.indirect, new_src);
1769
1770 nir_register *old_reg = src->reg.reg;
1771 *src = new_src;
1772 if (old_reg && nir_foreach_src(instr, src_does_not_use_reg, old_reg)) {
1773 struct set_entry *entry = _mesa_set_search(old_reg->uses, instr);
1774 assert(entry);
1775 _mesa_set_remove(old_reg->uses, entry);
1776 }
1777 }
1778
1779 if (new_src.is_ssa) {
1780 if (new_src.ssa)
1781 _mesa_set_add(new_src.ssa->uses, instr);
1782 } else {
1783 if (new_src.reg.reg)
1784 _mesa_set_add(new_src.reg.reg->uses, instr);
1785 }
1786 }
1787
1788 void
1789 nir_ssa_def_init(nir_instr *instr, nir_ssa_def *def,
1790 unsigned num_components, const char *name)
1791 {
1792 void *mem_ctx = ralloc_parent(instr);
1793
1794 def->name = name;
1795 def->parent_instr = instr;
1796 def->uses = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
1797 _mesa_key_pointer_equal);
1798 def->if_uses = _mesa_set_create(mem_ctx, _mesa_hash_pointer,
1799 _mesa_key_pointer_equal);
1800 def->num_components = num_components;
1801
1802 if (instr->block) {
1803 nir_function_impl *impl =
1804 nir_cf_node_get_function(&instr->block->cf_node);
1805
1806 def->index = impl->ssa_alloc++;
1807 } else {
1808 def->index = UINT_MAX;
1809 }
1810 }
1811
1812 void
1813 nir_ssa_dest_init(nir_instr *instr, nir_dest *dest,
1814 unsigned num_components, const char *name)
1815 {
1816 dest->is_ssa = true;
1817 nir_ssa_def_init(instr, &dest->ssa, num_components, name);
1818 }
1819
1820 struct ssa_def_rewrite_state {
1821 void *mem_ctx;
1822 nir_ssa_def *old;
1823 nir_src new_src;
1824 };
1825
1826 static bool
1827 ssa_def_rewrite_uses_src(nir_src *src, void *void_state)
1828 {
1829 struct ssa_def_rewrite_state *state = void_state;
1830
1831 if (src->is_ssa && src->ssa == state->old)
1832 *src = nir_src_copy(state->new_src, state->mem_ctx);
1833
1834 return true;
1835 }
1836
1837 void
1838 nir_ssa_def_rewrite_uses(nir_ssa_def *def, nir_src new_src, void *mem_ctx)
1839 {
1840 struct ssa_def_rewrite_state state;
1841 state.mem_ctx = mem_ctx;
1842 state.old = def;
1843 state.new_src = new_src;
1844
1845 assert(!new_src.is_ssa || def != new_src.ssa);
1846
1847 struct set *new_uses, *new_if_uses;
1848 if (new_src.is_ssa) {
1849 new_uses = new_src.ssa->uses;
1850 new_if_uses = new_src.ssa->if_uses;
1851 } else {
1852 new_uses = new_src.reg.reg->uses;
1853 new_if_uses = new_src.reg.reg->if_uses;
1854 }
1855
1856 struct set_entry *entry;
1857 set_foreach(def->uses, entry) {
1858 nir_instr *instr = (nir_instr *)entry->key;
1859
1860 _mesa_set_remove(def->uses, entry);
1861 nir_foreach_src(instr, ssa_def_rewrite_uses_src, &state);
1862 _mesa_set_add(new_uses, instr);
1863 }
1864
1865 set_foreach(def->if_uses, entry) {
1866 nir_if *if_use = (nir_if *)entry->key;
1867
1868 _mesa_set_remove(def->if_uses, entry);
1869 if_use->condition = nir_src_copy(new_src, mem_ctx);
1870 _mesa_set_add(new_if_uses, if_use);
1871 }
1872 }
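
/*
 * A minimal sketch of the usual pattern for replacing an instruction's SSA
 * result with another SSA value and then deleting the old instruction;
 * replace_ssa_def is a hypothetical helper, not part of this file, and it
 * assumes new_def has a compatible number of components.
 */
static void
replace_ssa_def(nir_ssa_def *old_def, nir_ssa_def *new_def, void *mem_ctx)
{
   nir_src new_src;
   new_src.is_ssa = true;
   new_src.ssa = new_def;

   nir_ssa_def_rewrite_uses(old_def, new_src, mem_ctx);
   nir_instr_remove(old_def->parent_instr);
}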
1873
1874
1875 static bool foreach_cf_node(nir_cf_node *node, nir_foreach_block_cb cb,
1876 bool reverse, void *state);
1877
1878 static inline bool
1879 foreach_if(nir_if *if_stmt, nir_foreach_block_cb cb, bool reverse, void *state)
1880 {
1881 if (reverse) {
1882 foreach_list_typed_safe_reverse(nir_cf_node, node, node,
1883 &if_stmt->else_list) {
1884 if (!foreach_cf_node(node, cb, reverse, state))
1885 return false;
1886 }
1887
1888 foreach_list_typed_safe_reverse(nir_cf_node, node, node,
1889 &if_stmt->then_list) {
1890 if (!foreach_cf_node(node, cb, reverse, state))
1891 return false;
1892 }
1893 } else {
1894 foreach_list_typed_safe(nir_cf_node, node, node, &if_stmt->then_list) {
1895 if (!foreach_cf_node(node, cb, reverse, state))
1896 return false;
1897 }
1898
1899 foreach_list_typed_safe(nir_cf_node, node, node, &if_stmt->else_list) {
1900 if (!foreach_cf_node(node, cb, reverse, state))
1901 return false;
1902 }
1903 }
1904
1905 return true;
1906 }
1907
1908 static inline bool
1909 foreach_loop(nir_loop *loop, nir_foreach_block_cb cb, bool reverse, void *state)
1910 {
1911 if (reverse) {
1912 foreach_list_typed_safe_reverse(nir_cf_node, node, node, &loop->body) {
1913 if (!foreach_cf_node(node, cb, reverse, state))
1914 return false;
1915 }
1916 } else {
1917 foreach_list_typed_safe(nir_cf_node, node, node, &loop->body) {
1918 if (!foreach_cf_node(node, cb, reverse, state))
1919 return false;
1920 }
1921 }
1922
1923 return true;
1924 }
1925
1926 static bool
1927 foreach_cf_node(nir_cf_node *node, nir_foreach_block_cb cb,
1928 bool reverse, void *state)
1929 {
1930 switch (node->type) {
1931 case nir_cf_node_block:
1932 return cb(nir_cf_node_as_block(node), state);
1933 case nir_cf_node_if:
1934 return foreach_if(nir_cf_node_as_if(node), cb, reverse, state);
1935 case nir_cf_node_loop:
1936 return foreach_loop(nir_cf_node_as_loop(node), cb, reverse, state);
1937 break;
1938
1939 default:
1940 unreachable("Invalid CFG node type");
1941 break;
1942 }
1943
1944 return false;
1945 }
1946
1947 bool
1948 nir_foreach_block(nir_function_impl *impl, nir_foreach_block_cb cb, void *state)
1949 {
1950 foreach_list_typed_safe(nir_cf_node, node, node, &impl->body) {
1951 if (!foreach_cf_node(node, cb, false, state))
1952 return false;
1953 }
1954
1955 return cb(impl->end_block, state);
1956 }
1957
1958 bool
1959 nir_foreach_block_reverse(nir_function_impl *impl, nir_foreach_block_cb cb,
1960 void *state)
1961 {
1962 if (!cb(impl->end_block, state))
1963 return false;
1964
1965 foreach_list_typed_safe_reverse(nir_cf_node, node, node, &impl->body) {
1966 if (!foreach_cf_node(node, cb, true, state))
1967 return false;
1968 }
1969
1970 return true;
1971 }
1972
1973 nir_if *
1974 nir_block_get_following_if(nir_block *block)
1975 {
1976 if (exec_node_is_tail_sentinel(&block->cf_node.node))
1977 return NULL;
1978
1979 if (nir_cf_node_is_last(&block->cf_node))
1980 return NULL;
1981
1982 nir_cf_node *next_node = nir_cf_node_next(&block->cf_node);
1983
1984 if (next_node->type != nir_cf_node_if)
1985 return NULL;
1986
1987 return nir_cf_node_as_if(next_node);
1988 }
1989
1990 static bool
1991 index_block(nir_block *block, void *state)
1992 {
1993 unsigned *index = (unsigned *) state;
1994 block->index = (*index)++;
1995 return true;
1996 }
1997
1998 void
1999 nir_index_blocks(nir_function_impl *impl)
2000 {
2001 unsigned index = 0;
2002
2003 if (impl->valid_metadata & nir_metadata_block_index)
2004 return;
2005
2006 nir_foreach_block(impl, index_block, &index);
2007
2008 impl->num_blocks = index;
2009 }
2010
2011 static void
2012 index_ssa_def(nir_ssa_def *def, unsigned *index)
2013 {
2014 def->index = (*index)++;
2015 }
2016
2017 static bool
2018 index_ssa_def_cb(nir_dest *dest, void *state)
2019 {
2020 unsigned *index = (unsigned *) state;
2021 if (dest->is_ssa)
2022 index_ssa_def(&dest->ssa, index);
2023 return true;
2024 }
2025
2026 static void
2027 index_ssa_undef(nir_ssa_undef_instr *instr, unsigned *index)
2028 {
2029 index_ssa_def(&instr->def, index);
2030 }
2031
2032 static bool
2033 index_ssa_block(nir_block *block, void *state)
2034 {
2035 unsigned *index = (unsigned *) state;
2036
2037 nir_foreach_instr(block, instr) {
2038 if (instr->type == nir_instr_type_ssa_undef)
2039 index_ssa_undef(nir_instr_as_ssa_undef(instr), index);
2040 else
2041 nir_foreach_dest(instr, index_ssa_def_cb, state);
2042 }
2043
2044 return true;
2045 }
2046
2047 void
2048 nir_index_ssa_defs(nir_function_impl *impl)
2049 {
2050 unsigned index = 0;
2051 nir_foreach_block(impl, index_ssa_block, &index);
2052 impl->ssa_alloc = index;
2053 }