/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_ds {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks in the stack.  That is, positions 0, ..., sp-1
     hold basic blocks.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};

static void flow_dfs_compute_reverse_init (depth_first_search_ds *);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds *,
					     basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds *,
						     basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
\f
/* Mark the back edges in DFS traversal.
   Return true if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge_iterator *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = ei_start (dest->succs);
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    {
	      ei_edge (ei)->flags |= EDGE_DFS_BACK;
	      found = true;
	    }

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);

  return found;
}
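
/* A minimal usage sketch, assuming a hypothetical pass-specific
   visit () action and a block BB of interest:

     mark_dfs_back_edges ();
     edge e;
     edge_iterator ei;
     FOR_EACH_EDGE (e, ei, bb->succs)
       if (!(e->flags & EDGE_DFS_BACK))
	 visit (e);

   so that forward propagation skips loop back edges.  */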

/* Find unreachable blocks.  An unreachable block will have its
   BB_REACHABLE bit clear in block->flags; the bit is set for
   reachable blocks.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}

/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}

\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
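
/* A minimal usage sketch, with BB1/BB2 standing for two blocks of
   interest and record_edge () a hypothetical client action keyed by
   edge index:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, bb1, bb2);
     if (ix != EDGE_INDEX_NO_EDGE)
       record_edge (ix);
     free_edge_list (elist);  */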

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s) == EDGE_INDEX_NO_EDGE
	    && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s) != EDGE_INDEX_NO_EDGE
	    && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This wrapper is necessary to handle the exit block and any block
   without a computed immediate postdominator, both of which map to
   the exit block.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return EXIT_BLOCK_PTR_FOR_FN (cfun);
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR_FOR_FN (cfun);
      return bb;
    }
}

/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* For abnormal edges, we don't make current_block control
     dependent because instructions that throw are always necessary
     anyway.  */
  edge e = find_edge (get_edge_src (edge_index), get_edge_dest (edge_index));
  if (e->flags & EDGE_ABNORMAL)
    return;

  if (get_edge_src (edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  else
    ending_block = find_pdom (get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = find_pdom (current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      m_el.quick_push (std::make_pair (e->src->index, e->dest->index));

  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  for (unsigned i = 0; i < control_dependence_map.length (); ++i)
    BITMAP_FREE (control_dependence_map[i]);
  control_dependence_map.release ();
  m_el.release ();
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
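
/* A minimal usage sketch; post-dominators must be available before
   constructing the class, and process_deps () is a hypothetical
   client action on a block BB of interest:

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     process_deps (deps);
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);  */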


/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}
\f
/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}
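
/* A minimal usage sketch: clients typically bracket a dataflow
   computation with the fake edges, where solve_dataflow () stands for
   a hypothetical solver that needs every block to reach the exit:

     add_noreturn_fake_exit_edges ();
     solve_dataflow ();
     remove_fake_exit_edges ();  */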

/* This function adds a fake edge between any infinite loops to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  basic_block deadend_block;
  depth_first_search_ds dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
							  unvisited_block);
      if (!unvisited_block)
	break;

      deadend_block = dfs_find_deadend (unvisited_block);
      make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
\f
/* Compute reverse topological sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
	   b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  free (stack);
  return post_order_num;
}
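
/* A minimal usage sketch, with process () standing for a hypothetical
   per-block action:

     int *post = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post, false, false);
     for (int i = 0; i < n; i++)
       process (BASIC_BLOCK_FOR_FN (cfun, post[i]));
     free (post);  */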


/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth-first search in dominance.c.
   BB has to belong to a region of the CFG that is
   unreachable by inverted traversal from the exit,
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (a call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  bitmap visited = BITMAP_ALLOC (NULL);

  for (;;)
    {
      if (EDGE_COUNT (bb->succs) == 0
	  || ! bitmap_set_bit (visited, bb->index))
	{
	  BITMAP_FREE (visited);
	  return bb;
	}

      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	bb = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  bb = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }

  gcc_unreachable ();
}


/* Compute the reverse topological sort order of the inverted CFG,
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do an inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

int
inverted_post_order_compute (int *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack[sp++] = ei_start (bb->preds);
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack[sp++] = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack[sp++] = ei_start (bb->preds);
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (sp)
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack[sp - 1];
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack[sp++] = ei_start (pred->preds);
	      else
		post_order[post_order_num++] = pred->index;
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order[post_order_num++] = bb->index;

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack[sp - 1]);
	      else
		sp--;
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack[sp++] = ei_start (be->preds);
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && sp == 0)
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack[sp++] = ei_start (be->preds);
	}

      /* The only case in which the outer do-while loop iterates again
	 is when there's an infinite loop.  */
    }
  while (sp);

  /* EXIT_BLOCK is always included.  */
  post_order[post_order_num++] = EXIT_BLOCK;

  free (stack);
  return post_order_num;
}
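
/* A minimal usage sketch, with process () standing for a hypothetical
   per-block action; the resulting order is useful for forward
   dataflow problems, as noted above:

     int *ipo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = inverted_post_order_compute (ipo, NULL);
     for (int i = 0; i < n; i++)
       if (ipo[i] != ENTRY_BLOCK && ipo[i] != EXIT_BLOCK)
	 process (BASIC_BLOCK_FOR_FN (cfun, ipo[i]));
     free (ipo);  */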

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (fn) + 1);
  sp = 0;

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fn));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  free (stack);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}
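
/* A minimal usage sketch: reverse postorder is the canonical iteration
   order for forward dataflow, with process () a hypothetical transfer
   function:

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; i++)
       process (BASIC_BLOCK_FOR_FN (cfun, rpo[i]));
     free (rpo);  */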

/* Depth-first search on the _reverse_ graph, i.e. following edges from
   a block to its predecessors.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

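/* A minimal sketch of that calling sequence (see
   connect_infinite_loops_to_exit above for a real client):

     depth_first_search_ds ds;
     flow_dfs_compute_reverse_init (&ds);
     flow_dfs_compute_reverse_add_bb (&ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
     basic_block unvisited
       = flow_dfs_compute_reverse_execute (&ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
     flow_dfs_compute_reverse_finish (&ds);

   leaving UNVISITED as a block with no path to EXIT, or NULL.  */
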
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds *data)
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (data->visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
{
  data->stack[data->sp++] = bb;
  bitmap_set_bit (data->visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds *data,
				  basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (data->visited_blocks, e->src->index))
	  flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (data->visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (depth_first_search_ds *data)
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Performs a DFS from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;
  unsigned size;

  /* A bitmap to keep track of visited blocks.  Allocating it each time
     this function is called is not possible, since dfs_enumerate_from
     is often used on small (almost) disjoint parts of cfg (bodies of
     loops), and allocating a large sbitmap would lead to quadratic
     behavior.  */
  static sbitmap visited;
  static unsigned v_size;

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define UNMARK_VISITED(BB) (bitmap_clear_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  /* Resize the VISITED sbitmap if necessary.  */
  size = last_basic_block_for_fn (cfun);
  if (size < 10)
    size = 10;

  if (!visited)
    {
      visited = sbitmap_alloc (size);
      bitmap_clear (visited);
      v_size = size;
    }
  else if (v_size < size)
    {
      /* Ensure that we increase the size of the sbitmap exponentially.  */
      if (2 * v_size > size)
	size = 2 * v_size;

      visited = sbitmap_resize (visited, size, 0);
      v_size = size;
    }

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
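
/* A minimal usage sketch; my_predicate and MAX_BLOCKS are hypothetical,
   and the loop body collectors in cfgloop.c are real clients of this
   interface:

     static bool
     my_predicate (const_basic_block b, const void *data ATTRIBUTE_UNUSED)
     {
       return !(b->flags & BB_IRREDUCIBLE_LOOP);
     }

     basic_block blocks[MAX_BLOCKS];
     int n = dfs_enumerate_from (bb, 0, my_predicate, blocks,
				 MAX_BLOCKS, NULL);  */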


/* Compute dominance frontiers, ala Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */


static void
compute_dominance_frontiers_1 (bitmap_head *frontiers)
{
  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      basic_block domsb;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	      while (runner != domsb)
		{
		  if (!bitmap_set_bit (&frontiers[runner->index],
				       b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS,
						    runner);
		}
	    }
	}
    }
}


void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  compute_dominance_frontiers_1 (frontiers);

  timevar_pop (TV_DOM_FRONTIERS);
}

/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  /* Each block can appear at most twice on the work-stack.  */
  auto_vec<int> work_stack (2 * n_basic_blocks_for_fn (cfun));
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     vec::quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    work_stack.quick_push (bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.  Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (work_stack.length () > 0)
    {
      bb_index = work_stack.pop ();

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  work_stack.quick_push (i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}
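
/* A minimal usage sketch following the SSA builder's pattern, with
   DEF_BLOCKS the set of blocks defining the name of interest and
   dominators computed beforehand:

     calculate_dominance_info (CDI_DOMINATORS);
     bitmap_head *dfs = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (dfs);
     bitmap idf = compute_idf (def_blocks, dfs);  */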

/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */
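
/* A minimal sketch of one sweep of a forward "all paths" problem, with
   IN and OUT hypothetical sbitmap vectors indexed by block number as
   described above:

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_intersection_of_preds (in[bb->index], out, bb);  */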

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}
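
/* A minimal usage sketch, with process () a hypothetical per-block
   action:

     basic_block *order = single_pred_before_succ_order ();
     unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
     for (unsigned i = 0; i < n; i++)
       process (order[i]);
     free (order);  */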