/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "timevar.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks on the stack.  That is, positions 0, ..., sp-1
     are occupied.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
                                             basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
                                                     basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
\f
/* Mark the back edges in DFS traversal.
   Return true if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge_iterator *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);

          pre[dest->index] = prenum++;
          if (EDGE_COUNT (dest->succs) > 0)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = ei_start (dest->succs);
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            post[src->index] = postnum++;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
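
/* An illustrative sketch (editorial addition, not part of GCC) of how
   the EDGE_DFS_BACK marks left by mark_dfs_back_edges might be
   consumed; the dump to dump_file is hypothetical:

     basic_block bb;
     edge e;
     edge_iterator ei;

     if (mark_dfs_back_edges ())
       FOR_EACH_BB_FN (bb, cfun)
         FOR_EACH_EDGE (e, ei, bb->succs)
           if (e->flags & EDGE_DFS_BACK)
             fprintf (dump_file, "back edge %d -> %d\n",
                      e->src->index, e->dest->index);  */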

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A nonzero value indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
        {
          basic_block dest = e->dest;

          if (!(dest->flags & BB_REACHABLE))
            {
              *tos++ = dest;
              dest->flags |= BB_REACHABLE;
            }
        }
    }

  free (worklist);
}
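
/* An illustrative sketch (editorial addition, not part of GCC): after
   find_unreachable_blocks returns, BB_REACHABLE separates live blocks
   from dead ones:

     basic_block bb;

     find_unreachable_blocks ();
     FOR_EACH_BB_FN (bb, cfun)
       if (!(bb->flags & BB_REACHABLE))
         fprintf (stderr, "block %d is unreachable\n", bb->index);  */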
\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
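
/* An illustrative sketch (editorial addition, not part of GCC) of the
   edge-list life cycle, assuming the entry block has a single
   successor: build the list once, map a (pred, succ) pair to its
   dense index with EDGE_INDEX, and free the list when done:

     struct edge_list *el = create_edge_list ();
     basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
     int ix = EDGE_INDEX (el, entry, single_succ (entry));
     if (ix != EDGE_INDEX_NO_EDGE)
       gcc_assert (INDEX_EDGE (el, ix)->src == entry);
     free_edge_list (el);  */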

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        FOR_EACH_EDGE (e, ei, p->succs)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        FOR_EACH_EDGE (e, ei, s->preds)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s) == EDGE_INDEX_NO_EDGE
            && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s) != EDGE_INDEX_NO_EDGE
            && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
                                                     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return EXIT_BLOCK_PTR_FOR_FN (cfun);
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR_FOR_FN (cfun);
      return bb;
    }
}

/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (m_el, edge_index)
              != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (INDEX_EDGE_PRED_BB (m_el, edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (m_el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (m_el, edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (m_el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

control_dependences::control_dependences (struct edge_list *edges)
  : m_el (edges)
{
  timevar_push (TV_CONTROL_DEPENDENCES);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < NUM_EDGES (m_el); ++i)
    find_control_dependence (i);
  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  for (unsigned i = 0; i < control_dependence_map.length (); ++i)
    BITMAP_FREE (control_dependence_map[i]);
  control_dependence_map.release ();
  free_edge_list (m_el);
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return control_dependence_map[i];
}

/* Returns the edge with index I from the edge list.  */

edge
control_dependences::get_edge (int i)
{
  return INDEX_EDGE (m_el, i);
}
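
/* An illustrative sketch (editorial addition, not part of GCC) of
   computing and querying control dependences for a block BB of
   interest, assuming post-dominator information has already been
   computed with calculate_dominance_info (CDI_POST_DOMINATORS); the
   destructor also frees the edge list handed to the constructor:

     control_dependences *cd = new control_dependences (create_edge_list ());
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     unsigned ix;
     bitmap_iterator bi;
     EXECUTE_IF_SET_IN_BITMAP (deps, 0, ix, bi)
       fprintf (stderr, "bb %d depends on edge %d -> %d\n", bb->index,
                cd->get_edge (ix)->src->index,
                cd->get_edge (ix)->dest->index);
     delete cd;  */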


/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
        if (e->dest == succ)
          return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
        if (e->src == pred)
          return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}
\f
/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (e);
      else
        ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds a fake edge from every infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  basic_block deadend_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
                                                          unvisited_block);
      if (!unvisited_block)
        break;

      deadend_block = dfs_find_deadend (unvisited_block);
      make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
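
/* An illustrative sketch (editorial addition, not part of GCC) of the
   usual pairing: make every block reach EXIT, run an analysis that
   needs that property, then undo the change:

     connect_infinite_loops_to_exit ();
     ... run a dataflow problem that requires a path to EXIT ...
     remove_fake_exit_edges ();  */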
\f
/* Compute reverse topological sort order.  This is computing a post
   order numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
                    bool delete_unreachable)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = ei_start (dest->succs);
          else
            post_order[post_order_num++] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            post_order[post_order_num++] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
           b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
        {
          next_bb = b->next_bb;

          if (!(bitmap_bit_p (visited, b->index)))
            delete_basic_block (b);
        }

      tidy_fallthru_edges ();
    }

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
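
/* An illustrative sketch (editorial addition, not part of GCC) of
   computing a post order over the current function; with
   include_entry_exit true the array must have room for every block,
   including ENTRY and EXIT:

     int *post = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post, true, false);
     for (int i = 0; i < n; i++)
       fprintf (stderr, "%d ", post[i]);
     free (post);  */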


/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth-first search in dominance.c.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  bitmap visited = BITMAP_ALLOC (NULL);

  for (;;)
    {
      if (EDGE_COUNT (bb->succs) == 0
          || ! bitmap_set_bit (visited, bb->index))
        {
          BITMAP_FREE (visited);
          return bb;
        }

      bb = EDGE_SUCC (bb, 0)->dest;
    }

  gcc_unreachable ();
}

/* Compute the reverse topological sort order of the inverted CFG,
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do an inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   start looking for a "dead end" from that block,
   and do another inverted traversal from that dead end.  */

int
inverted_post_order_compute (int *post_order)
{
  basic_block bb;
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Put all blocks that have no successor into the initial work list.  */
  FOR_ALL_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      {
        /* Push the initial edge on to the stack.  */
        if (EDGE_COUNT (bb->preds) > 0)
          {
            stack[sp++] = ei_start (bb->preds);
            bitmap_set_bit (visited, bb->index);
          }
      }

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (sp)
        {
          edge_iterator ei;
          basic_block pred;

          /* Look at the edge on the top of the stack.  */
          ei = stack[sp - 1];
          bb = ei_edge (ei)->dest;
          pred = ei_edge (ei)->src;

          /* Check if the predecessor has been visited yet.  */
          if (! bitmap_bit_p (visited, pred->index))
            {
              /* Mark that we have visited the predecessor.  */
              bitmap_set_bit (visited, pred->index);

              if (EDGE_COUNT (pred->preds) > 0)
                /* Since the predecessor node has been visited for the first
                   time, check its predecessors.  */
                stack[sp++] = ei_start (pred->preds);
              else
                post_order[post_order_num++] = pred->index;
            }
          else
            {
              if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && ei_one_before_end_p (ei))
                post_order[post_order_num++] = bb->index;

              if (!ei_one_before_end_p (ei))
                ei_next (&stack[sp - 1]);
              else
                sp--;
            }
        }

      /* Detect any infinite loop and activate the kludge.
         Note that this doesn't check EXIT_BLOCK itself
         since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
        if (!bitmap_bit_p (visited, bb->index))
          {
            has_unvisited_bb = true;

            if (EDGE_COUNT (bb->preds) > 0)
              {
                edge_iterator ei;
                edge e;
                basic_block visited_pred = NULL;

                /* Find an already visited predecessor.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (visited, e->src->index))
                      visited_pred = e->src;
                  }

                if (visited_pred)
                  {
                    basic_block be = dfs_find_deadend (bb);
                    gcc_assert (be != NULL);
                    bitmap_set_bit (visited, be->index);
                    stack[sp++] = ei_start (be->preds);
                    break;
                  }
              }
          }

      if (has_unvisited_bb && sp == 0)
        {
          /* No blocks are reachable from EXIT at all.
             Find a dead-end from the ENTRY, and restart the iteration.  */
          basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
          gcc_assert (be != NULL);
          bitmap_set_bit (visited, be->index);
          stack[sp++] = ei_start (be->preds);
        }

      /* The only case the below while fires is
         when there's an infinite loop.  */
    }
  while (sp);

  /* EXIT_BLOCK is always included.  */
  post_order[post_order_num++] = EXIT_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
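
/* An illustrative sketch (editorial addition, not part of GCC): the
   inverted post order is a convenient iteration order for forward
   dataflow problems; process_block is a hypothetical callback:

     int *ipo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = inverted_post_order_compute (ipo);
     for (int i = 0; i < n; i++)
       if (ipo[i] >= NUM_FIXED_BLOCKS)
         process_block (BASIC_BLOCK_FOR_FN (cfun, ipo[i]));
     free (ipo);  */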

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
                                   int *pre_order, int *rev_post_order,
                                   bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (fn) + 1);
  sp = 0;

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (fn));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);

          if (pre_order)
            pre_order[pre_order_num] = dest->index;

          pre_order_num++;

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = ei_start (dest->succs);
          else if (rev_post_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
                                bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
                                         include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
                == (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}
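
/* An illustrative sketch (editorial addition, not part of GCC): a
   reverse post order visits every block before its successors, back
   edges aside, which makes it the canonical iteration order for
   forward dataflow problems; process_block is hypothetical:

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; i++)
       process_block (BASIC_BLOCK_FOR_FN (cfun, rpo[i]));
     free (rpo);  */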

/* Perform a depth first search on the _reverse_ graph, marking the
   blocks visited along the way.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
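
/* An illustrative sketch (editorial addition, not part of GCC) of that
   call sequence, mirroring connect_infinite_loops_to_exit above:

     struct depth_first_search_dsS dfs_ds;
     basic_block unvisited;

     flow_dfs_compute_reverse_init (&dfs_ds);
     flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
     unvisited
       = flow_dfs_compute_reverse_execute (&dfs_ds,
                                           EXIT_BLOCK_PTR_FOR_FN (cfun));
     ... possibly add UNVISITED to the stack and execute again ...
     flow_dfs_compute_reverse_finish (&dfs_ds);  */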

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up;
   its backtracking stack starts out empty.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
  data->stack[data->sp++] = bb;
  bitmap_set_bit (data->visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds data,
                                  basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!bitmap_bit_p (data->visited_blocks, e->src->index))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (data->visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Performs dfs search from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
                    bool (*predicate) (const_basic_block, const void *),
                    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;
  unsigned size;

  /* A bitmap to keep track of visited blocks.  Allocating it each time
     this function is called is not possible, since dfs_enumerate_from
     is often used on small (almost) disjoint parts of cfg (bodies of
     loops), and allocating a large sbitmap would lead to quadratic
     behavior.  */
  static sbitmap visited;
  static unsigned v_size;

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define UNMARK_VISITED(BB) (bitmap_clear_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  /* Resize the VISITED sbitmap if necessary.  */
  size = last_basic_block_for_fn (cfun);
  if (size < 10)
    size = 10;

  if (!visited)
    {
      visited = sbitmap_alloc (size);
      bitmap_clear (visited);
      v_size = size;
    }
  else if (v_size < size)
    {
      /* Ensure that we increase the size of the sbitmap exponentially.  */
      if (2 * v_size > size)
        size = 2 * v_size;

      visited = sbitmap_resize (visited, size, 0);
      v_size = size;
    }

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
        {
          FOR_EACH_EDGE (e, ei, lbb->preds)
            if (!VISITED_P (e->src) && predicate (e->src, data))
              {
                gcc_assert (tv != rslt_max);
                rslt[tv++] = st[sp++] = e->src;
                MARK_VISITED (e->src);
              }
        }
      else
        {
          FOR_EACH_EDGE (e, ei, lbb->succs)
            if (!VISITED_P (e->dest) && predicate (e->dest, data))
              {
                gcc_assert (tv != rslt_max);
                rslt[tv++] = st[sp++] = e->dest;
                MARK_VISITED (e->dest);
              }
        }
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
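
/* An illustrative sketch (editorial addition, not part of GCC) of
   collecting blocks reachable forward from BB; everything named here
   (accept_all_p, MAX_BLOCKS) is hypothetical, and MAX_BLOCKS must be
   at least the number of blocks the predicate accepts or the assert
   in dfs_enumerate_from fires:

     static bool
     accept_all_p (const_basic_block bb ATTRIBUTE_UNUSED,
                   const void *data ATTRIBUTE_UNUSED)
     {
       return true;
     }

     basic_block blocks[MAX_BLOCKS];
     int n = dfs_enumerate_from (bb, 0, accept_all_p, blocks,
                                 MAX_BLOCKS, NULL);  */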


/* Compute dominance frontiers, ala Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */


static void
compute_dominance_frontiers_1 (bitmap_head *frontiers)
{
  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
        {
          FOR_EACH_EDGE (p, ei, b->preds)
            {
              basic_block runner = p->src;
              basic_block domsb;
              if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
                continue;

              domsb = get_immediate_dominator (CDI_DOMINATORS, b);
              while (runner != domsb)
                {
                  if (!bitmap_set_bit (&frontiers[runner->index], b->index))
                    break;
                  runner = get_immediate_dominator (CDI_DOMINATORS, runner);
                }
            }
        }
    }
}


void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  compute_dominance_frontiers_1 (frontiers);

  timevar_pop (TV_DOM_FRONTIERS);
}

/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  /* Each block can appear at most twice on the work-stack.  */
  auto_vec<int> work_stack (2 * n_basic_blocks_for_fn (cfun));
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     vec::quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    work_stack.quick_push (bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.  Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (work_stack.length () > 0)
    {
      bb_index = work_stack.pop ();

      /* Since the registration of NEW -> OLD name mappings is done
         separately from the call to update_ssa, when updating the SSA
         form, the basic blocks where new and/or old names are defined
         may have disappeared by CFG cleanup calls.  In this case,
         we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
                           < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
                                      0, i, bi)
        {
          work_stack.quick_push (i);
          bitmap_set_bit (phi_insertion_points, i);
        }
    }

  return phi_insertion_points;
}
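
/* An illustrative sketch (editorial addition, not part of GCC) of the
   classic PHI-placement pipeline, assuming dominance information is
   available and DEF_BLOCKS is a caller-provided bitmap of definition
   blocks: compute dominance frontiers first, then the iterated
   dominance frontier of the definition blocks:

     bitmap_head *dfs = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (dfs);
     bitmap phi_sites = compute_idf (def_blocks, dfs);
     ... insert PHI nodes at the blocks in PHI_SITES ...
     BITMAP_FREE (phi_sites);  */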

/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  gcc_assert (!dst->popcount);

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
        unsigned int i;
        SBITMAP_ELT_TYPE *p, *r;

        e = EDGE_SUCC (b, ix);
        if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          continue;

        p = src[e->dest->index]->elms;
        r = dst->elms;
        for (i = 0; i < set_size; i++)
          *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  gcc_assert (!dst->popcount);

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
        unsigned int i;
        SBITMAP_ELT_TYPE *p, *r;

        e = EDGE_PRED (b, ix);
        if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
          continue;

        p = src[e->src->index]->elms;
        r = dst->elms;
        for (i = 0; i < set_size; i++)
          *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  gcc_assert (!dst->popcount);

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
        unsigned int i;
        SBITMAP_ELT_TYPE *p, *r;

        e = EDGE_SUCC (b, ix);
        if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          continue;

        p = src[e->dest->index]->elms;
        r = dst->elms;
        for (i = 0; i < set_size; i++)
          *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  gcc_assert (!dst->popcount);

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
        unsigned int i;
        SBITMAP_ELT_TYPE *p, *r;

        e = EDGE_PRED (b, ix);
        if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
          continue;

        p = src[e->src->index]->elms;
        r = dst->elms;
        for (i = 0; i < set_size; i++)
          *r++ |= *p++;
      }
}
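
/* An illustrative sketch (editorial addition, not part of GCC): the
   update step of an availability-style forward problem, where a
   block's IN set is the intersection of its predecessors' OUT sets
   and OUT is GEN union (IN minus KILL); in, out, gen and kill are
   assumed to be sbitmap vectors indexed by block number:

     bitmap_intersection_of_preds (in[bb->index], out, bb);
     bitmap_ior_and_compl (out[bb->index], gen[bb->index],
                           in[bb->index], kill[bb->index]);  */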

/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
        continue;

      /* Walk the predecessors of x as long as they have precisely one
         predecessor and add them to the list, so that they get stored
         after x.  */
      for (y = x, np = 1;
           single_pred_p (y) && !VISITED_P (single_pred (y));
           y = single_pred (y))
        np++;
      for (y = x, i = n - np;
           single_pred_p (y) && !VISITED_P (single_pred (y));
           y = single_pred (y), i++)
        {
          order[i] = y;
          MARK_VISITED (y);
        }
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  sbitmap_free (visited);
  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}
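
/* An illustrative sketch (editorial addition, not part of GCC):
   iterating the returned array front to back visits a block before
   its unique predecessor, so facts can be propagated from the bottom
   of each single-predecessor chain upward; process_block is
   hypothetical:

     basic_block *order = single_pred_before_succ_order ();
     unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
     for (unsigned i = 0; i < n; i++)
       process_block (order[i]);
     free (order);  */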