* cfg.c, cfganal.c, cfgbuild.c: Reformatting and minor cleanups.
[gcc.git] / gcc / cfganal.c
1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "toplev.h"
29
30 #include "obstack.h"
31
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack of basic blocks still to be processed, used for
     backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of entries in use on the stack.  That is, positions
     0, ..., sp-1 hold blocks.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search,
     indexed by block index biased by INVALID_BLOCK + 1 (see
     flow_dfs_compute_reverse_init).  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
45
46 static void flow_dfs_compute_reverse_init
47 PARAMS ((depth_first_search_ds));
48 static void flow_dfs_compute_reverse_add_bb
49 PARAMS ((depth_first_search_ds, basic_block));
50 static basic_block flow_dfs_compute_reverse_execute
51 PARAMS ((depth_first_search_ds));
52 static void flow_dfs_compute_reverse_finish
53 PARAMS ((depth_first_search_ds));
54 static void remove_fake_successors PARAMS ((basic_block));
55 static bool need_fake_edge_p PARAMS ((rtx));
56 \f
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  /* The entry and exit blocks, blocks with no successor, and blocks
     with more than one successor can never be forwarders.  */
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  /* Every insn before the last must be inert (not an active insn).  */
  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  /* The final insn may be inert too, or an unconditional simple jump;
     anything else means the block has an effect of its own.  */
  return (!INSN_P (insn)
	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
	  || !active_insn_p (insn));
}
78
/* Return nonzero if we can reach TARGET from SRC by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  /* If TARGET immediately follows SRC and starts with an inactive insn
     (e.g. a code label), skip ahead to its first active insn so the
     comparison below works.  */
  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
94 \f
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;			/* Edges whose processing is in progress.  */
  int *pre;			/* Preorder (discovery) numbers per block.  */
  int *post;			/* Postorder (finish) numbers per block.  */
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  xcalloc zeros
     them; a zero post[] entry means the block has not finished yet,
     i.e. it is still on the current DFS path.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited (or is the exit block).  The edge
	     is a back edge iff DEST is an ancestor of SRC on the DFS
	     path: discovered no later than SRC (pre comparison) and
	     not yet finished (post still zero).  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* All of SRC's successors are done, so SRC is finished.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to SRC's next successor edge, or pop SRC.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
186
187 /* Return true if we need to add fake edge to exit.
188 Helper function for the flow_call_edges_add. */
189
190 static bool
191 need_fake_edge_p (insn)
192 rtx insn;
193 {
194 if (!INSN_P (insn))
195 return false;
196
197 if ((GET_CODE (insn) == CALL_INSN
198 && !SIBLING_CALL_P (insn)
199 && !find_reg_note (insn, REG_NORETURN, NULL)
200 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
201 && !CONST_OR_PURE_CALL_P (insn)))
202 return true;
203
204 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
205 && MEM_VOLATILE_P (PATTERN (insn)))
206 || (GET_CODE (PATTERN (insn)) == PARALLEL
207 && asm_noperands (insn) != -1
208 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
209 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
210 }
211
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;			/* Number of blocks collected in BBS.  */
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      /* No bitmap given: process every block; the last one always
	 needs the special fallthru handling below.  */
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }

  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
    {
      bbs[bb_num++] = BASIC_BLOCK (i);
      if (i == n_basic_blocks - 1)
	check_last_block = true;
    });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block
      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
    {
      edge e;

      /* Find the fallthru edge from the last block to the exit...  */
      for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
	if (e->dest == EXIT_BLOCK_PTR)
	  break;

      /* ...and queue a no-op (USE of const0) on it, so the problematic
	 insn is no longer the last insn of the last block.  */
      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
      commit_edge_insertions ();
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      /* Walk BB's insns backwards from its end; PREV_INSN is fetched
	 up front because split_block may rewire INSN's links.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;

	      /* The above condition should be enough to verify that there is
		 no edge to the exit block in CFG already.  Calling make_edge
		 in such case would make us to mark that edge as fake and
		 remove it later.  */

#ifdef ENABLE_CHECKING
	      if (insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, insn);
	      if (e)
		blocks_split++;

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
325
/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A non-zero value indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;		/* TOS is the worklist's top.  */

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.
     Blocks are marked before being pushed, so each is pushed at most
     once and the loop terminates.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!(e->dest->flags & BB_REACHABLE))
	  {
	    *tos++ = e->dest;
	    e->dest->flags |= BB_REACHABLE;
	  }
    }

  free (worklist);
}
373 \f
374 /* Functions to access an edge list with a vector representation.
375 Enough data is kept such that given an index number, the
376 pred and succ that edge represents can be determined, or
377 given a pred and a succ, its index number can be returned.
378 This allows algorithms which consume a lot of memory to
379 represent the normally full matrix of edge (pred,succ) with a
380 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
381 wasted space in the client code due to sparse flow graphs. */
382
383 /* This functions initializes the edge list. Basically the entire
384 flowgraph is processed, and all edges are assigned a number,
385 and the data structure is filled in. */
386
387 struct edge_list *
388 create_edge_list ()
389 {
390 struct edge_list *elist;
391 edge e;
392 int num_edges;
393 int x;
394 int block_count;
395
396 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
397
398 num_edges = 0;
399
400 /* Determine the number of edges in the flow graph by counting successor
401 edges on each basic block. */
402 for (x = 0; x < n_basic_blocks; x++)
403 {
404 basic_block bb = BASIC_BLOCK (x);
405
406 for (e = bb->succ; e; e = e->succ_next)
407 num_edges++;
408 }
409
410 /* Don't forget successors of the entry block. */
411 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
412 num_edges++;
413
414 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
415 elist->num_blocks = block_count;
416 elist->num_edges = num_edges;
417 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
418
419 num_edges = 0;
420
421 /* Follow successors of the entry block, and register these edges. */
422 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
423 elist->index_to_edge[num_edges++] = e;
424
425 for (x = 0; x < n_basic_blocks; x++)
426 {
427 basic_block bb = BASIC_BLOCK (x);
428
429 /* Follow all successors of blocks, and register these edges. */
430 for (e = bb->succ; e; e = e->succ_next)
431 elist->index_to_edge[num_edges++] = e;
432 }
433
434 return elist;
435 }
436
437 /* This function free's memory associated with an edge list. */
438
439 void
440 free_edge_list (elist)
441 struct edge_list *elist;
442 {
443 if (elist)
444 {
445 free (elist->index_to_edge);
446 free (elist);
447 }
448 }
449
450 /* This function provides debug output showing an edge list. */
451
452 void
453 print_edge_list (f, elist)
454 FILE *f;
455 struct edge_list *elist;
456 {
457 int x;
458
459 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
460 elist->num_blocks - 2, elist->num_edges);
461
462 for (x = 0; x < elist->num_edges; x++)
463 {
464 fprintf (f, " %-4d - edge(", x);
465 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
466 fprintf (f, "entry,");
467 else
468 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
469
470 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
471 fprintf (f, "exit)\n");
472 else
473 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
474 }
475 }
476
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  Problems are reported to stream F.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  /* First direction: every edge in the CFG must be in the list, with
     matching endpoints.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* The entry block's successors are not covered by the loop above.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  Check every (pred, succ)
     pair: the list has an index for it iff the CFG has the edge.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	/* Also scan the predecessor list, which must agree.  */
	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  /* Same cross-check for edges leaving the entry block.  */
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  /* And for edges entering the exit block.  */
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
629
630 /* This routine will determine what, if any, edge there is between
631 a specified predecessor and successor. */
632
633 int
634 find_edge_index (edge_list, pred, succ)
635 struct edge_list *edge_list;
636 basic_block pred, succ;
637 {
638 int x;
639
640 for (x = 0; x < NUM_EDGES (edge_list); x++)
641 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
642 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
643 return x;
644
645 return (EDGE_INDEX_NO_EDGE);
646 }
647
/* Dump the list of basic blocks in the bitmap NODES to FILE, prefixed
   by the label STR.  A null NODES prints nothing.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
665
666 /* Dump the list of edges in the array EDGE_LIST. */
667
668 void
669 flow_edge_list_print (str, edge_list, num_edges, file)
670 const char *str;
671 const edge *edge_list;
672 int num_edges;
673 FILE *file;
674 {
675 int i;
676
677 if (! edge_list)
678 return;
679
680 fprintf (file, "%s { ", str);
681 for (i = 0; i < num_edges; i++)
682 fprintf (file, "%d->%d ", edge_list[i]->src->index,
683 edge_list[i]->dest->index);
684
685 fputs ("}\n", file);
686 }
687
688 \f
689 /* This routine will remove any fake successor edges for a basic block.
690 When the edge is removed, it is also removed from whatever predecessor
691 list it is in. */
692
693 static void
694 remove_fake_successors (bb)
695 basic_block bb;
696 {
697 edge e;
698
699 for (e = bb->succ; e;)
700 {
701 edge tmp = e;
702
703 e = e->succ_next;
704 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
705 remove_edge (tmp);
706 }
707 }
708
709 /* This routine will remove all fake edges from the flow graph. If
710 we remove all fake successors, it will automatically remove all
711 fake predecessors. */
712
713 void
714 remove_fake_edges ()
715 {
716 int x;
717
718 for (x = 0; x < n_basic_blocks; x++)
719 remove_fake_successors (BASIC_BLOCK (x));
720
721 /* We've handled all successors except the entry block's. */
722 remove_fake_successors (ENTRY_BLOCK_PTR);
723 }
724
725 /* This function will add a fake edge between any block which has no
726 successors, and the exit block. Some data flow equations require these
727 edges to exist. */
728
729 void
730 add_noreturn_fake_exit_edges ()
731 {
732 int x;
733
734 for (x = 0; x < n_basic_blocks; x++)
735 if (BASIC_BLOCK (x)->succ == NULL)
736 make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
737 }
738
/* This function adds a fake edge between any infinite loops to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      /* Get some block the reverse DFS has not reached yet; if every
	 block can already reach the exit, we are done.  */
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
	break;

      /* Give the block a path to the exit, then resume the reverse
	 search from it so everything it dominates is covered too.  */
      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
775 \f
/* Compute reverse top sort order, storing the block indices into the
   caller-supplied array RTS_ORDER (which must have room for
   n_basic_blocks entries).  The order is a DFS postorder: each block
   appears after all of its successors that are not back-edge targets.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* A leaf: its postorder number is assigned immediately.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* All of SRC's successors are done; SRC gets the next
	     postorder slot.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    rts_order[postnum++] = src->index;

	  /* Advance to SRC's next successor edge, or pop SRC.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
839
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;			/* Next preorder (DFS) number.  */
  int rcnum = n_basic_blocks - 1;	/* RC numbers are filled backwards.  */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to SRC's next successor edge, or pop SRC.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
931
/* A node of the depth-first spanning tree built and walked by
   flow_preorder_transversal_compute.  There is one per basic block,
   allocated as a flat array indexed by block number.  */
struct dfst_node
{
  /* Number of children currently recorded in NODE.  */
  unsigned nnodes;

  /* Array of pointers to child nodes, filled in visit order.  */
  struct dfst_node **node;

  /* Parent in the spanning tree, or NULL for the root.  */
  struct dfst_node *up;
};
938
/* Compute a preorder transversal (sic: traversal) ordering such that a
   sub-tree which is the source of a cross edge appears before the
   sub-tree which is the destination of the cross edge.  This allows for
   easy detection of all the entry blocks for a loop.

   The ordering is compute by:

   1) Generating a depth first spanning tree.

   2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree: one dfst_node per basic block, each with a
     child array large enough for all of its successors.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
				       sizeof (struct dfst_node));

  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	max_successors++;

      dfst[i].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  NNODES is consumed as a
     cursor: each child is visited once and then forgotten.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  /* Descend into the rightmost remaining child.  The node's
	     array index within DFST is its basic block number.  */
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1058
1059 /* Compute the depth first search order on the _reverse_ graph and
1060 store in the array DFS_ORDER, marking the nodes visited in VISITED.
1061 Returns the number of nodes visited.
1062
1063 The computation is split into three pieces:
1064
1065 flow_dfs_compute_reverse_init () creates the necessary data
1066 structures.
1067
1068 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1069 structures. The block will start the search.
1070
1071 flow_dfs_compute_reverse_execute () continues (or starts) the
1072 search using the block on the top of the stack, stopping when the
1073 stack is empty.
1074
1075 flow_dfs_compute_reverse_finish () destroys the necessary data
1076 structures.
1077
1078 Thus, the user will probably call ..._init(), call ..._add_bb() to
1079 add a beginning basic block to the stack, call ..._execute(),
1080 possibly add another bb to the stack and again call ..._execute(),
1081 ..., and finally call _finish(). */
1082
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up.
   The stack starts out empty; the caller seeds the search by calling
   flow_dfs_compute_reverse_add_bb.  Sizes are biased by
   INVALID_BLOCK + 1 so that blocks with negative indices (the special
   entry/exit blocks) also fit in the visited bitmap.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
					 * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}
1106
/* Add the specified basic block BB to the top of the dfs data
   structures in DATA.  When the search continues, it will start at
   the block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  /* Bias the index by INVALID_BLOCK + 1, matching the bitmap size
     chosen in flow_dfs_compute_reverse_init, so negative block
     indices map onto valid bitmap positions.  */
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}
1119
/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices: in the
	 reverse graph the neighbors of BB are its predecessors.  */
      for (e = bb->pred; e; e = e->pred_next)
	if (!TEST_BIT (data->visited_blocks,
		       e->src->index - (INVALID_BLOCK + 1)))
	  flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks; scan the biased
     bitmap from the top down and undo the bias when converting a bit
     position back to a block.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));

  return NULL;
}
1151
/* Destroy the data structures needed for depth-first search on the
   reverse graph: the back-tracking stack and the visited bitmap
   allocated by flow_dfs_compute_reverse_init.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}