/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks on the stack.  That is, positions 0, ...,
     sp-1 hold blocks.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));
\f
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}
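
/* For illustration (a sketch of the checks above, not an exhaustive
   characterization): a block whose only insns are its code label,
   notes, and a final simple unconditional jump

       L1:  jump L2

   merely passes control to L2 and is a forwarder block.  A block
   ending in a conditional jump, or containing any other active insn,
   is not.  */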

/* Return true if we can reach TARGET from SRC by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->next_bb != target)
    return false;

  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
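
/* Illustration: can_fallthru (A, B) holds when B immediately follows A
   in the basic block chain and no active insn separates A's last insn
   from B's first active insn -- e.g. when A ends in a conditional jump
   whose not-taken path continues straight into B.  */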
\f
/* Mark the back edges in DFS traversal.
   Return true if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */
bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (last_basic_block, sizeof (int));
  post = (int *) xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
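
/* Hand-worked example (assuming successor lists are walked in order):
   for the simple loop

       ENTRY -> A -> B -> A,  B -> EXIT

   the walk assigns pre[A] = 1 and pre[B] = 2.  When the edge B->A is
   examined, A is already visited, pre[B] >= pre[A] holds, and post[A]
   is still 0, so B->A is marked EDGE_DFS_BACK and the function
   returns true.  */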

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      /* The FALLTHRU edge is also a CAN_FALLTHRU edge.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e->flags |= EDGE_CAN_FALLTHRU;

      /* If the BB ends with an invertible condjump, both of its edges
         are CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
        continue;
      if (!any_condjump_p (bb->end))
        continue;

      /* Invert the jump once to check that inversion is possible, then
         invert it back to restore the original condition.  */
      if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
        continue;
      invert_jump (bb->end, JUMP_LABEL (bb->end), 0);

      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN
      && !SIBLING_CALL_P (insn)
      && !find_reg_note (insn, REG_NORETURN, NULL)
      && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
      && !CONST_OR_PURE_CALL_P (insn))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}
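
/* For instance, a CALL_INSN that is not a sibling call, carries no
   REG_NORETURN or REG_ALWAYS_RETURN note, and is not const or pure
   needs a fake edge, as does a volatile asm: in either case entering
   the block no longer guarantees that control reaches the insns after
   it.  */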

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for any volatile inline assembly, in the
   bitmap of blocks specified by BLOCKS, or in the whole CFG if BLOCKS
   is zero.  Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == EXIT_BLOCK_PTR)
              break;

          insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
          commit_edge_insertions ();
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way to determine whether they will
     return or not.  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (GET_CODE (insn) == CALL_INSN)
                while (split_at_insn != bb->end
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in the CFG already.  Calling make_edge in such a case
                 would cause us to mark that edge as fake and remove it
                 later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != bb->end)
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
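
/* Typical usage (sketch): calling

       flow_call_edges_add (NULL);

   processes every basic block, while passing an sbitmap restricts the
   work to the blocks whose bits are set; the return value is the
   number of blocks that had to be split.  */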

/* Find unreachable blocks.  An unreachable block has the BB_REACHABLE
   bit clear in block->flags; a reachable block has it set.  */

void
find_unreachable_blocks ()
{
  edge e;
  basic_block *tos, *worklist, bb;

  tos = worklist =
        (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}
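
/* A caller can then test the flag directly, e.g. (sketch):

       find_unreachable_blocks ();
       FOR_EACH_BB (bb)
         if (!(bb->flags & BB_REACHABLE))
           ...the block is dead and may be removed...
*/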
\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */
struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;

  block_count = n_basic_blocks + 2;  /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    for (e = bb->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
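
/* Sketch of typical use (the processing step is hypothetical):

       struct edge_list *elist = create_edge_list ();
       for (i = 0; i < NUM_EDGES (elist); i++)
         process (INDEX_EDGE (elist, i));
       free_edge_list (elist);

   EDGE_INDEX (elist, pred, succ) maps a (pred, succ) pair back to its
   index and yields EDGE_INDEX_NO_EDGE when no such edge exists.  */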

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}

/* Given a predecessor and successor block, return the index of the
   edge connecting them, or EDGE_INDEX_NO_EDGE if no such edge exists.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}

\f
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    remove_fake_successors (bb);
}

/* This function will add a fake edge between any block which has no
   successors and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->succ == NULL)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
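
/* These fake edges are normally transient.  A pass that needs them
   typically brackets its analysis (sketch):

       add_noreturn_fake_exit_edges ();
       ...solve the dataflow equations...
       remove_fake_edges ();
*/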

/* This function adds a fake edge from every infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
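
/* For example, a self-loop generated for

       for (;;)
         ;

   has no path to EXIT_BLOCK_PTR, so the reverse DFS from the exit
   never reaches it; the loop above then adds a fake edge from one of
   its blocks to the exit block.  Which block receives the edge depends
   on the reverse-DFS visit order.  */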
\f
/* Compute the reverse topological sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}
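
/* Hand-worked example (assuming successor lists are walked in order):
   for the diamond

       A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   the blocks complete in the order D, B, C, A, so RTS_ORDER receives
   their indices in that sequence.  Every block appears after all of
   its successors -- a reverse topological order.  */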

/* Compute the depth first search order and store it in the array
   DFS_ORDER if nonzero.  If RC_ORDER is nonzero, store the reverse
   completion number of each node in it.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the
   starting point as quickly as possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
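
/* For the same diamond A -> {B, C} -> D (hand-worked, successor lists
   walked in order), DFS_ORDER comes out as A, B, D, C while RC_ORDER
   comes out as A, C, B, D.  RC_ORDER is a reverse postorder, which for
   an acyclic CFG is a topological sort of the blocks.  */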

struct dfst_node
{
  unsigned nnodes;
  struct dfst_node **node;
  struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (last_basic_block,
                                       sizeof (struct dfst_node));

  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
        max_successors++;

      dfst[bb->index].node
        = (max_successors
           ? (struct dfst_node **) xcalloc (max_successors,
                                            sizeof (struct dfst_node *))
           : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Compute the depth first search order on the _reverse_ graph,
   marking the nodes visited as the search proceeds and handing back
   the remaining unvisited blocks one at a time.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
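
/* connect_infinite_loops_to_exit, above, follows exactly this
   protocol: one _init, an initial _add_bb for the exit block, then an
   _execute/_add_bb loop that runs until _execute returns NULL, and a
   final _finish.  */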

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.
   The stack starts out empty; blocks are added with
   flow_dfs_compute_reverse_add_bb.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->index - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Perform a DFS from BB over the vertices satisfying PREDICATE; if
   REVERSE is nonzero, walk against the direction of the edges.  Store
   the blocks found in RSLT, which can hold at most RSLT_MAX items, and
   return their number.  */
int
dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
     basic_block bb;
     int reverse;
     bool (*predicate) (basic_block, void *);
     basic_block *rslt;
     int rslt_max;
     void *data;
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  st = (basic_block *) xcalloc (rslt_max, sizeof (basic_block));
  rslt[tv++] = st[sp++] = bb;
  bb->flags |= BB_VISITED;
  while (sp)
    {
      edge e;
      lbb = st[--sp];
      if (reverse)
        {
          for (e = lbb->pred; e; e = e->pred_next)
            if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
              {
                if (tv == rslt_max)
                  abort ();
                rslt[tv++] = st[sp++] = e->src;
                e->src->flags |= BB_VISITED;
              }
        }
      else
        {
          for (e = lbb->succ; e; e = e->succ_next)
            if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
              {
                if (tv == rslt_max)
                  abort ();
                rslt[tv++] = st[sp++] = e->dest;
                e->dest->flags |= BB_VISITED;
              }
        }
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    rslt[sp]->flags &= ~BB_VISITED;
  return tv;
}
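
/* Illustrative caller (the predicate and the array size are
   hypothetical):

       static bool
       in_loop_p (basic_block bb, void *data ATTRIBUTE_UNUSED)
       {
         return bb->loop_depth > 0;
       }

       basic_block found[64];
       int n = dfs_enumerate_from (bb, 0, in_loop_p, found, 64, NULL);

   RSLT must have room for every block the walk can reach; the function
   aborts as soon as more than RSLT_MAX qualifying blocks are found.  */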