cfganal.c (set_edge_can_fallthru_flag): Clear the EDGE_CAN_FALLTHRU flag before setti...
[gcc.git] / gcc / cfganal.c
1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "toplev.h"
33 #include "tm_p.h"
34
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack of basic blocks still to be processed, used for
     backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of entries in the stack.  That is, positions 0, ..., sp-1
     hold blocks.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
48
49 static void flow_dfs_compute_reverse_init
50 PARAMS ((depth_first_search_ds));
51 static void flow_dfs_compute_reverse_add_bb
52 PARAMS ((depth_first_search_ds, basic_block));
53 static basic_block flow_dfs_compute_reverse_execute
54 PARAMS ((depth_first_search_ds));
55 static void flow_dfs_compute_reverse_finish
56 PARAMS ((depth_first_search_ds));
57 static void remove_fake_successors PARAMS ((basic_block));
58 static bool need_fake_edge_p PARAMS ((rtx));
59 static bool flow_active_insn_p PARAMS ((rtx));
60 \f
61 /* Like active_insn_p, except keep the return value clobber around
62 even after reload. */
63
64 static bool
65 flow_active_insn_p (insn)
66 rtx insn;
67 {
68 if (active_insn_p (insn))
69 return true;
70
71 /* A clobber of the function return value exists for buggy
72 programs that fail to return a value. Its effect is to
73 keep the return value from being live across the entire
74 function. If we allow it to be skipped, we introduce the
75 possibility for register livetime aborts. */
76 if (GET_CODE (PATTERN (insn)) == CLOBBER
77 && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
78 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
79 return true;
80
81 return false;
82 }
83
84 /* Return true if the block has no effect and only forwards control flow to
85 its single destination. */
86
87 bool
88 forwarder_block_p (bb)
89 basic_block bb;
90 {
91 rtx insn;
92
93 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
94 || !bb->succ || bb->succ->succ_next)
95 return false;
96
97 for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
98 if (INSN_P (insn) && flow_active_insn_p (insn))
99 return false;
100
101 return (!INSN_P (insn)
102 || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
103 || !flow_active_insn_p (insn));
104 }
105
106 /* Return nonzero if we can reach target from src by falling through. */
107
108 bool
109 can_fallthru (src, target)
110 basic_block src, target;
111 {
112 rtx insn = src->end;
113 rtx insn2 = target->head;
114
115 if (src->next_bb != target)
116 return 0;
117
118 if (!active_insn_p (insn2))
119 insn2 = next_active_insn (insn2);
120
121 /* ??? Later we may add code to move jump tables offline. */
122 return next_active_insn (insn) == insn2;
123 }
124 \f
125 /* Mark the back edges in DFS traversal.
126 Return nonzero if a loop (natural or otherwise) is present.
127 Inspired by Depth_First_Search_PP described in:
128
129 Advanced Compiler Design and Implementation
130 Steven Muchnick
131 Morgan Kaufmann, 1997
132
133 and heavily borrowed from flow_depth_first_order_compute. */
134
bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  Numbers
     start at 1 so a zero postorder entry means "not yet finished".  */
  pre = (int *) xcalloc (last_basic_block, sizeof (int));
  post = (int *) xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited.  It is a back edge exactly when
	     DEST is still open on the DFS stack: its preorder number
	     precedes (or equals) SRC's and it has no postorder number
	     yet.  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* All of SRC's successors processed; assign its postorder
	     number.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
216
217 /* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru. */
218
void
set_edge_can_fallthru_flag ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Clear any stale flag first.  */
	  e->flags &= ~EDGE_CAN_FALLTHRU;

	  /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags |= EDGE_CAN_FALLTHRU;
	}

      /* If the BB ends with an invertable condjump all (2) edges are
	 CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
	continue;
      if (!any_condjump_p (bb->end))
	continue;
      /* invert_jump both tests invertibility and performs the
	 inversion; if it fails, the jump is untouched and the edges
	 keep only whatever flags they already have.  */
      if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
	continue;
      /* The first call succeeded, so invert again to restore the
	 original jump, then mark both edges.  */
      invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}
250
251 /* Return true if we need to add fake edge to exit.
252 Helper function for the flow_call_edges_add. */
253
254 static bool
255 need_fake_edge_p (insn)
256 rtx insn;
257 {
258 if (!INSN_P (insn))
259 return false;
260
261 if ((GET_CODE (insn) == CALL_INSN
262 && !SIBLING_CALL_P (insn)
263 && !find_reg_note (insn, REG_NORETURN, NULL)
264 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
265 && !CONST_OR_PURE_CALL_P (insn)))
266 return true;
267
268 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
269 && MEM_VOLATILE_P (PATTERN (insn)))
270 || (GET_CODE (PATTERN (insn)) == PARALLEL
271 && asm_noperands (insn) != -1
272 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
273 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
274 }
275
276 /* Add fake edges to the function exit for any non constant and non noreturn
277 calls, volatile inline assembly in the bitmap of blocks specified by
278 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
279 that were split.
280
281 The goal is to expose cases in which entering a basic block does not imply
282 that all subsequent instructions must be executed. */
283
int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  /* With no restriction bitmap, every block -- including the last --
     is considered.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a dummy USE on the fallthru edge to EXIT, forcing
	     a new block between the call and the exit.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      {
		insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
		commit_edge_insertions ();
		break;
	      }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Scan the block's insns from last to first.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
	         remain in the same block as the call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != bb->end
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
	         should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != bb->end)
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
402
403 /* Find unreachable blocks. An unreachable block will have 0 in
404 the reachable bit in block->flags. A nonzero value indicates the
405 block is reachable. */
406
407 void
408 find_unreachable_blocks ()
409 {
410 edge e;
411 basic_block *tos, *worklist, bb;
412
413 tos = worklist =
414 (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
415
416 /* Clear all the reachability flags. */
417
418 FOR_EACH_BB (bb)
419 bb->flags &= ~BB_REACHABLE;
420
421 /* Add our starting points to the worklist. Almost always there will
422 be only one. It isn't inconceivable that we might one day directly
423 support Fortran alternate entry points. */
424
425 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
426 {
427 *tos++ = e->dest;
428
429 /* Mark the block reachable. */
430 e->dest->flags |= BB_REACHABLE;
431 }
432
433 /* Iterate: find everything reachable from what we've already seen. */
434
435 while (tos != worklist)
436 {
437 basic_block b = *--tos;
438
439 for (e = b->succ; e; e = e->succ_next)
440 if (!(e->dest->flags & BB_REACHABLE))
441 {
442 *tos++ = e->dest;
443 e->dest->flags |= BB_REACHABLE;
444 }
445 }
446
447 free (worklist);
448 }
449 \f
450 /* Functions to access an edge list with a vector representation.
451 Enough data is kept such that given an index number, the
452 pred and succ that edge represents can be determined, or
453 given a pred and a succ, its index number can be returned.
454 This allows algorithms which consume a lot of memory to
455 represent the normally full matrix of edge (pred,succ) with a
456 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
457 wasted space in the client code due to sparse flow graphs. */
458
459 /* This functions initializes the edge list. Basically the entire
460 flowgraph is processed, and all edges are assigned a number,
461 and the data structure is filled in. */
462
463 struct edge_list *
464 create_edge_list ()
465 {
466 struct edge_list *elist;
467 edge e;
468 int num_edges;
469 int block_count;
470 basic_block bb;
471
472 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
473
474 num_edges = 0;
475
476 /* Determine the number of edges in the flow graph by counting successor
477 edges on each basic block. */
478 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
479 {
480 for (e = bb->succ; e; e = e->succ_next)
481 num_edges++;
482 }
483
484 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
485 elist->num_blocks = block_count;
486 elist->num_edges = num_edges;
487 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
488
489 num_edges = 0;
490
491 /* Follow successors of blocks, and register these edges. */
492 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
493 for (e = bb->succ; e; e = e->succ_next)
494 elist->index_to_edge[num_edges++] = e;
495
496 return elist;
497 }
498
499 /* This function free's memory associated with an edge list. */
500
501 void
502 free_edge_list (elist)
503 struct edge_list *elist;
504 {
505 if (elist)
506 {
507 free (elist->index_to_edge);
508 free (elist);
509 }
510 }
511
512 /* This function provides debug output showing an edge list. */
513
514 void
515 print_edge_list (f, elist)
516 FILE *f;
517 struct edge_list *elist;
518 {
519 int x;
520
521 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
522 elist->num_blocks - 2, elist->num_edges);
523
524 for (x = 0; x < elist->num_edges; x++)
525 {
526 fprintf (f, " %-4d - edge(", x);
527 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
528 fprintf (f, "entry,");
529 else
530 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
531
532 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
533 fprintf (f, "exit)\n");
534 else
535 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
536 }
537 }
538
539 /* This function provides an internal consistency check of an edge list,
540 verifying that all edges are present, and that there are no
541 extra edges. */
542
void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  /* First pass: every edge in the CFG must be present in the list,
     with a consistent index.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  Check every (P, S) block
     pair: the list must contain an index exactly when an edge exists.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
606
607 /* This routine will determine what, if any, edge there is between
608 a specified predecessor and successor. */
609
610 int
611 find_edge_index (edge_list, pred, succ)
612 struct edge_list *edge_list;
613 basic_block pred, succ;
614 {
615 int x;
616
617 for (x = 0; x < NUM_EDGES (edge_list); x++)
618 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
619 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
620 return x;
621
622 return (EDGE_INDEX_NO_EDGE);
623 }
624
625 /* Dump the list of basic blocks in the bitmap NODES. */
626
627 void
628 flow_nodes_print (str, nodes, file)
629 const char *str;
630 const sbitmap nodes;
631 FILE *file;
632 {
633 int node;
634
635 if (! nodes)
636 return;
637
638 fprintf (file, "%s { ", str);
639 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
640 fputs ("}\n", file);
641 }
642
643 /* Dump the list of edges in the array EDGE_LIST. */
644
645 void
646 flow_edge_list_print (str, edge_list, num_edges, file)
647 const char *str;
648 const edge *edge_list;
649 int num_edges;
650 FILE *file;
651 {
652 int i;
653
654 if (! edge_list)
655 return;
656
657 fprintf (file, "%s { ", str);
658 for (i = 0; i < num_edges; i++)
659 fprintf (file, "%d->%d ", edge_list[i]->src->index,
660 edge_list[i]->dest->index);
661
662 fputs ("}\n", file);
663 }
664
665 \f
666 /* This routine will remove any fake successor edges for a basic block.
667 When the edge is removed, it is also removed from whatever predecessor
668 list it is in. */
669
670 static void
671 remove_fake_successors (bb)
672 basic_block bb;
673 {
674 edge e;
675
676 for (e = bb->succ; e;)
677 {
678 edge tmp = e;
679
680 e = e->succ_next;
681 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
682 remove_edge (tmp);
683 }
684 }
685
686 /* This routine will remove all fake edges from the flow graph. If
687 we remove all fake successors, it will automatically remove all
688 fake predecessors. */
689
690 void
691 remove_fake_edges ()
692 {
693 basic_block bb;
694
695 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
696 remove_fake_successors (bb);
697 }
698
699 /* This function will add a fake edge between any block which has no
700 successors, and the exit block. Some data flow equations require these
701 edges to exist. */
702
703 void
704 add_noreturn_fake_exit_edges ()
705 {
706 basic_block bb;
707
708 FOR_EACH_BB (bb)
709 if (bb->succ == NULL)
710 make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
711 }
712
713 /* This function adds a fake edge between any infinite loops to the
714 exit block. Some optimizations require a path from each node to
715 the exit node.
716
717 See also Morgan, Figure 3.10, pp. 82-83.
718
719 The current implementation is ugly, not attempting to minimize the
720 number of inserted fake edges. To reduce the number of fake edges
721 to insert, add fake edges from _innermost_ loops containing only
722 nodes not reachable from the exit block. */
723
724 void
725 connect_infinite_loops_to_exit ()
726 {
727 basic_block unvisited_block;
728 struct depth_first_search_dsS dfs_ds;
729
730 /* Perform depth-first search in the reverse graph to find nodes
731 reachable from the exit block. */
732 flow_dfs_compute_reverse_init (&dfs_ds);
733 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
734
735 /* Repeatedly add fake edges, updating the unreachable nodes. */
736 while (1)
737 {
738 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
739 if (!unvisited_block)
740 break;
741
742 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
743 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
744 }
745
746 flow_dfs_compute_reverse_finish (&dfs_ds);
747 return;
748 }
749 \f
750 /* Compute reverse top sort order */
751
void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* A sink: it is complete immediately, so it gets the next
	       postorder number.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* SRC is numbered once its last successor edge has been
	     examined, i.e. in postorder.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    rts_order[postnum++] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
813
814 /* Compute the depth first search order and store in the array
815 DFS_ORDER if nonzero, marking the nodes visited in VISITED. If
816 RC_ORDER is nonzero, return the reverse completion number for each
817 node. Returns the number of nodes visited. A depth first search
818 tries to get as far away from the starting point as quickly as
819 possible. */
820
int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  /* Reverse completion numbers are handed out from the back of the
     array toward the front.  */
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
905
/* A node of the depth-first spanning tree built by
   flow_preorder_transversal_compute.  */
struct dfst_node
{
  /* Number of children currently stored in NODE.  */
  unsigned nnodes;
  /* Array of child nodes in the spanning tree.  */
  struct dfst_node **node;
  /* Parent node, or null for the root.  */
  struct dfst_node *up;
};
912
/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

   1) Generating a depth first spanning tree.

   2) Walking the resulting tree from right to left.  */
923
void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree, one dfst_node per basic block, each with room
     for as many children as the block has successors.  */
  dfst = (struct dfst_node *) xcalloc (last_basic_block,
				       sizeof (struct dfst_node));

  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
	max_successors++;

      dfst[bb->index].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  Note this walk consumes
     nnodes counters, so the tree cannot be reused afterwards.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1033
1034 /* Compute the depth first search order on the _reverse_ graph and
1035 store in the array DFS_ORDER, marking the nodes visited in VISITED.
1036 Returns the number of nodes visited.
1037
1038 The computation is split into three pieces:
1039
1040 flow_dfs_compute_reverse_init () creates the necessary data
1041 structures.
1042
1043 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1044 structures. The block will start the search.
1045
1046 flow_dfs_compute_reverse_execute () continues (or starts) the
1047 search using the block on the top of the stack, stopping when the
1048 stack is empty.
1049
1050 flow_dfs_compute_reverse_finish () destroys the necessary data
1051 structures.
1052
1053 Thus, the user will probably call ..._init(), call ..._add_bb() to
1054 add a beginning basic block to the stack, call ..._execute(),
1055 possibly add another bb to the stack and again call ..._execute(),
1056 ..., and finally call _finish(). */
1057
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up.
   No block is placed on the stack here; seed the search with
   flow_dfs_compute_reverse_add_bb.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  Sizes are offset by
     INVALID_BLOCK + 1, matching the index offset used when the
     visited bitmap is tested and set (this makes room for the
     special entry/exit block indices).  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
					 * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}
1081
1082 /* Add the specified basic block to the top of the dfs data
1083 structures. When the search continues, it will start at the
1084 block. */
1085
static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  /* Indices are offset by INVALID_BLOCK + 1 so that the special
     entry/exit block indices map into the bitmap as well.  */
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}
1094
1095 /* Continue the depth-first search through the reverse graph starting with the
1096 block at the stack's top and ending when the stack is empty. Visited nodes
1097 are marked. Returns an unvisited basic block, or NULL if there is none
1098 available. */
1099
static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices: follow
	 predecessor edges, since this walks the reverse graph.  */
      for (e = bb->pred; e; e = e->pred_next)
	if (!TEST_BIT (data->visited_blocks,
		       e->src->index - (INVALID_BLOCK + 1)))
	  flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks; return the first
     one found scanning from the exit block backwards, or NULL.  */
  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
      return bb;

  return NULL;
}
1125
1126 /* Destroy the data structures needed for depth-first search on the
1127 reverse graph. */
1128
1129 static void
1130 flow_dfs_compute_reverse_finish (data)
1131 depth_first_search_ds data;
1132 {
1133 free (data->stack);
1134 sbitmap_free (data->visited_blocks);
1135 }
1136
1137 /* Performs dfs search from BB over vertices satisfying PREDICATE;
1138 if REVERSE, go against direction of edges. Returns number of blocks
1139 found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
1140 int
1141 dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
1142 basic_block bb;
1143 int reverse;
1144 bool (*predicate) PARAMS ((basic_block, void *));
1145 basic_block *rslt;
1146 int rslt_max;
1147 void *data;
1148 {
1149 basic_block *st, lbb;
1150 int sp = 0, tv = 0;
1151
1152 st = xcalloc (rslt_max, sizeof (basic_block));
1153 rslt[tv++] = st[sp++] = bb;
1154 bb->flags |= BB_VISITED;
1155 while (sp)
1156 {
1157 edge e;
1158 lbb = st[--sp];
1159 if (reverse)
1160 {
1161 for (e = lbb->pred; e; e = e->pred_next)
1162 if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
1163 {
1164 if (tv == rslt_max)
1165 abort ();
1166 rslt[tv++] = st[sp++] = e->src;
1167 e->src->flags |= BB_VISITED;
1168 }
1169 }
1170 else
1171 {
1172 for (e = lbb->succ; e; e = e->succ_next)
1173 if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
1174 {
1175 if (tv == rslt_max)
1176 abort ();
1177 rslt[tv++] = st[sp++] = e->dest;
1178 e->dest->flags |= BB_VISITED;
1179 }
1180 }
1181 }
1182 free (st);
1183 for (sp = 0; sp < tv; sp++)
1184 rslt[sp]->flags &= ~BB_VISITED;
1185 return tv;
1186 }