flow.c (delete_dead_jumptables): New function.
[gcc.git] / gcc / bb-reorder.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* References:
22
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
25
26 TODO:
27
28 (1) Consider:
29
30 if (p) goto A; // predict taken
31 foo ();
32 A:
33 if (q) goto B; // predict taken
34 bar ();
35 B:
36 baz ();
37 return;
38
39 We'll currently reorder this as
40
41 if (!p) goto C;
42 A:
43 if (!q) goto D;
44 B:
45 baz ();
46 return;
47 D:
48 bar ();
49 goto B;
50 C:
51 foo ();
52 goto A;
53
54 A better ordering is
55
56 if (!p) goto C;
57 if (!q) goto D;
58 B:
59 baz ();
60 return;
61 C:
62 foo ();
63 if (q) goto B;
64 D:
65 bar ();
66 goto B;
67
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
71
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
74
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
78
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
81 */
82
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "toplev.h"
96 #include "recog.h"
97 #include "expr.h"
98 #include "obstack.h"
99
100
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
103 #endif
104
105
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
110
111 extern struct obstack flow_obstack;
112
113
/* Structure to hold information about lexical scopes.  */
typedef struct scope_def
{
  /* Nesting depth of this scope within its tree.  Set to -1 by
     relate_bbs_with_scopes for a degenerate scope that spans no basic
     blocks, marking it safe to ignore.  */
  int level;

  /* The NOTE_INSN_BLOCK_BEG that started this scope.  */
  rtx note_beg;

  /* The NOTE_INSN_BLOCK_END that ended this scope.  */
  rtx note_end;

  /* The bb containing note_beg (if any).  */
  basic_block bb_beg;

  /* The bb containing note_end (if any).  */
  basic_block bb_end;

  /* List of basic blocks contained within this scope.  */
  basic_block *bbs;

  /* Number of blocks contained within this scope.  */
  int num_bbs;

  /* The outer scope or NULL if outermost scope.  */
  struct scope_def *outer;

  /* The first inner scope or NULL if innermost scope.  */
  struct scope_def *inner;

  /* The last inner scope or NULL if innermost scope.  */
  struct scope_def *inner_last;

  /* Link to the next (sibling) scope.  */
  struct scope_def *next;
} *scope;
149
150
/* Structure to hold information about the scope forest built from the
   function's NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note pairs
   (see build_scope_forest).  */
typedef struct
{
  /* Number of trees in forest.  */
  int num_trees;

  /* List of tree roots.  */
  scope *trees;
} scope_forest_info;
160
/* Structure to hold information about the blocks during reordering.  */
typedef struct reorder_block_def
{
  /* First insn effectively belonging to this block, including any
     preceding inter-block insns (see record_effective_endpoints).  */
  rtx eff_head;

  /* Last insn effectively belonging to this block, e.g. a trailing
     barrier or jump table (see skip_insns_after_block).  */
  rtx eff_end;

  /* Innermost scope immediately containing this block, if any.  */
  scope scope;

  /* Next block in the computed reorder chain, or NULL at the end.  */
  basic_block next;

  /* Position of this block in the new ordering.  */
  int index;

  /* Nonzero once the block has been placed in the reorder chain.  */
  int visited;
} *reorder_block_def;

/* Access the reordering info hung off BB->aux.  */
#define RBI(BB) ((reorder_block_def) (BB)->aux)
173
174
175 /* Local function prototypes. */
176 static rtx skip_insns_after_block PARAMS ((basic_block));
177 static void record_effective_endpoints PARAMS ((void));
178 static void make_reorder_chain PARAMS ((void));
179 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
180 static rtx label_for_bb PARAMS ((basic_block));
181 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
182 static void fixup_reorder_chain PARAMS ((void));
183 static void relate_bbs_with_scopes PARAMS ((scope));
184 static scope make_new_scope PARAMS ((int, rtx));
185 static void build_scope_forest PARAMS ((scope_forest_info *));
186 static void remove_scope_notes PARAMS ((void));
187 static void insert_intra_1 PARAMS ((scope, rtx *));
188 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
189 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
190 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
191 static void free_scope_forest_1 PARAMS ((scope));
192 static void free_scope_forest PARAMS ((scope_forest_info *));
193 void dump_scope_forest PARAMS ((scope_forest_info *));
194 static void dump_scope_forest_1 PARAMS ((scope, int));
195 static rtx get_next_bb_note PARAMS ((rtx));
196 static rtx get_prev_bb_note PARAMS ((rtx));
197
198 void verify_insn_chain PARAMS ((void));
199 \f
200 /* Skip over inter-block insns occurring after BB which are typically
201 associated with BB (e.g., barriers). If there are any such insns,
202 we return the last one. Otherwise, we return the end of BB. */
203
204 static rtx
205 skip_insns_after_block (bb)
206 basic_block bb;
207 {
208 rtx insn, last_insn, next_head, prev;
209
210 next_head = NULL_RTX;
211 if (bb->index + 1 != n_basic_blocks)
212 next_head = BASIC_BLOCK (bb->index + 1)->head;
213
214 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
215 {
216 if (insn == next_head)
217 break;
218
219 switch (GET_CODE (insn))
220 {
221 case BARRIER:
222 last_insn = insn;
223 continue;
224
225 case NOTE:
226 switch (NOTE_LINE_NUMBER (insn))
227 {
228 case NOTE_INSN_LOOP_END:
229 case NOTE_INSN_BLOCK_END:
230 last_insn = insn;
231 continue;
232 case NOTE_INSN_DELETED:
233 case NOTE_INSN_DELETED_LABEL:
234 continue;
235
236 default:
237 continue;
238 break;
239 }
240 break;
241
242 case CODE_LABEL:
243 if (NEXT_INSN (insn)
244 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
245 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
246 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
247 {
248 insn = NEXT_INSN (insn);
249 last_insn = insn;
250 continue;
251 }
252 break;
253
254 default:
255 break;
256 }
257
258 break;
259 }
260 /* It is possible to hit contradicting sequence. For instance:
261
262 jump_insn
263 NOTE_INSN_LOOP_BEG
264 barrier
265
266 Where barrier belongs to jump_insn, but the note does not.
267 This can be created by removing the basic block originally
268 following NOTE_INSN_LOOP_BEG.
269
270 In such case reorder the notes. */
271 for (insn = last_insn; insn != bb->end; insn = prev)
272 {
273 prev = PREV_INSN (insn);
274 if (GET_CODE (insn) == NOTE)
275 switch (NOTE_LINE_NUMBER (insn))
276 {
277 case NOTE_INSN_LOOP_END:
278 case NOTE_INSN_BLOCK_END:
279 case NOTE_INSN_DELETED:
280 case NOTE_INSN_DELETED_LABEL:
281 continue;
282 default:
283 reorder_insns (insn, insn, last_insn);
284 }
285 }
286
287 return last_insn;
288 }
289
290
291 /* Locate the effective beginning and end of the insn chain for each
292 block, as defined by skip_insns_after_block above. */
293
294 static void
295 record_effective_endpoints ()
296 {
297 rtx next_insn = get_insns ();
298 int i;
299
300 for (i = 0; i < n_basic_blocks; ++i)
301 {
302 basic_block bb = BASIC_BLOCK (i);
303 rtx end;
304
305 RBI (bb)->eff_head = next_insn;
306 end = skip_insns_after_block (bb);
307 RBI (bb)->eff_end = end;
308 next_insn = NEXT_INSN (end);
309 }
310 }
311
312
/* Compute an ordering for the entire function, placing every basic
   block exactly once.  The ordering is recorded in RBI()->index and
   chained through RBI()->next.  (The per-subgraph work is done by
   make_reorder_chain_1.)  */

static void
make_reorder_chain ()
{
  basic_block last_block = NULL;
  basic_block prev = NULL;
  int nbb_m1 = n_basic_blocks - 1;

  /* If we've not got epilogue in RTL, we must fallthru to the exit.
     Force the last block to be at the end.  */
  /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
     end of the function for stack unwinding purposes.  */
  if (! HAVE_epilogue)
    {
      last_block = BASIC_BLOCK (nbb_m1);
      RBI (last_block)->visited = 1;
      nbb_m1 -= 1;
    }

  /* Loop until we've placed every block.  */
  do
    {
      int i;
      basic_block next = NULL;

      /* Find the next unplaced block.  */
      /* ??? Get rid of this loop, and track which blocks are not yet
	 placed more directly, so as to avoid the O(N^2) worst case.
	 Perhaps keep a doubly-linked list of all to-be-placed blocks;
	 remove from the list as we place.  The head of that list is
	 what we're looking for here.  */

      for (i = 0; i <= nbb_m1; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  if (! RBI (bb)->visited)
	    {
	      next = bb;
	      break;
	    }
	}
      if (! next)
	abort ();

      /* Greedily place the subgraph reachable from NEXT; PREV becomes
	 the last block placed so far.  */
      prev = make_reorder_chain_1 (next, prev);
    }
  while (RBI (prev)->index < nbb_m1);

  /* Terminate the chain.  */
  if (! HAVE_epilogue)
    {
      RBI (prev)->next = last_block;
      RBI (last_block)->index = RBI (prev)->index + 1;
      prev = last_block;
    }
  RBI (prev)->next = NULL;
}
372
/* A helper function for make_reorder_chain: compute an ordering for
   the subgraph beginning with block BB, appending it to the chain
   ending at PREV (or starting a new chain if PREV is NULL).  Return
   the last block placed.

   We do not follow EH edges, or non-fallthru edges to noreturn blocks.
   These are assumed to be the error condition and we wish to cluster
   all of them at the very end of the function for the benefit of cache
   locality for the rest of the function.

   ??? We could do slightly better by noticing earlier that some subgraph
   has all paths leading to noreturn functions, but for there to be more
   than one block in such a subgraph is rare.  */

static basic_block
make_reorder_chain_1 (bb, prev)
     basic_block bb;
     basic_block prev;
{
  edge e;
  basic_block next;
  rtx note;

  /* Mark this block visited.  */
  if (prev)
    {
      int new_index;

      /* Tail-recursion target: append BB after PREV and carry on
	 selecting BB's successor.  */
 restart:
      RBI (prev)->next = bb;
      new_index = RBI (prev)->index + 1;
      RBI (bb)->index = new_index;

      if (rtl_dump_file && prev->index + 1 != bb->index)
	fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
		 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
    }
  else
    RBI (bb)->index = 0;
  RBI (bb)->visited = 1;
  prev = bb;

  if (bb->succ == NULL)
    return prev;

  /* Find the most probable block.  */

  next = NULL;
  if (any_condjump_p (bb->end)
      && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
    {
      int taken, probability;
      edge e_taken, e_fall;

      probability = INTVAL (XEXP (note, 0));
      taken = probability > REG_BR_PROB_BASE / 2;

      /* Find the normal taken edge and the normal fallthru edge.

	 Note, conditional jumps with other side effects may not
	 be fully optimized.  In this case it is possible for
	 the conditional jump to branch to the same location as
	 the fallthru path.

	 We should probably work to improve optimization of that
	 case; however, it seems silly not to also deal with such
	 problems here if they happen to occur.  */

      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    e_fall = e;
	  else if (! (e->flags & EDGE_EH))
	    e_taken = e;
	}

      next = (taken ? e_taken : e_fall)->dest;
    }

  /* In the absence of a prediction, disturb things as little as possible
     by selecting the old "next" block from the list of successors.  If
     there had been a fallthru edge, that will be the one.  */
  if (! next)
    {
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->dest->index == bb->index + 1)
	  {
	    if ((e->flags & EDGE_FALLTHRU)
		|| (e->dest->succ
		    && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
	      next = e->dest;
	    break;
	  }
    }

  /* Make sure we didn't select a silly next block.  */
  if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
    next = NULL;

  /* Recurse on the successors.  Unroll the last call, as the normal
     case is exactly one or two edges, and we can tail recurse.
     NEXT (the preferred successor, if any) is deliberately placed
     last so it ends up immediately after BB in the chain.  */
  for (e = bb->succ; e; e = e->succ_next)
    if (e->dest != EXIT_BLOCK_PTR
	&& ! RBI (e->dest)->visited
	&& e->dest->succ
	&& ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
      {
	if (next)
	  {
	    prev = make_reorder_chain_1 (next, prev);
	    /* The recursion may have placed E->dest already.  */
	    next = RBI (e->dest)->visited ? NULL : e->dest;
	  }
	else
	  next = e->dest;
      }
  if (next)
    {
      /* Tail recursion: place NEXT right after the chain so far.  */
      bb = next;
      goto restart;
    }

  return prev;
}
494
495
496 /* Locate or create a label for a given basic block. */
497
498 static rtx
499 label_for_bb (bb)
500 basic_block bb;
501 {
502 rtx label = bb->head;
503
504 if (GET_CODE (label) != CODE_LABEL)
505 {
506 if (rtl_dump_file)
507 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
508 bb->index, RBI (bb)->index);
509
510 label = emit_label_before (gen_label_rtx (), label);
511 if (bb->head == RBI (bb)->eff_head)
512 RBI (bb)->eff_head = label;
513 bb->head = label;
514 }
515
516 return label;
517 }
518
519
/* Emit a jump to BB after insn AFTER and return the new jump insn.
   If BB is the exit block, emit a return insn instead; this aborts
   on targets without a return pattern.  */

static rtx
emit_jump_to_block_after (bb, after)
     basic_block bb;
     rtx after;
{
  rtx jump;

  if (bb != EXIT_BLOCK_PTR)
    {
      rtx label = label_for_bb (bb);
      jump = emit_jump_insn_after (gen_jump (label), after);
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label) += 1;
      /* Keep the insn-to-block map current if one is in use.  */
      if (basic_block_for_insn)
	set_block_for_new_insns (jump, bb);

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
		 bb->index, RBI (bb)->index);
    }
  else
    {
      /* A jump to the exit block is a return.  */
#ifdef HAVE_return
      if (! HAVE_return)
	abort ();
      jump = emit_jump_insn_after (gen_return (), after);

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting return\n");
#else
      abort ();
#endif
    }

  return jump;
}
558
559
/* Given a reorder chain (recorded in RBI()->next by make_reorder_chain),
   rearrange the insn chain to match, then patch up jumps, labels and
   edges so that control flow is preserved in the new order.  Finally,
   rewrite basic_block_info and bb->index to reflect the new order.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  last_bb = BASIC_BLOCK (0);
  bb = RBI (last_bb)->next;
  while (bb)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      /* Splice this block's effective insn range directly after the
	 previous block's (NEXT_INSN/PREV_INSN are writable here).  */
      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;

      last_bb = bb;
      bb = RBI (bb)->next;
    }
  NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
  set_last_insn (RBI (last_bb)->eff_end);

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx jump_insn, barrier_insn, bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_uncondjump_p (bb_end_insn))
	    {
	      /* If the destination is still not next, nothing to do.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		continue;

	      /* Otherwise, we can remove the jump and cleanup the edge.  */
	      tidy_fallthru_edge (e_taken, bb, e_taken->dest);
	      RBI (bb)->eff_end = skip_insns_after_block (bb);
	      RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
			 bb->index, RBI (bb)->index);
	      continue;
	    }
	  else if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
		  || (RBI (bb)->index == n_basic_blocks - 1
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      /* The inversion swaps the roles of the two edges;
			 fall through to the new-jump code below with
			 E_FALL naming the edge needing the jump.  */
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;
#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
	      || (RBI (bb)->index == n_basic_blocks - 1
		  && e_fall->dest == EXIT_BLOCK_PTR))
	    continue;

	  /* We need a new jump insn.  If the block has only one outgoing
	     edge, then we can stuff the new jump insn in directly.  */
	  if (bb->succ->succ_next == NULL)
	    {
	      e_fall->flags &= ~EDGE_FALLTHRU;

	      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
	      bb->end = jump_insn;
	      barrier_insn = emit_barrier_after (jump_insn);
	      RBI (bb)->eff_end = barrier_insn;
	      continue;
	    }
	}

      /* We got here if we need to add a new jump insn in a new block
	 across the edge e_fall.  */

      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
      barrier_insn = emit_barrier_after (jump_insn);

      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
      create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);

      nb = BASIC_BLOCK (n_basic_blocks - 1);
      nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->local_set = 0;

      /* Both endpoints are copied from BB's live-at-start; presumably
	 a block containing only a jump changes no liveness -- TODO
	 confirm whether live-at-end should instead come from the edge
	 destination.  */
      COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);

      nb->aux = xmalloc (sizeof (struct reorder_block_def));
      RBI (nb)->eff_head = nb->head;
      RBI (nb)->eff_end = barrier_insn;
      RBI (nb)->scope = RBI (bb)->scope;
      RBI (nb)->index = RBI (bb)->index + 1;
      RBI (nb)->visited = 1;
      RBI (nb)->next = RBI (bb)->next;
      RBI (bb)->next = nb;

      /* Link to new block.  */
      make_edge (NULL, nb, e_fall->dest, 0);
      redirect_edge_succ (e_fall, nb);

      /* Don't process this new block.  */
      bb = nb;

      /* Fix subsequent reorder block indices to reflect new block.  */
      while ((nb = RBI (nb)->next) != NULL)
	RBI (nb)->index += 1;
    }

  /* Put basic_block_info in the new order.  */
  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      bb->index = RBI (bb)->index;
      BASIC_BLOCK (bb->index) = bb;
    }
}
754
755
756 /* Perform sanity checks on the insn chain.
757 1. Check that next/prev pointers are consistent in both the forward and
758 reverse direction.
759 2. Count insns in chain, going both directions, and check if equal.
760 3. Check that get_last_insn () returns the actual end of chain. */
761
762 void
763 verify_insn_chain ()
764 {
765 rtx x,
766 prevx,
767 nextx;
768 int insn_cnt1,
769 insn_cnt2;
770
771 prevx = NULL;
772 insn_cnt1 = 1;
773 for (x = get_insns (); x; x = NEXT_INSN (x))
774 {
775 if (PREV_INSN (x) != prevx)
776 {
777 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
778 fprintf (stderr, "previous insn:\n");
779 debug_rtx (prevx);
780 fprintf (stderr, "current insn:\n");
781 debug_rtx (x);
782 abort ();
783 }
784 ++insn_cnt1;
785 prevx = x;
786 }
787
788 if (prevx != get_last_insn ())
789 {
790 fprintf (stderr, "last_insn corrupt.\n");
791 abort ();
792 }
793
794 nextx = NULL;
795 insn_cnt2 = 1;
796 for (x = get_last_insn (); x; x = PREV_INSN (x))
797 {
798 if (NEXT_INSN (x) != nextx)
799 {
800 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
801 fprintf (stderr, "current insn:\n");
802 debug_rtx (x);
803 fprintf (stderr, "next insn:\n");
804 debug_rtx (nextx);
805 abort ();
806 }
807 ++insn_cnt2;
808 nextx = x;
809 }
810
811 if (insn_cnt1 != insn_cnt2)
812 {
813 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
814 insn_cnt1, insn_cnt2);
815 abort ();
816 }
817 }
818
819 static rtx
820 get_next_bb_note (x)
821 rtx x;
822 {
823 while (x)
824 {
825 if (NOTE_INSN_BASIC_BLOCK_P (x))
826 return x;
827 x = NEXT_INSN (x);
828 }
829 return NULL;
830 }
831
832
833 static rtx
834 get_prev_bb_note (x)
835 rtx x;
836 {
837 while (x)
838 {
839 if (NOTE_INSN_BASIC_BLOCK_P (x))
840 return x;
841 x = PREV_INSN (x);
842 }
843 return NULL;
844 }
845
846
/* Determine and record the relationships between basic blocks and
   scopes in scope tree S: set S->bb_beg/S->bb_end for scopes contained
   in a single block, or fill S->bbs/S->num_bbs with the blocks the
   scope immediately spans.  Recurses over all inner scopes first.  */

static void
relate_bbs_with_scopes (s)
     scope s;
{
  scope p;
  int i, bbi1, bbi2, bbs_spanned;
  rtx bbnote;

  /* Process children first so their blocks are claimed before we
     decide which blocks belong immediately to S.  */
  for (p = s->inner; p; p = p->next)
    relate_bbs_with_scopes (p);

  bbi1 = bbi2 = -1;
  bbs_spanned = 0;

  /* If the begin and end notes are both inside the same basic block,
     or if they are both outside of basic blocks, then we know immediately
     how they are related.  Otherwise, we need to poke around to make the
     determination.  */
  if (s->bb_beg != s->bb_end)
    {
      if (s->bb_beg && s->bb_end)
	{
	  /* Both notes are in different bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.  */
	  bbi1 = s->bb_beg->index;
	  bbi2 = s->bb_end->index;
	  bbs_spanned = 1;
	}
      else if (! s->bb_beg)
	{
	  /* First note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_next_bb_note (s->note_beg);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
	    {
	      bbs_spanned = 0;
	      s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
	      bbi2 = s->bb_end->index;
	      s->bb_end = NULL;
	      bbs_spanned = 1;
	    }
	}
      else /* ! s->bb_end */
	{
	  /* Second note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_prev_bb_note (s->note_end);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
	    {
	      bbs_spanned = 0;
	      s->bb_end = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = s->bb_beg->index;
	      bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
	      s->bb_beg = NULL;
	      bbs_spanned = 1;
	    }
	}
    }
  else
    {
      if (s->bb_beg)
	/* Both notes are in the same bb, which implies the block
	   contains this scope.  */
	bbs_spanned = 0;
      else
	{
	  rtx x1, x2;
	  /* Both notes are outside of any bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.
	     There is a degenerate case to consider.  If the notes do not
	     span any basic blocks, then it is an empty scope that can
	     safely be deleted or ignored.  Mark these with level = -1.  */

	  x1 = get_next_bb_note (s->note_beg);
	  x2 = get_prev_bb_note (s->note_end);
	  if (! (x1 && x2))
	    {
	      s->level = -1;
	      bbs_spanned = 0;
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (x1)->index;
	      bbi2 = NOTE_BASIC_BLOCK (x2)->index;
	      bbs_spanned = 1;
	    }
	}
    }

  /* If the scope spans one or more basic blocks, we record them.  We
     only record the bbs that are immediately contained within this
     scope.  Note that if a scope is contained within a bb, we can tell
     by checking that bb_beg = bb_end and that they are non-null.  */
  if (bbs_spanned)
    {
      int j = 0;

      /* First count the blocks not yet claimed by an inner scope, so
	 we can size the array exactly.  */
      s->num_bbs = 0;
      for (i = bbi1; i <= bbi2; i++)
	if (! RBI (BASIC_BLOCK (i))->scope)
	  s->num_bbs++;

      s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
      for (i = bbi1; i <= bbi2; i++)
	{
	  basic_block curr_bb = BASIC_BLOCK (i);
	  if (! RBI (curr_bb)->scope)
	    {
	      s->bbs[j++] = curr_bb;
	      RBI (curr_bb)->scope = s;
	    }
	}
    }
  else
    s->num_bbs = 0;
}
983
984
985 /* Allocate and initialize a new scope structure with scope level LEVEL,
986 and record the NOTE beginning the scope. */
987
988 static scope
989 make_new_scope (level, note)
990 int level;
991 rtx note;
992 {
993 scope new_scope = xcalloc (1, sizeof (struct scope_def));
994 new_scope->level = level;
995 new_scope->note_beg = note;
996 return new_scope;
997 }
998
999
/* Build a forest representing the scope structure of the function,
   recording it in *FOREST.  The forest is derived from the
   NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END pairs in the insn stream;
   afterward each tree is related to the basic blocks it spans.  */

static void
build_scope_forest (forest)
     scope_forest_info *forest;
{
  rtx x;
  int level, bbi, i;
  basic_block curr_bb;
  scope root, curr_scope = 0;

  forest->num_trees = 0;
  forest->trees = NULL;
  level = -1;
  root = NULL;
  curr_bb = NULL;
  bbi = 0;
  /* Single pass over the insns, tracking which basic block (if any)
     each insn lies in and the currently open scope.  */
  for (x = get_insns (); x; x = NEXT_INSN (x))
    {
      if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
	curr_bb = BASIC_BLOCK (bbi);

      if (GET_CODE (x) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
	    {
	      if (root)
		{
		  /* Open a new scope nested inside CURR_SCOPE.  */
		  scope new_scope;
		  if (! curr_scope)
		    abort();
		  level++;
		  new_scope = make_new_scope (level, x);
		  new_scope->outer = curr_scope;
		  new_scope->next = NULL;
		  if (! curr_scope->inner)
		    {
		      curr_scope->inner = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  else
		    {
		      curr_scope->inner_last->next = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  curr_scope = curr_scope->inner_last;
		}
	      else
		{
		  /* No open tree: this note starts a new tree root.  */
		  int ntrees = forest->num_trees;
		  level++;
		  curr_scope = make_new_scope (level, x);
		  root = curr_scope;
		  forest->trees = xrealloc (forest->trees,
					    sizeof (scope) * (ntrees + 1));
		  forest->trees[forest->num_trees++] = root;
		}
	      curr_scope->bb_beg = curr_bb;
	    }
	  else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
	    {
	      /* Close the current scope and pop back to its parent.  */
	      curr_scope->bb_end = curr_bb;
	      curr_scope->note_end = x;
	      level--;
	      curr_scope = curr_scope->outer;
	      if (level == -1)
		root = NULL;
	    }
	} /* if note */

      if (curr_bb && curr_bb->end == x)
	{
	  curr_bb = NULL;
	  bbi++;
	}

    } /* for */

  for (i = 0; i < forest->num_trees; i++)
    relate_bbs_with_scopes (forest->trees[i]);
}
1082
1083
/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
   the insn chain, unlinking them in place.  Blocks whose end insn is
   such a note have their end moved back accordingly.  */

static void
remove_scope_notes ()
{
  rtx x, next;
  basic_block currbb = NULL;

  for (x = get_insns (); x; x = next)
    {
      next = NEXT_INSN (x);
      /* Track which basic block we are inside of.  */
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	currbb = NOTE_BASIC_BLOCK (x);

      if (GET_CODE (x) == NOTE
	  && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
	{
	  /* Check if the scope note happens to be the end of a bb.  */
	  if (currbb && x == currbb->end)
	    currbb->end = PREV_INSN (x);
	  /* A scope note can never head a block: blocks begin with a
	     basic-block note (or label).  */
	  if (currbb && x == currbb->head)
	    abort ();

	  /* Unlink the note from the chain.  A scope note with no
	     predecessor would mean the function starts with one,
	     which should be impossible.  */
	  if (PREV_INSN (x))
	    {
	      NEXT_INSN (PREV_INSN (x)) = next;
	      PREV_INSN (next) = PREV_INSN (x);

	      NEXT_INSN (x) = NULL;
	      PREV_INSN (x) = NULL;
	    }
	  else
	    abort ();
	}
    }
}
1122
1123
/* Insert scope note pairs for a contained scope tree S after insn *IP,
   advancing *IP past each emitted note so recursive calls nest the
   inner scopes' notes between this scope's BEG and END notes.  */

static void
insert_intra_1 (s, ip)
     scope s;
     rtx *ip;
{
  scope p;

  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
    }

  /* Emit all inner scopes' note pairs in between.  */
  for (p = s->inner; p; p = p->next)
    insert_intra_1 (p, ip);

  /* NOTE(review): this guard tests s->note_beg's block while the note
     emitted carries s->note_end's block; presumably both notes of a
     pair refer to the same lexical block -- confirm against
     build_scope_forest.  */
  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
    }
}
1148
1149
1150 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1151 scopes that are contained within BB. */
1152
1153 static void
1154 insert_intra_bb_scope_notes (bb)
1155 basic_block bb;
1156 {
1157 scope s = RBI (bb)->scope;
1158 scope p;
1159 rtx ip;
1160
1161 if (! s)
1162 return;
1163
1164 ip = bb->head;
1165 if (GET_CODE (ip) == CODE_LABEL)
1166 ip = NEXT_INSN (ip);
1167
1168 for (p = s->inner; p; p = p->next)
1169 {
1170 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1171 insert_intra_1 (p, &ip);
1172 }
1173 }
1174
1175
/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
   insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
   notes before BB2 such that the notes are correctly balanced.  If BB1 or
   BB2 is NULL, we are inserting scope notes for the first and last basic
   blocks, respectively.  */

static void
insert_inter_bb_scope_notes (bb1, bb2)
     basic_block bb1;
     basic_block bb2;
{
  rtx ip;
  scope com;

  /* It is possible that a basic block is not contained in any scope.
     In that case, we either open or close a scope but not both.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      if (! s1 && ! s2)
	return;
      if (! s1)
	bb1 = NULL;
      else if (! s2)
	bb2 = NULL;
    }

  /* Find common ancestor scope: walk both scopes outward, always
     ascending from the deeper one, until they meet.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      while (s1 != s2)
	{
	  if (! (s1 && s2))
	    abort ();
	  if (s1->level > s2->level)
	    s1 = s1->outer;
	  else if (s2->level > s1->level)
	    s2 = s2->outer;
	  else
	    {
	      s1 = s1->outer;
	      s2 = s2->outer;
	    }
	}
      com = s1;
    }
  else
    com = NULL;

  /* Close scopes: emit BLOCK_END notes after BB1 for every scope from
     BB1's scope up to (but not including) the common ancestor.  */
  if (bb1)
    {
      scope s = RBI (bb1)->scope;
      ip = RBI (bb1)->eff_end;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
	    }
	  s = s->outer;
	}
    }

  /* Open scopes: emit BLOCK_BEG notes before BB2 for every scope from
     BB2's scope up to (but not including) the common ancestor.  */
  if (bb2)
    {
      scope s = RBI (bb2)->scope;
      ip = bb2->head;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
	    }
	  s = s->outer;
	}
    }
}
1260
1261
1262 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1263 on the scope forest and the newly reordered basic blocks. */
1264
1265 static void
1266 rebuild_scope_notes (forest)
1267 scope_forest_info *forest;
1268 {
1269 int i;
1270
1271 if (forest->num_trees == 0)
1272 return;
1273
1274 /* Start by opening the scopes before the first basic block. */
1275 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1276
1277 /* Then, open and close scopes as needed between blocks. */
1278 for (i = 0; i < n_basic_blocks - 1; i++)
1279 {
1280 basic_block bb1 = BASIC_BLOCK (i);
1281 basic_block bb2 = BASIC_BLOCK (i + 1);
1282 if (RBI (bb1)->scope != RBI (bb2)->scope)
1283 insert_inter_bb_scope_notes (bb1, bb2);
1284 insert_intra_bb_scope_notes (bb1);
1285 }
1286
1287 /* Finally, close the scopes after the last basic block. */
1288 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1289 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1290 }
1291
1292
1293 /* Free the storage associated with the scope tree at S. */
1294
1295 static void
1296 free_scope_forest_1 (s)
1297 scope s;
1298 {
1299 scope p, next;
1300
1301 for (p = s->inner; p; p = next)
1302 {
1303 next = p->next;
1304 free_scope_forest_1 (p);
1305 }
1306
1307 if (s->bbs)
1308 free (s->bbs);
1309 free (s);
1310 }
1311
1312
1313 /* Free the storage associated with the scope forest. */
1314
1315 static void
1316 free_scope_forest (forest)
1317 scope_forest_info *forest;
1318 {
1319 int i;
1320 for (i = 0; i < forest->num_trees; i++)
1321 free_scope_forest_1 (forest->trees[i]);
1322 }
1323
1324
1325 /* Visualize the scope forest. */
1326
1327 void
1328 dump_scope_forest (forest)
1329 scope_forest_info *forest;
1330 {
1331 if (forest->num_trees == 0)
1332 fprintf (stderr, "\n< Empty scope forest >\n");
1333 else
1334 {
1335 int i;
1336 fprintf (stderr, "\n< Scope forest >\n");
1337 for (i = 0; i < forest->num_trees; i++)
1338 dump_scope_forest_1 (forest->trees[i], 0);
1339 }
1340 }
1341
1342
1343 /* Recursive portion of dump_scope_forest. */
1344
1345 static void
1346 dump_scope_forest_1 (s, indent)
1347 scope s;
1348 int indent;
1349 {
1350 scope p;
1351 int i;
1352
1353 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1354 && RBI (s->bb_beg)->scope
1355 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1356 {
1357 fprintf (stderr, "%*s", indent, "");
1358 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1359 }
1360
1361 fprintf (stderr, "%*s", indent, "");
1362 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1363 (PTR) NOTE_BLOCK (s->note_beg));
1364
1365 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1366 for (i = 0; i < s->num_bbs; i++)
1367 fprintf (stderr, " %d", s->bbs[i]->index);
1368 fprintf (stderr, "\n");
1369
1370 for (p = s->inner; p; p = p->next)
1371 dump_scope_forest_1 (p, indent + 2);
1372
1373 fprintf (stderr, "%*s", indent, "");
1374 fprintf (stderr, "}\n");
1375 }
1376
1377
/* Reorder basic blocks.  The main entry point to this file.

   Pipeline: allocate per-block reorder data in each block's AUX field,
   capture the lexical-scope structure and strip its insn notes, compute
   the new block order and rewrite the insn chain to match, then re-emit
   balanced scope notes for the new layout and release all temporaries.
   The phase ordering is load-bearing: scope notes must be removed before
   the chain is rearranged and rebuilt only afterwards.  */

void
reorder_basic_blocks ()
{
  scope_forest_info forest;
  int i;

  /* Nothing to reorder with fewer than two blocks.  */
  if (n_basic_blocks <= 1)
    return;

  /* Hang zero-initialized reorder_block_def data off every block's AUX
     field, including the exit block's.  */
  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));

  /* Record the scope structure, then delete the BLOCK_BEG/BLOCK_END
     notes so they do not constrain the insn-chain rearrangement.  */
  build_scope_forest (&forest);
  remove_scope_notes ();

  record_effective_endpoints ();
  make_reorder_chain ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Re-emit balanced scope notes for the new block order, free the
     forest, and let reorder_blocks rebuild the BLOCK tree from them.  */
  rebuild_scope_notes (&forest);
  free_scope_forest (&forest);
  reorder_blocks ();

  /* Release the per-block AUX data allocated above.  */
  for (i = 0; i < n_basic_blocks; i++)
    free (BASIC_BLOCK (i)->aux);

  free (EXIT_BLOCK_PTR->aux);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}