bb-reorder.c (skip_insns_after_block): Get past the line number notes.
[gcc.git] / gcc / bb-reorder.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* References:
22
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
25
26 TODO:
27
28 (1) Consider:
29
30 if (p) goto A; // predict taken
31 foo ();
32 A:
33 if (q) goto B; // predict taken
34 bar ();
35 B:
36 baz ();
37 return;
38
39 We'll currently reorder this as
40
41 if (!p) goto C;
42 A:
43 if (!q) goto D;
44 B:
45 baz ();
46 return;
47 D:
48 bar ();
49 goto B;
50 C:
51 foo ();
52 goto A;
53
54 A better ordering is
55
56 if (!p) goto C;
57 if (!q) goto D;
58 B:
59 baz ();
60 return;
61 C:
62 foo ();
63 if (q) goto B;
64 D:
65 bar ();
66 goto B;
67
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
71
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
74
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
78
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
81 */
82
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "toplev.h"
96 #include "recog.h"
97 #include "expr.h"
98 #include "obstack.h"
99
100
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
103 #endif
104
105
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
110
111 extern struct obstack flow_obstack;
112
113
114 /* Structure to hold information about lexical scopes. */
115 typedef struct scope_def
116 {
117 int level;
118
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
120 rtx note_beg;
121
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
123 rtx note_end;
124
125 /* The bb containing note_beg (if any). */
126 basic_block bb_beg;
127
128 /* The bb containing note_end (if any). */
129 basic_block bb_end;
130
131 /* List of basic blocks contained within this scope. */
132 basic_block *bbs;
133
134 /* Number of blocks contained within this scope. */
135 int num_bbs;
136
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
139
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
142
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
145
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
148 } *scope;
149
150
151 /* Structure to hold information about the scope forest. */
152 typedef struct
153 {
154 /* Number of trees in forest. */
155 int num_trees;
156
157 /* List of tree roots. */
158 scope *trees;
159 } scope_forest_info;
160
/* Structure to hold information about the blocks during reordering.
   One of these is attached to each basic block's AUX field (see the
   RBI accessor below) for the duration of the pass.  */
typedef struct reorder_block_def
{
  /* First insn of the span of the insn chain that travels with this
     block, including leading inter-block insns (set by
     record_effective_endpoints).  */
  rtx eff_head;

  /* Last insn travelling with this block, e.g. a trailing barrier or
     jump table, as located by skip_insns_after_block.  */
  rtx eff_end;

  /* Innermost lexical scope containing this block, or NULL
     (set by relate_bbs_with_scopes).  */
  scope scope;

  /* Next block in the computed reorder chain, NULL at the end.  */
  basic_block next;

  /* Position of this block in the new ordering.  */
  int index;

  /* Nonzero once the block has been placed in the chain.  */
  int visited;
} *reorder_block_def;

/* Access the reordering record hanging off basic block BB.  */
#define RBI(BB) ((reorder_block_def) (BB)->aux)
173
174
175 /* Local function prototypes. */
176 static rtx skip_insns_after_block PARAMS ((basic_block));
177 static void record_effective_endpoints PARAMS ((void));
178 static void make_reorder_chain PARAMS ((void));
179 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
180 static rtx label_for_bb PARAMS ((basic_block));
181 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
182 static void fixup_reorder_chain PARAMS ((void));
183 static void relate_bbs_with_scopes PARAMS ((scope));
184 static scope make_new_scope PARAMS ((int, rtx));
185 static void build_scope_forest PARAMS ((scope_forest_info *));
186 static void remove_scope_notes PARAMS ((void));
187 static void insert_intra_1 PARAMS ((scope, rtx *));
188 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
189 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
190 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
191 static void free_scope_forest_1 PARAMS ((scope));
192 static void free_scope_forest PARAMS ((scope_forest_info *));
193 void dump_scope_forest PARAMS ((scope_forest_info *));
194 static void dump_scope_forest_1 PARAMS ((scope, int));
195 static rtx get_next_bb_note PARAMS ((rtx));
196 static rtx get_prev_bb_note PARAMS ((rtx));
197
198 void verify_insn_chain PARAMS ((void));
199 \f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head;

  /* Head insn of the following block, if any; the scan never crosses
     into it.  */
  next_head = NULL_RTX;
  if (bb->index + 1 != n_basic_blocks)
    next_head = BASIC_BLOCK (bb->index + 1)->head;

  /* Walk forward from BB's end.  LAST_INSN tracks the last insn seen
     so far that should stay attached to BB.  */
  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* Skipped but not claimed: deleted notes neither end the
		 scan nor extend BB's span.  */
	      continue;

	    default:
	      /* Make line notes attached to the successor block unless
		 they are followed by something attached to the
		 predecessor block.  These notes remained after removing
		 code in the predecessor block and thus should be kept
		 together.  */
	      if (NOTE_LINE_NUMBER (insn) >= 0)
		continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by a jump table belongs with
	     that table; claim the pair for BB and keep scanning past
	     the table.  */
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Anything that fell through the switch terminates the scan.  */
      break;
    }

  return last_insn;
}
268
269
270 /* Locate the effective beginning and end of the insn chain for each
271 block, as defined by skip_insns_after_block above. */
272
273 static void
274 record_effective_endpoints ()
275 {
276 rtx next_insn = get_insns ();
277 int i;
278
279 for (i = 0; i < n_basic_blocks; ++i)
280 {
281 basic_block bb = BASIC_BLOCK (i);
282 rtx end;
283
284 RBI (bb)->eff_head = next_insn;
285 end = skip_insns_after_block (bb);
286 RBI (bb)->eff_end = end;
287 next_insn = NEXT_INSN (end);
288 }
289 }
290
291
/* Compute an ordering for all blocks of the current function.  Record
   the ordering in RBI()->index and chain blocks through RBI()->next.  */

static void
make_reorder_chain ()
{
  basic_block last_block = NULL;
  basic_block prev = NULL;
  int nbb_m1 = n_basic_blocks - 1;

  /* If we've not got epilogue in RTL, we must fallthru to the exit.
     Force the last block to be at the end.  */
  /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
     end of the function for stack unwinding purposes.  */
  if (! HAVE_epilogue)
    {
      last_block = BASIC_BLOCK (nbb_m1);
      RBI (last_block)->visited = 1;
      nbb_m1 -= 1;
    }

  /* Loop until we've placed every block.  */
  do
    {
      int i;
      basic_block next = NULL;

      /* Find the next unplaced block.  */
      /* ??? Get rid of this loop, and track which blocks are not yet
	 placed more directly, so as to avoid the O(N^2) worst case.
	 Perhaps keep a doubly-linked list of all to-be-placed blocks;
	 remove from the list as we place.  The head of that list is
	 what we're looking for here.  */

      for (i = 0; i <= nbb_m1; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  if (! RBI (bb)->visited)
	    {
	      next = bb;
	      break;
	    }
	}
      if (! next)
	abort ();

      /* Place NEXT and greedily follow its likely successors; PREV
	 becomes the last block placed so far.  */
      prev = make_reorder_chain_1 (next, prev);
    }
  while (RBI (prev)->index < nbb_m1);

  /* Terminate the chain.  */
  if (! HAVE_epilogue)
    {
      RBI (prev)->next = last_block;
      RBI (last_block)->index = RBI (prev)->index + 1;
      prev = last_block;
    }
  RBI (prev)->next = NULL;
}
351
/* A helper function for make_reorder_chain.

   Place BB immediately after PREV in the chain, then greedily follow
   BB's most probable successor, tail-recursing through the chosen path.
   Returns the last block placed.

   We do not follow EH edges, or non-fallthru edges to noreturn blocks.
   These are assumed to be the error condition and we wish to cluster
   all of them at the very end of the function for the benefit of cache
   locality for the rest of the function.

   ??? We could do slightly better by noticing earlier that some subgraph
   has all paths leading to noreturn functions, but for there to be more
   than one block in such a subgraph is rare.  */

static basic_block
make_reorder_chain_1 (bb, prev)
     basic_block bb;
     basic_block prev;
{
  edge e;
  basic_block next;
  rtx note;

  /* Mark this block visited.  */
  if (prev)
    {
      int new_index;

 restart:
      RBI (prev)->next = bb;
      new_index = RBI (prev)->index + 1;
      RBI (bb)->index = new_index;

      if (rtl_dump_file && prev->index + 1 != bb->index)
	fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
		 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
    }
  else
    RBI (bb)->index = 0;
  RBI (bb)->visited = 1;
  prev = bb;

  if (bb->succ == NULL)
    return prev;

  /* Find the most probable block.  */

  next = NULL;
  if (any_condjump_p (bb->end)
      && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
    {
      int taken, probability;
      edge e_taken, e_fall;

      probability = INTVAL (XEXP (note, 0));
      taken = probability > REG_BR_PROB_BASE / 2;

      /* Find the normal taken edge and the normal fallthru edge.

	 Note, conditional jumps with other side effects may not
	 be fully optimized.  In this case it is possible for
	 the conditional jump to branch to the same location as
	 the fallthru path.

	 We should probably work to improve optimization of that
	 case; however, it seems silly not to also deal with such
	 problems here if they happen to occur.  */

      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    e_fall = e;
	  else if (! (e->flags & EDGE_EH))
	    e_taken = e;
	}

      /* NOTE(review): this assumes a condjump always has the selected
	 edge present; if only EH edges accompany the fallthru (or vice
	 versa) the chosen pointer could be NULL here — confirm.  */
      next = (taken ? e_taken : e_fall)->dest;
    }

  /* In the absence of a prediction, disturb things as little as possible
     by selecting the old "next" block from the list of successors.  If
     there had been a fallthru edge, that will be the one.  */
  if (! next)
    {
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->dest->index == bb->index + 1)
	  {
	    if ((e->flags & EDGE_FALLTHRU)
		|| (e->dest->succ
		    && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
	      next = e->dest;
	    break;
	  }
    }

  /* Make sure we didn't select a silly next block.  */
  if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
    next = NULL;

  /* Recurse on the successors.  Unroll the last call, as the normal
     case is exactly one or two edges, and we can tail recurse.  */
  for (e = bb->succ; e; e = e->succ_next)
    if (e->dest != EXIT_BLOCK_PTR
	&& ! RBI (e->dest)->visited
	&& e->dest->succ
	&& ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
      {
	if (next)
	  {
	    /* Place the preferred path first; the recursive call may
	       have placed E->dest already, so re-check.  */
	    prev = make_reorder_chain_1 (next, prev);
	    next = RBI (e->dest)->visited ? NULL : e->dest;
	  }
	else
	  next = e->dest;
      }
  if (next)
    {
      /* Tail recursion: continue the chain with NEXT.  */
      bb = next;
      goto restart;
    }

  return prev;
}
473
474
475 /* Locate or create a label for a given basic block. */
476
477 static rtx
478 label_for_bb (bb)
479 basic_block bb;
480 {
481 rtx label = bb->head;
482
483 if (GET_CODE (label) != CODE_LABEL)
484 {
485 if (rtl_dump_file)
486 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
487 bb->index, RBI (bb)->index);
488
489 label = emit_label_before (gen_label_rtx (), label);
490 if (bb->head == RBI (bb)->eff_head)
491 RBI (bb)->eff_head = label;
492 bb->head = label;
493 }
494
495 return label;
496 }
497
498
/* Emit a jump to BB after insn AFTER, and return the new jump insn.
   If BB is the exit block, emit a return insn instead (aborting if the
   target has no return pattern).  */

static rtx
emit_jump_to_block_after (bb, after)
     basic_block bb;
     rtx after;
{
  rtx jump;

  if (bb != EXIT_BLOCK_PTR)
    {
      rtx label = label_for_bb (bb);
      jump = emit_jump_insn_after (gen_jump (label), after);
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label) += 1;
      /* Keep the insn-to-block map current, if one exists.  */
      if (basic_block_for_insn)
	set_block_for_new_insns (jump, bb);

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
		 bb->index, RBI (bb)->index);
    }
  else
    {
#ifdef HAVE_epilogue
#endif
#ifdef HAVE_return
      if (! HAVE_return)
	abort ();
      jump = emit_jump_insn_after (gen_return (), after);

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting return\n");
#else
      abort ();
#endif
    }

  return jump;
}
537
538
/* Given a reorder chain, rearrange the code to match.

   First the insn chain is respliced block by block in RBI()->next
   order; then jumps and labels are added or removed so the fallthru
   edges of the new layout are honest.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  last_bb = BASIC_BLOCK (0);
  bb = RBI (last_bb)->next;
  while (bb)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      /* Splice this block's span directly after the previous one.  */
      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;

      last_bb = bb;
      bb = RBI (bb)->next;
    }
  NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
  set_last_insn (RBI (last_bb)->eff_end);

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx jump_insn, barrier_insn, bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_uncondjump_p (bb_end_insn))
	    {
	      /* If the destination is still not next, nothing to do.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		continue;

	      /* Otherwise, we can remove the jump and cleanup the edge.
		 The endpoints of both blocks must be recomputed since
		 the barrier after the jump disappears.  */
	      tidy_fallthru_edge (e_taken, bb, e_taken->dest);
	      RBI (bb)->eff_end = skip_insns_after_block (bb);
	      RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
			 bb->index, RBI (bb)->index);
	      continue;
	    }
	  else if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
		  || (RBI (bb)->index == n_basic_blocks - 1
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      /* The invert succeeded; swap the roles of the two
			 edges and fall through to the jump-emitting code
			 below for the (new) fallthru edge.  */
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;
#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
	      || (RBI (bb)->index == n_basic_blocks - 1
		  && e_fall->dest == EXIT_BLOCK_PTR))
	    continue;

	  /* We need a new jump insn.  If the block has only one outgoing
	     edge, then we can stuff the new jump insn in directly.  */
	  if (bb->succ->succ_next == NULL)
	    {
	      e_fall->flags &= ~EDGE_FALLTHRU;

	      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
	      bb->end = jump_insn;
	      barrier_insn = emit_barrier_after (jump_insn);
	      RBI (bb)->eff_end = barrier_insn;
	      continue;
	    }
	}

      /* We got here if we need to add a new jump insn in a new block
	 across the edge e_fall.  */

      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
      barrier_insn = emit_barrier_after (jump_insn);

      /* Create the new block holding just the jump, and set up its
	 reorder record so the chain walk keeps working.  */
      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
      create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);

      nb = BASIC_BLOCK (n_basic_blocks - 1);
      nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->local_set = 0;

      COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);

      nb->aux = xmalloc (sizeof (struct reorder_block_def));
      RBI (nb)->eff_head = nb->head;
      RBI (nb)->eff_end = barrier_insn;
      RBI (nb)->scope = RBI (bb)->scope;
      RBI (nb)->index = RBI (bb)->index + 1;
      RBI (nb)->visited = 1;
      RBI (nb)->next = RBI (bb)->next;
      RBI (bb)->next = nb;

      /* Link to new block.  */
      make_edge (NULL, nb, e_fall->dest, 0);
      redirect_edge_succ (e_fall, nb);

      /* Don't process this new block.  */
      bb = nb;

      /* Fix subsequent reorder block indices to reflect new block.  */
      while ((nb = RBI (nb)->next) != NULL)
	RBI (nb)->index += 1;
    }

  /* Put basic_block_info in the new order.  */
  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      bb->index = RBI (bb)->index;
      BASIC_BLOCK (bb->index) = bb;
    }
}
733
734
735 /* Perform sanity checks on the insn chain.
736 1. Check that next/prev pointers are consistent in both the forward and
737 reverse direction.
738 2. Count insns in chain, going both directions, and check if equal.
739 3. Check that get_last_insn () returns the actual end of chain. */
740
741 void
742 verify_insn_chain ()
743 {
744 rtx x,
745 prevx,
746 nextx;
747 int insn_cnt1,
748 insn_cnt2;
749
750 prevx = NULL;
751 insn_cnt1 = 1;
752 for (x = get_insns (); x; x = NEXT_INSN (x))
753 {
754 if (PREV_INSN (x) != prevx)
755 {
756 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
757 fprintf (stderr, "previous insn:\n");
758 debug_rtx (prevx);
759 fprintf (stderr, "current insn:\n");
760 debug_rtx (x);
761 abort ();
762 }
763 ++insn_cnt1;
764 prevx = x;
765 }
766
767 if (prevx != get_last_insn ())
768 {
769 fprintf (stderr, "last_insn corrupt.\n");
770 abort ();
771 }
772
773 nextx = NULL;
774 insn_cnt2 = 1;
775 for (x = get_last_insn (); x; x = PREV_INSN (x))
776 {
777 if (NEXT_INSN (x) != nextx)
778 {
779 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
780 fprintf (stderr, "current insn:\n");
781 debug_rtx (x);
782 fprintf (stderr, "next insn:\n");
783 debug_rtx (nextx);
784 abort ();
785 }
786 ++insn_cnt2;
787 nextx = x;
788 }
789
790 if (insn_cnt1 != insn_cnt2)
791 {
792 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
793 insn_cnt1, insn_cnt2);
794 abort ();
795 }
796 }
797
798 static rtx
799 get_next_bb_note (x)
800 rtx x;
801 {
802 while (x)
803 {
804 if (NOTE_INSN_BASIC_BLOCK_P (x))
805 return x;
806 x = NEXT_INSN (x);
807 }
808 return NULL;
809 }
810
811
812 static rtx
813 get_prev_bb_note (x)
814 rtx x;
815 {
816 while (x)
817 {
818 if (NOTE_INSN_BASIC_BLOCK_P (x))
819 return x;
820 x = PREV_INSN (x);
821 }
822 return NULL;
823 }
824
825
/* Determine and record the relationships between basic blocks and
   scopes in scope tree S.  Sets S->bb_beg/bb_end, fills S->bbs with the
   blocks immediately contained in S, and tags each such block's
   RBI()->scope.  Recurses over all inner scopes first.  */

static void
relate_bbs_with_scopes (s)
     scope s;
{
  scope p;
  int i, bbi1, bbi2, bbs_spanned;
  rtx bbnote;

  /* Children first, so inner scopes claim their blocks before we
     consider what S itself spans.  */
  for (p = s->inner; p; p = p->next)
    relate_bbs_with_scopes (p);

  bbi1 = bbi2 = -1;
  bbs_spanned = 0;

  /* If the begin and end notes are both inside the same basic block,
     or if they are both outside of basic blocks, then we know immediately
     how they are related.  Otherwise, we need to poke around to make the
     determination.  */
  if (s->bb_beg != s->bb_end)
    {
      if (s->bb_beg && s->bb_end)
	{
	  /* Both notes are in different bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.  */
	  bbi1 = s->bb_beg->index;
	  bbi2 = s->bb_end->index;
	  bbs_spanned = 1;
	}
      else if (! s->bb_beg)
	{
	  /* First note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_next_bb_note (s->note_beg);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
	    {
	      bbs_spanned = 0;
	      s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
	      bbi2 = s->bb_end->index;
	      s->bb_end = NULL;
	      bbs_spanned = 1;
	    }
	}
      else /* ! s->bb_end */
	{
	  /* Second note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_prev_bb_note (s->note_end);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
	    {
	      bbs_spanned = 0;
	      s->bb_end = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = s->bb_beg->index;
	      bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
	      s->bb_beg = NULL;
	      bbs_spanned = 1;
	    }
	}
    }
  else
    {
      if (s->bb_beg)
	/* Both notes are in the same bb, which implies the block
	   contains this scope.  */
	bbs_spanned = 0;
      else
	{
	  rtx x1, x2;
	  /* Both notes are outside of any bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.
	     There is a degenerate case to consider.  If the notes do not
	     span any basic blocks, then it is an empty scope that can
	     safely be deleted or ignored.  Mark these with level = -1.  */

	  x1 = get_next_bb_note (s->note_beg);
	  x2 = get_prev_bb_note (s->note_end);
	  if (! (x1 && x2))
	    {
	      s->level = -1;
	      bbs_spanned = 0;
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (x1)->index;
	      bbi2 = NOTE_BASIC_BLOCK (x2)->index;
	      bbs_spanned = 1;
	    }
	}
    }

  /* If the scope spans one or more basic blocks, we record them.  We
     only record the bbs that are immediately contained within this
     scope.  Note that if a scope is contained within a bb, we can tell
     by checking that bb_beg = bb_end and that they are non-null.  */
  if (bbs_spanned)
    {
      int j = 0;

      /* First count the unclaimed blocks in [bbi1, bbi2] (blocks
	 already claimed by an inner scope are skipped), then record
	 them and mark each as belonging to S.  */
      s->num_bbs = 0;
      for (i = bbi1; i <= bbi2; i++)
	if (! RBI (BASIC_BLOCK (i))->scope)
	  s->num_bbs++;

      s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
      for (i = bbi1; i <= bbi2; i++)
	{
	  basic_block curr_bb = BASIC_BLOCK (i);
	  if (! RBI (curr_bb)->scope)
	    {
	      s->bbs[j++] = curr_bb;
	      RBI (curr_bb)->scope = s;
	    }
	}
    }
  else
    s->num_bbs = 0;
}
962
963
964 /* Allocate and initialize a new scope structure with scope level LEVEL,
965 and record the NOTE beginning the scope. */
966
967 static scope
968 make_new_scope (level, note)
969 int level;
970 rtx note;
971 {
972 scope new_scope = xcalloc (1, sizeof (struct scope_def));
973 new_scope->level = level;
974 new_scope->note_beg = note;
975 return new_scope;
976 }
977
978
/* Build a forest representing the scope structure of the function,
   from the NOTE_INSN_BLOCK_BEG/END notes in the insn stream.  Fills in
   *FOREST and calls relate_bbs_with_scopes on every tree.  */

static void
build_scope_forest (forest)
     scope_forest_info *forest;
{
  rtx x;
  int level, bbi, i;
  basic_block curr_bb;
  scope root, curr_scope = 0;

  forest->num_trees = 0;
  forest->trees = NULL;
  level = -1;
  root = NULL;
  curr_bb = NULL;
  bbi = 0;
  for (x = get_insns (); x; x = NEXT_INSN (x))
    {
      /* Track which basic block, if any, insn X lies in.  */
      if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
	curr_bb = BASIC_BLOCK (bbi);

      if (GET_CODE (x) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
	    {
	      if (root)
		{
		  /* Open a child of the current scope.  */
		  scope new_scope;
		  if (! curr_scope)
		    abort();
		  level++;
		  new_scope = make_new_scope (level, x);
		  new_scope->outer = curr_scope;
		  new_scope->next = NULL;
		  if (! curr_scope->inner)
		    {
		      curr_scope->inner = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  else
		    {
		      curr_scope->inner_last->next = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  curr_scope = curr_scope->inner_last;
		}
	      else
		{
		  /* No open scope: this BLOCK_BEG starts a new tree in
		     the forest.  */
		  int ntrees = forest->num_trees;
		  level++;
		  curr_scope = make_new_scope (level, x);
		  root = curr_scope;
		  forest->trees = xrealloc (forest->trees,
					    sizeof (scope) * (ntrees + 1));
		  forest->trees[forest->num_trees++] = root;
		}
	      curr_scope->bb_beg = curr_bb;
	    }
	  else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
	    {
	      /* Close the current scope and pop back to its parent;
		 when the outermost scope closes, the tree is done.  */
	      curr_scope->bb_end = curr_bb;
	      curr_scope->note_end = x;
	      level--;
	      curr_scope = curr_scope->outer;
	      if (level == -1)
		root = NULL;
	    }
	} /* if note */

      if (curr_bb && curr_bb->end == x)
	{
	  curr_bb = NULL;
	  bbi++;
	}

    } /* for */

  for (i = 0; i < forest->num_trees; i++)
    relate_bbs_with_scopes (forest->trees[i]);
}
1061
1062
/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
   the insn chain.  The scope forest built beforehand retains the
   information; the notes are re-emitted later by rebuild_scope_notes.  */

static void
remove_scope_notes ()
{
  rtx x, next;
  basic_block currbb = NULL;

  for (x = get_insns (); x; x = next)
    {
      next = NEXT_INSN (x);
      /* Track the block we are currently inside of.  */
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	currbb = NOTE_BASIC_BLOCK (x);

      if (GET_CODE (x) == NOTE
	  && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
	{
	  /* Check if the scope note happens to be the end of a bb.  */
	  if (currbb && x == currbb->end)
	    currbb->end = PREV_INSN (x);
	  /* A scope note can never head a block -- the basic-block
	     note precedes it.  */
	  if (currbb && x == currbb->head)
	    abort ();

	  /* Unlink X from the chain.  A scope note always has a
	     predecessor (at least the function's first insn).  */
	  if (PREV_INSN (x))
	    {
	      NEXT_INSN (PREV_INSN (x)) = next;
	      PREV_INSN (next) = PREV_INSN (x);

	      NEXT_INSN (x) = NULL;
	      PREV_INSN (x) = NULL;
	    }
	  else
	    abort ();
	}
    }
}
1101
1102
/* Insert scope note pairs for a contained scope tree S after insn *IP,
   advancing *IP past each emitted note.  Emission is gated on the BEG
   note having a BLOCK: the END note is emitted iff the BEG note was,
   so the pair always stays balanced.  */

static void
insert_intra_1 (s, ip)
     scope s;
     rtx *ip;
{
  scope p;

  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
    }

  /* Nested scopes go between the BEG and END notes.  */
  for (p = s->inner; p; p = p->next)
    insert_intra_1 (p, ip);

  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
    }
}
1127
1128
1129 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1130 scopes that are contained within BB. */
1131
1132 static void
1133 insert_intra_bb_scope_notes (bb)
1134 basic_block bb;
1135 {
1136 scope s = RBI (bb)->scope;
1137 scope p;
1138 rtx ip;
1139
1140 if (! s)
1141 return;
1142
1143 ip = bb->head;
1144 if (GET_CODE (ip) == CODE_LABEL)
1145 ip = NEXT_INSN (ip);
1146
1147 for (p = s->inner; p; p = p->next)
1148 {
1149 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1150 insert_intra_1 (p, &ip);
1151 }
1152 }
1153
1154
/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
   insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
   notes before BB2 such that the notes are correctly balanced.  If BB1 or
   BB2 is NULL, we are inserting scope notes for the first and last basic
   blocks, respectively.  */

static void
insert_inter_bb_scope_notes (bb1, bb2)
     basic_block bb1;
     basic_block bb2;
{
  rtx ip;
  scope com;

  /* It is possible that a basic block is not contained in any scope.
     In that case, we either open or close a scope but not both.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      if (! s1 && ! s2)
	return;
      if (! s1)
	bb1 = NULL;
      else if (! s2)
	bb2 = NULL;
    }

  /* Find common ancestor scope: walk both scope chains upward,
     always raising the deeper one, until they meet.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      while (s1 != s2)
	{
	  if (! (s1 && s2))
	    abort ();
	  if (s1->level > s2->level)
	    s1 = s1->outer;
	  else if (s2->level > s1->level)
	    s2 = s2->outer;
	  else
	    {
	      s1 = s1->outer;
	      s2 = s2->outer;
	    }
	}
      com = s1;
    }
  else
    com = NULL;

  /* Close scopes: emit an END note after BB1 for every scope from
     BB1's scope up to (but excluding) the common ancestor.  */
  if (bb1)
    {
      scope s = RBI (bb1)->scope;
      ip = RBI (bb1)->eff_end;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
	    }
	  s = s->outer;
	}
    }

  /* Open scopes: emit a BEG note before BB2 for every scope from
     BB2's scope up to (but excluding) the common ancestor.  */
  if (bb2)
    {
      scope s = RBI (bb2)->scope;
      ip = bb2->head;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
	    }
	  s = s->outer;
	}
    }
}
1239
1240
1241 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1242 on the scope forest and the newly reordered basic blocks. */
1243
1244 static void
1245 rebuild_scope_notes (forest)
1246 scope_forest_info *forest;
1247 {
1248 int i;
1249
1250 if (forest->num_trees == 0)
1251 return;
1252
1253 /* Start by opening the scopes before the first basic block. */
1254 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1255
1256 /* Then, open and close scopes as needed between blocks. */
1257 for (i = 0; i < n_basic_blocks - 1; i++)
1258 {
1259 basic_block bb1 = BASIC_BLOCK (i);
1260 basic_block bb2 = BASIC_BLOCK (i + 1);
1261 if (RBI (bb1)->scope != RBI (bb2)->scope)
1262 insert_inter_bb_scope_notes (bb1, bb2);
1263 insert_intra_bb_scope_notes (bb1);
1264 }
1265
1266 /* Finally, close the scopes after the last basic block. */
1267 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1268 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1269 }
1270
1271
1272 /* Free the storage associated with the scope tree at S. */
1273
1274 static void
1275 free_scope_forest_1 (s)
1276 scope s;
1277 {
1278 scope p, next;
1279
1280 for (p = s->inner; p; p = next)
1281 {
1282 next = p->next;
1283 free_scope_forest_1 (p);
1284 }
1285
1286 if (s->bbs)
1287 free (s->bbs);
1288 free (s);
1289 }
1290
1291
1292 /* Free the storage associated with the scope forest. */
1293
1294 static void
1295 free_scope_forest (forest)
1296 scope_forest_info *forest;
1297 {
1298 int i;
1299 for (i = 0; i < forest->num_trees; i++)
1300 free_scope_forest_1 (forest->trees[i]);
1301 }
1302
1303
1304 /* Visualize the scope forest. */
1305
1306 void
1307 dump_scope_forest (forest)
1308 scope_forest_info *forest;
1309 {
1310 if (forest->num_trees == 0)
1311 fprintf (stderr, "\n< Empty scope forest >\n");
1312 else
1313 {
1314 int i;
1315 fprintf (stderr, "\n< Scope forest >\n");
1316 for (i = 0; i < forest->num_trees; i++)
1317 dump_scope_forest_1 (forest->trees[i], 0);
1318 }
1319 }
1320
1321
/* Recursive portion of dump_scope_forest.  Print scope S and its
   children to stderr, indented by INDENT columns.  */

static void
dump_scope_forest_1 (s, indent)
     scope s;
     int indent;
{
  scope p;
  int i;

  /* A scope nested directly inside a single basic block is tagged
     with that block's number.  */
  if (s->bb_beg != NULL && s->bb_beg == s->bb_end
      && RBI (s->bb_beg)->scope
      && RBI (s->bb_beg)->scope->level + 1 == s->level)
    {
      fprintf (stderr, "%*s", indent, "");
      fprintf (stderr, "BB%d:\n", s->bb_beg->index);
    }

  fprintf (stderr, "%*s", indent, "");
  fprintf (stderr, "{ level %d (block %p)\n", s->level,
	   (PTR) NOTE_BLOCK (s->note_beg));

  /* List the blocks immediately contained in this scope.  */
  fprintf (stderr, "%*s%s", indent, "", "bbs:");
  for (i = 0; i < s->num_bbs; i++)
    fprintf (stderr, " %d", s->bbs[i]->index);
  fprintf (stderr, "\n");

  for (p = s->inner; p; p = p->next)
    dump_scope_forest_1 (p, indent + 2);

  fprintf (stderr, "%*s", indent, "");
  fprintf (stderr, "}\n");
}
1355
1356
1357 /* Reorder basic blocks. The main entry point to this file. */
1358
1359 void
1360 reorder_basic_blocks ()
1361 {
1362 scope_forest_info forest;
1363 int i;
1364
1365 if (n_basic_blocks <= 1)
1366 return;
1367
1368 for (i = 0; i < n_basic_blocks; i++)
1369 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1370
1371 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
1372
1373 build_scope_forest (&forest);
1374 remove_scope_notes ();
1375
1376 record_effective_endpoints ();
1377 make_reorder_chain ();
1378 fixup_reorder_chain ();
1379
1380 #ifdef ENABLE_CHECKING
1381 verify_insn_chain ();
1382 #endif
1383
1384 rebuild_scope_notes (&forest);
1385 free_scope_forest (&forest);
1386 reorder_blocks ();
1387
1388 for (i = 0; i < n_basic_blocks; i++)
1389 free (BASIC_BLOCK (i)->aux);
1390
1391 free (EXIT_BLOCK_PTR->aux);
1392
1393 #ifdef ENABLE_CHECKING
1394 verify_flow_info ();
1395 #endif
1396 }