gcc/bb-reorder.c  (gcc.git, from the commit "Put phi nodes after NOTE_INSN_BASIC_BLOCK.")
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* References:
22
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
25
26 TODO:
27
28 (1) Consider:
29
30 if (p) goto A; // predict taken
31 foo ();
32 A:
33 if (q) goto B; // predict taken
34 bar ();
35 B:
36 baz ();
37 return;
38
39 We'll currently reorder this as
40
41 if (!p) goto C;
42 A:
43 if (!q) goto D;
44 B:
45 baz ();
46 return;
47 D:
48 bar ();
49 goto B;
50 C:
51 foo ();
52 goto A;
53
54 A better ordering is
55
56 if (!p) goto C;
57 if (!q) goto D;
58 B:
59 baz ();
60 return;
61 C:
62 foo ();
63 if (q) goto B;
64 D:
65 bar ();
66 goto B;
67
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
71
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
74
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
78
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
81 */
82
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "except.h"
96 #include "toplev.h"
97 #include "recog.h"
98 #include "insn-flags.h"
99 #include "expr.h"
100 #include "obstack.h"
101
102
103 #ifndef HAVE_epilogue
104 #define HAVE_epilogue 0
105 #endif
106
107
108 /* The contents of the current function definition are allocated
109 in this obstack, and all are freed at the end of the function.
110 For top-level functions, this is temporary_obstack.
111 Separate obstacks are made for nested functions. */
112
113 extern struct obstack *function_obstack;
114
115
116 /* Structure to hold information about lexical scopes. */
117 typedef struct scope_def
118 {
119 int level;
120
121 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 rtx note_beg;
123
124 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 rtx note_end;
126
127 /* The bb containing note_beg (if any). */
128 basic_block bb_beg;
129
130 /* The bb containing note_end (if any). */
131 basic_block bb_end;
132
133 /* List of basic blocks contained within this scope. */
134 basic_block *bbs;
135
136 /* Number of blocks contained within this scope. */
137 int num_bbs;
138
139 /* The outer scope or NULL if outermost scope. */
140 struct scope_def *outer;
141
142 /* The first inner scope or NULL if innermost scope. */
143 struct scope_def *inner;
144
145 /* The last inner scope or NULL if innermost scope. */
146 struct scope_def *inner_last;
147
148 /* Link to the next (sibling) scope. */
149 struct scope_def *next;
150 } *scope;
151
152
153 /* Structure to hold information about the scope forest. */
154 typedef struct
155 {
156 /* Number of trees in forest. */
157 int num_trees;
158
159 /* List of tree roots. */
160 scope *trees;
161 } scope_forest_info;
162
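/* Illustrative sketch (not tied to any particular function): a source
   fragment with one nested lexical block,

	{			-> NOTE_INSN_BLOCK_BEG, scope level 0
	  int a;
	  {			-> NOTE_INSN_BLOCK_BEG, scope level 1
	    int b;
	  }			-> NOTE_INSN_BLOCK_END, scope level 1
	}			-> NOTE_INSN_BLOCK_END, scope level 0

   produces a forest with NUM_TREES == 1 whose single root scope has one
   inner scope; each scope remembers its BLOCK_BEG/BLOCK_END notes and,
   after relate_bbs_with_scopes, the basic blocks it spans.  */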
163 /* Structure to hold information about the blocks during reordering. */
164 typedef struct reorder_block_def
165 {
166 rtx eff_head;
167 rtx eff_end;
168 scope scope;
169 basic_block next;
170 int index;
171 int visited;
172 } *reorder_block_def;
173
174 #define RBI(BB) ((reorder_block_def) (BB)->aux)
175
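/* For illustration, the reordering state hangs off each block's AUX
   field, so the expected usage pattern is

	bb->aux = xcalloc (1, sizeof (struct reorder_block_def));
	...
	RBI (bb)->visited = 1;

   (the allocation and the matching free both live in
   reorder_basic_blocks at the bottom of this file), so RBI must not be
   applied outside that window.  */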
176
177 /* Local function prototypes. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
199
200 void verify_insn_chain PARAMS ((void));
201 \f
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
205
206 static rtx
207 skip_insns_after_block (bb)
208 basic_block bb;
209 {
210 rtx insn, last_insn, next_head;
211
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
215
216 for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
217 {
218 if (insn == next_head)
219 break;
220
221 switch (GET_CODE (insn))
222 {
223 case BARRIER:
224 continue;
225
226 case NOTE:
227 switch (NOTE_LINE_NUMBER (insn))
228 {
229 case NOTE_INSN_LOOP_END:
230 case NOTE_INSN_BLOCK_END:
231 case NOTE_INSN_DELETED:
232 case NOTE_INSN_DELETED_LABEL:
233 continue;
234
235 default:
236 break;
237 }
238 break;
239
240 case CODE_LABEL:
241 if (NEXT_INSN (insn)
242 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
243 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
244 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
245 {
246 insn = NEXT_INSN (insn);
247 continue;
248 }
249 break;
250
251 default:
252 break;
253 }
254
255 break;
256 }
257
258 return last_insn;
259 }
260
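/* For example, given the insn stream

	(jump_insn ...)			<- BB->end
	(barrier)
	(note NOTE_INSN_LOOP_END)
	(code_label ...)		<- head of the next block

   the loop above stops at the code_label and returns the LOOP_END note,
   so the barrier and the note travel with BB when blocks are rechained.  */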
261
262 /* Locate the effective beginning and end of the insn chain for each
263 block, as defined by skip_insns_after_block above. */
264
265 static void
266 record_effective_endpoints ()
267 {
268 rtx next_insn = get_insns ();
269 int i;
270
271 for (i = 0; i < n_basic_blocks; ++i)
272 {
273 basic_block bb = BASIC_BLOCK (i);
274 rtx end;
275
276 RBI (bb)->eff_head = next_insn;
277 end = skip_insns_after_block (bb);
278 RBI (bb)->eff_end = end;
279 next_insn = NEXT_INSN (end);
280 }
281 }
282
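/* Note that the effective ranges tile the entire insn chain: the first
   block's eff_head is the very first insn of the function, each block's
   eff_end absorbs the trailing barriers and notes identified above, and
   the next block's eff_head is simply NEXT_INSN of that eff_end.  */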
283
284 /* Compute an ordering for the basic blocks of the current function.  Record
285 the ordering in RBI()->index and chain the blocks through RBI()->next. */
286
287 static void
288 make_reorder_chain ()
289 {
290 basic_block last_block = NULL;
291 basic_block prev = NULL;
292 int nbb_m1 = n_basic_blocks - 1;
293
294 /* If we haven't got an epilogue in RTL, we must fall through to the exit.
295 Force the last block to be at the end. */
296 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
297 end of the function for stack unwinding purposes. */
298 if (! HAVE_epilogue)
299 {
300 last_block = BASIC_BLOCK (nbb_m1);
301 RBI (last_block)->visited = 1;
302 nbb_m1 -= 1;
303 }
304
305 /* Loop until we've placed every block. */
306 do
307 {
308 int i;
309 basic_block next = NULL;
310
311 /* Find the next unplaced block. */
312 /* ??? Get rid of this loop, and track which blocks are not yet
313 placed more directly, so as to avoid the O(N^2) worst case.
314 Perhaps keep a doubly-linked list of all to-be-placed blocks;
315 remove from the list as we place. The head of that list is
316 what we're looking for here. */
317
318 for (i = 0; i <= nbb_m1; ++i)
319 {
320 basic_block bb = BASIC_BLOCK (i);
321 if (! RBI (bb)->visited)
322 {
323 next = bb;
324 break;
325 }
326 }
327 if (! next)
328 abort ();
329
330 prev = make_reorder_chain_1 (next, prev);
331 }
332 while (RBI (prev)->index < nbb_m1);
333
334 /* Terminate the chain. */
335 if (! HAVE_epilogue)
336 {
337 RBI (prev)->next = last_block;
338 RBI (last_block)->index = RBI (prev)->index + 1;
339 prev = last_block;
340 }
341 RBI (prev)->next = NULL;
342 }
343
344 /* A helper function for make_reorder_chain.
345
346 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
347 These are assumed to be error conditions and we wish to cluster
348 all of them at the very end of the function, for the benefit of
349 cache locality in the rest of the function.
350
351 ??? We could do slightly better by noticing earlier that some subgraph
352 has all paths leading to noreturn functions, but for there to be more
353 than one block in such a subgraph is rare. */
354
355 static basic_block
356 make_reorder_chain_1 (bb, prev)
357 basic_block bb;
358 basic_block prev;
359 {
360 edge e;
361 basic_block next;
362 rtx note;
363
364 /* Mark this block visited. */
365 if (prev)
366 {
367 int new_index;
368
369 restart:
370 RBI (prev)->next = bb;
371 new_index = RBI (prev)->index + 1;
372 RBI (bb)->index = new_index;
373
374 if (rtl_dump_file && prev->index + 1 != bb->index)
375 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
376 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
377 }
378 else
379 RBI (bb)->index = 0;
380 RBI (bb)->visited = 1;
381 prev = bb;
382
383 if (bb->succ == NULL)
384 return prev;
385
386 /* Find the most probable block. */
387
388 next = NULL;
389 if (any_condjump_p (bb->end)
390 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
391 {
392 int taken, probability;
393 edge e_taken, e_fall;
394
395 probability = INTVAL (XEXP (note, 0));
396 taken = probability > REG_BR_PROB_BASE / 2;
397
398 /* Find the normal taken edge and the normal fallthru edge.
399 Note that there may in fact be other edges due to
400 asynchronous_exceptions. */
401
402 e_taken = e_fall = NULL;
403 for (e = bb->succ; e ; e = e->succ_next)
404 if (e->flags & EDGE_FALLTHRU)
405 e_fall = e;
406 else if (! (e->flags & EDGE_EH))
407 e_taken = e;
408
409 next = (taken ? e_taken : e_fall)->dest;
410 }
411
412 /* In the absence of a prediction, disturb things as little as possible
413 by selecting the old "next" block from the list of successors. If
414 there had been a fallthru edge, that will be the one. */
415 if (! next)
416 {
417 for (e = bb->succ; e ; e = e->succ_next)
418 if (e->dest->index == bb->index + 1)
419 {
420 if ((e->flags & EDGE_FALLTHRU)
421 || (e->dest->succ
422 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
423 next = e->dest;
424 break;
425 }
426 }
427
428 /* Make sure we didn't select a silly next block. */
429 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
430 next = NULL;
431
432 /* Recurse on the successors. Unroll the last call, as the normal
433 case is exactly one or two edges, and we can tail recurse. */
434 for (e = bb->succ; e; e = e->succ_next)
435 if (e->dest != EXIT_BLOCK_PTR
436 && ! RBI (e->dest)->visited
437 && e->dest->succ
438 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
439 {
440 if (next)
441 {
442 prev = make_reorder_chain_1 (next, prev);
443 next = RBI (e->dest)->visited ? NULL : e->dest;
444 }
445 else
446 next = e->dest;
447 }
448 if (next)
449 {
450 bb = next;
451 goto restart;
452 }
453
454 return prev;
455 }
456
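/* A small worked example (edge probabilities assumed): for the CFG

	0 -> 1 (predicted taken), 0 -> 2, 1 -> 3, 2 -> 3, 3 -> exit

   the walk from block 0 follows the more probable successor first, so
   the predicted-hot path 0, 1, 3 is chained consecutively and block 2
   ends up after it, giving the final order 0, 1, 3, 2.  */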
457
458 /* Locate or create a label for a given basic block. */
459
460 static rtx
461 label_for_bb (bb)
462 basic_block bb;
463 {
464 rtx label = bb->head;
465
466 if (GET_CODE (label) != CODE_LABEL)
467 {
468 if (rtl_dump_file)
469 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
470 bb->index, RBI (bb)->index);
471
472 label = emit_label_before (gen_label_rtx (), label);
473 if (bb->head == RBI (bb)->eff_head)
474 RBI (bb)->eff_head = label;
475 bb->head = label;
476 }
477
478 return label;
479 }
480
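/* Note that when BB starts with something other than a CODE_LABEL
   (typically its NOTE_INSN_BASIC_BLOCK), a fresh label is emitted in
   front of it and becomes the new head and, if the old head was also
   the effective head, the new effective head.  */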
481
482 /* Emit a jump to BB after insn AFTER. */
483
484 static rtx
485 emit_jump_to_block_after (bb, after)
486 basic_block bb;
487 rtx after;
488 {
489 rtx jump;
490
491 if (bb != EXIT_BLOCK_PTR)
492 {
493 rtx label = label_for_bb (bb);
494 jump = emit_jump_insn_after (gen_jump (label), after);
495 JUMP_LABEL (jump) = label;
496 LABEL_NUSES (label) += 1;
497
498 if (rtl_dump_file)
499 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
500 bb->index, RBI (bb)->index);
501 }
502 else
503 {
504 #ifdef HAVE_return
505 if (! HAVE_return)
506 abort ();
507 jump = emit_jump_insn_after (gen_return (), after);
508
509 if (rtl_dump_file)
510 fprintf (rtl_dump_file, "Emitting return\n");
511 #else
512 abort ();
513 #endif
514 }
515
516 return jump;
517 }
518
519
520 /* Given a reorder chain, rearrange the code to match. */
521
522 static void
523 fixup_reorder_chain ()
524 {
525 basic_block bb, last_bb;
526
527 /* First do the bulk reordering -- rechain the blocks without regard to
528 the needed changes to jumps and labels. */
529
530 last_bb = BASIC_BLOCK (0);
531 bb = RBI (last_bb)->next;
532 while (bb)
533 {
534 rtx last_e = RBI (last_bb)->eff_end;
535 rtx curr_h = RBI (bb)->eff_head;
536
537 NEXT_INSN (last_e) = curr_h;
538 PREV_INSN (curr_h) = last_e;
539
540 last_bb = bb;
541 bb = RBI (bb)->next;
542 }
543 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
544 set_last_insn (RBI (last_bb)->eff_end);
545
546 /* Now add jumps and labels as needed to match the blocks' new
547 outgoing edges. */
548
549 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
550 {
551 edge e_fall, e_taken, e;
552 rtx jump_insn, barrier_insn;
553 basic_block nb;
554
555 if (bb->succ == NULL)
556 continue;
557
558 /* Find the old fallthru edge, and another non-EH edge for
559 a taken jump. */
560 e_taken = e_fall = NULL;
561 for (e = bb->succ; e ; e = e->succ_next)
562 if (e->flags & EDGE_FALLTHRU)
563 e_fall = e;
564 else if (! (e->flags & EDGE_EH))
565 e_taken = e;
566
567 if (GET_CODE (bb->end) == JUMP_INSN)
568 {
569 if (any_uncondjump_p (bb->end))
570 {
571 /* If the destination is still not next, nothing to do. */
572 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
573 continue;
574
575 /* Otherwise, we can remove the jump and cleanup the edge. */
576 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
577 RBI (bb)->eff_end = skip_insns_after_block (bb);
578 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
579
580 if (rtl_dump_file)
581 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
582 bb->index, RBI (bb)->index);
583 continue;
584 }
585 else if (any_condjump_p (bb->end))
586 {
587 /* If the old fallthru is still next, nothing to do. */
588 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
589 || (RBI (bb)->index == n_basic_blocks - 1
590 && e_fall->dest == EXIT_BLOCK_PTR))
591 continue;
592
593 /* There is one special case: if *neither* block is next,
594 such as happens at the very end of a function, then we'll
595 need to add a new unconditional jump. Choose the taken
596 edge based on known or assumed probability. */
597 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
598 {
599 rtx note = find_reg_note (bb->end, REG_BR_PROB, 0);
600 if (note
601 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
602 && invert_jump (bb->end, label_for_bb (e_fall->dest), 0))
603 {
604 e_fall->flags &= ~EDGE_FALLTHRU;
605 e_taken->flags |= EDGE_FALLTHRU;
606 e = e_fall, e_fall = e_taken, e_taken = e;
607 }
608 }
609
610 /* Otherwise we can try to invert the jump. This will
611 basically never fail; however, keep up the pretense. */
612 else if (invert_jump (bb->end, label_for_bb (e_fall->dest), 0))
613 {
614 e_fall->flags &= ~EDGE_FALLTHRU;
615 e_taken->flags |= EDGE_FALLTHRU;
616 continue;
617 }
618 }
619 else if (returnjump_p (bb->end))
620 continue;
621 else
622 {
623 /* Otherwise we have some switch or computed jump. In the
624 99% case, there should not have been a fallthru edge. */
625 if (! e_fall)
626 continue;
627 #ifdef CASE_DROPS_THROUGH
628 /* Except for VAX. Since we didn't have predication for the
629 tablejump, the fallthru block should not have moved. */
630 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
631 continue;
632 #endif
633 abort ();
634 }
635 }
636 else
637 {
638 /* No fallthru implies a noreturn function with EH edges, or
639 something similarly bizarre. In any case, we don't need to
640 do anything. */
641 if (! e_fall)
642 continue;
643
644 /* If the fallthru block is still next, nothing to do. */
645 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
646 || (RBI (bb)->index == n_basic_blocks - 1
647 && e_fall->dest == EXIT_BLOCK_PTR))
648 continue;
649
650 /* We need a new jump insn. If the block has only one outgoing
651 edge, then we can stuff the new jump insn in directly. */
652 if (bb->succ->succ_next == NULL)
653 {
654 e_fall->flags &= ~EDGE_FALLTHRU;
655
656 jump_insn = emit_jump_to_block_after (e_fall->dest, bb->end);
657 bb->end = jump_insn;
658 barrier_insn = emit_barrier_after (jump_insn);
659 RBI (bb)->eff_end = barrier_insn;
660 continue;
661 }
662 }
663
664 /* If we got here, we need to add a new jump insn in a new block
665 across the edge e_fall. */
666
667 jump_insn = emit_jump_to_block_after (e_fall->dest, bb->end);
668 barrier_insn = emit_barrier_after (jump_insn);
669
670 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
671 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
672
673 nb = BASIC_BLOCK (n_basic_blocks - 1);
674 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
675 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
676 nb->local_set = 0;
677
678 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
679 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
680
681 nb->aux = xmalloc (sizeof (struct reorder_block_def));
682 RBI (nb)->eff_head = nb->head;
683 RBI (nb)->eff_end = barrier_insn;
684 RBI (nb)->scope = RBI (bb)->scope;
685 RBI (nb)->index = RBI (bb)->index + 1;
686 RBI (nb)->visited = 1;
687 RBI (nb)->next = RBI (bb)->next;
688 RBI (bb)->next = nb;
689
690 /* Link to new block. */
691 make_edge (NULL, nb, e_fall->dest, 0);
692 redirect_edge_succ (e_fall, nb);
693
694 /* Don't process this new block. */
695 bb = nb;
696
697 /* Fix subsequent reorder block indices to reflect new block. */
698 while ((nb = RBI (nb)->next) != NULL)
699 RBI (nb)->index += 1;
700 }
701
702 /* Put basic_block_info in the new order. */
703 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
704 {
705 bb->index = RBI (bb)->index;
706 BASIC_BLOCK (bb->index) = bb;
707 }
708 }
709
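/* For example, when a block ends in a conditional jump whose old
   fallthru successor has been moved away but whose old target now
   immediately follows it, the loop above simply inverts the condition
   with invert_jump so the two successors swap roles; when neither
   successor ends up adjacent, an explicit unconditional jump is emitted
   instead, in a new block placed across the old fallthru edge.  */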
710
711 /* Perform sanity checks on the insn chain.
712 1. Check that next/prev pointers are consistent in both the forward and
713 reverse direction.
714 2. Count insns in chain, going both directions, and check if equal.
715 3. Check that get_last_insn () returns the actual end of chain. */
716
717 void
718 verify_insn_chain ()
719 {
720 rtx x,
721 prevx,
722 nextx;
723 int insn_cnt1,
724 insn_cnt2;
725
726 prevx = NULL;
727 insn_cnt1 = 1;
728 for (x = get_insns (); x; x = NEXT_INSN (x))
729 {
730 if (PREV_INSN (x) != prevx)
731 {
732 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
733 fprintf (stderr, "previous insn:\n");
734 debug_rtx (prevx);
735 fprintf (stderr, "current insn:\n");
736 debug_rtx (x);
737 abort ();
738 }
739 ++insn_cnt1;
740 prevx = x;
741 }
742
743 if (prevx != get_last_insn ())
744 {
745 fprintf (stderr, "last_insn corrupt.\n");
746 abort ();
747 }
748
749 nextx = NULL;
750 insn_cnt2 = 1;
751 for (x = get_last_insn (); x; x = PREV_INSN (x))
752 {
753 if (NEXT_INSN (x) != nextx)
754 {
755 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
756 fprintf (stderr, "current insn:\n");
757 debug_rtx (x);
758 fprintf (stderr, "next insn:\n");
759 debug_rtx (nextx);
760 abort ();
761 }
762 ++insn_cnt2;
763 nextx = x;
764 }
765
766 if (insn_cnt1 != insn_cnt2)
767 {
768 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
769 insn_cnt1, insn_cnt2);
770 abort ();
771 }
772 }
773
774 static rtx
775 get_next_bb_note (x)
776 rtx x;
777 {
778 while (x)
779 {
780 if (NOTE_INSN_BASIC_BLOCK_P (x))
781 return x;
782 x = NEXT_INSN (x);
783 }
784 return NULL;
785 }
786
787
788 static rtx
789 get_prev_bb_note (x)
790 rtx x;
791 {
792 while (x)
793 {
794 if (NOTE_INSN_BASIC_BLOCK_P (x))
795 return x;
796 x = PREV_INSN (x);
797 }
798 return NULL;
799 }
800
801
802 /* Determine and record the relationships between basic blocks and
803 scopes in scope tree S. */
804
805 static void
806 relate_bbs_with_scopes (s)
807 scope s;
808 {
809 scope p;
810 int i, bbi1, bbi2, bbs_spanned;
811 rtx bbnote;
812
813 for (p = s->inner; p; p = p->next)
814 relate_bbs_with_scopes (p);
815
816 bbi1 = bbi2 = -1;
817 bbs_spanned = 0;
818
819 /* If the begin and end notes are both inside the same basic block,
820 or if they are both outside of basic blocks, then we know immediately
821 how they are related. Otherwise, we need to poke around to make the
822 determination. */
823 if (s->bb_beg != s->bb_end)
824 {
825 if (s->bb_beg && s->bb_end)
826 {
827 /* The two notes are in different bbs. This implies that all the
828 basic blocks spanned by the pair of notes are contained in
829 this scope. */
830 bbi1 = s->bb_beg->index;
831 bbi2 = s->bb_end->index;
832 bbs_spanned = 1;
833 }
834 else if (! s->bb_beg)
835 {
836 /* First note is outside of a bb. If the scope spans more than
837 one basic block, then they all are contained within this
838 scope. Otherwise, this scope is contained within the basic
839 block. */
840 bbnote = get_next_bb_note (s->note_beg);
841 if (! bbnote)
842 abort ();
843 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
844 {
845 bbs_spanned = 0;
846 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
847 }
848 else
849 {
850 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
851 bbi2 = s->bb_end->index;
852 s->bb_end = NULL;
853 bbs_spanned = 1;
854 }
855 }
856 else /* ! s->bb_end */
857 {
858 /* Second note is outside of a bb. If the scope spans more than
859 one basic block, then they all are contained within this
860 scope. Otherwise, this scope is contained within the basic
861 block. */
862 bbnote = get_prev_bb_note (s->note_end);
863 if (! bbnote)
864 abort ();
865 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
866 {
867 bbs_spanned = 0;
868 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
869 }
870 else
871 {
872 bbi1 = s->bb_beg->index;
873 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
874 s->bb_beg = NULL;
875 bbs_spanned = 1;
876 }
877 }
878 }
879 else
880 {
881 if (s->bb_beg)
882 /* Both notes are in the same bb, which implies the block
883 contains this scope. */
884 bbs_spanned = 0;
885 else
886 {
887 rtx x1, x2;
888 /* Both notes are outside of any bbs. This implies that all the
889 basic blocks spanned by the pair of notes are contained in
890 this scope.
891 There is a degenerate case to consider. If the notes do not
892 span any basic blocks, then it is an empty scope that can
893 safely be deleted or ignored. Mark these with level = -1. */
894
895 x1 = get_next_bb_note (s->note_beg);
896 x2 = get_prev_bb_note (s->note_end);
897 if (! (x1 && x2))
898 {
899 s->level = -1;
900 bbs_spanned = 0;
901 }
902 else
903 {
904 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
905 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
906 bbs_spanned = 1;
907 }
908 }
909 }
910
911 /* If the scope spans one or more basic blocks, we record them. We
912 only record the bbs that are immediately contained within this
913 scope. Note that if a scope is contained within a bb, we can tell
914 by checking that bb_beg == bb_end and that they are non-null. */
915 if (bbs_spanned)
916 {
917 int j = 0;
918
919 s->num_bbs = 0;
920 for (i = bbi1; i <= bbi2; i++)
921 if (! RBI (BASIC_BLOCK (i))->scope)
922 s->num_bbs++;
923
924 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
925 for (i = bbi1; i <= bbi2; i++)
926 {
927 basic_block curr_bb = BASIC_BLOCK (i);
928 if (! RBI (curr_bb)->scope)
929 {
930 s->bbs[j++] = curr_bb;
931 RBI (curr_bb)->scope = s;
932 }
933 }
934 }
935 else
936 s->num_bbs = 0;
937 }
938
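/* Example: a scope whose BLOCK_BEG note sits between blocks 3 and 4 and
   whose BLOCK_END note sits inside block 6 takes the "! s->bb_beg"
   branch above: it is recorded as spanning blocks 4 through 6, and only
   those of them not already claimed by an inner scope land in s->bbs.  */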
939
940 /* Allocate and initialize a new scope structure with scope level LEVEL,
941 and record the NOTE beginning the scope. */
942
943 static scope
944 make_new_scope (level, note)
945 int level;
946 rtx note;
947 {
948 scope new_scope = xcalloc (1, sizeof (struct scope_def));
949 new_scope->level = level;
950 new_scope->note_beg = note;
951 return new_scope;
952 }
953
954
955 /* Build a forest representing the scope structure of the function,
956 recording it in the structure pointed to by FOREST. */
957
958 static void
959 build_scope_forest (forest)
960 scope_forest_info *forest;
961 {
962 rtx x;
963 int level, bbi, i;
964 basic_block curr_bb;
965 scope root, curr_scope = 0;
966
967 forest->num_trees = 0;
968 forest->trees = NULL;
969 level = -1;
970 root = NULL;
971 curr_bb = NULL;
972 bbi = 0;
973 for (x = get_insns (); x; x = NEXT_INSN (x))
974 {
975 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
976 curr_bb = BASIC_BLOCK (bbi);
977
978 if (GET_CODE (x) == NOTE)
979 {
980 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
981 {
982 if (root)
983 {
984 scope new_scope;
985 if (! curr_scope)
986 abort ();
987 level++;
988 new_scope = make_new_scope (level, x);
989 new_scope->outer = curr_scope;
990 new_scope->next = NULL;
991 if (! curr_scope->inner)
992 {
993 curr_scope->inner = new_scope;
994 curr_scope->inner_last = new_scope;
995 }
996 else
997 {
998 curr_scope->inner_last->next = new_scope;
999 curr_scope->inner_last = new_scope;
1000 }
1001 curr_scope = curr_scope->inner_last;
1002 }
1003 else
1004 {
1005 int ntrees = forest->num_trees;
1006 level++;
1007 curr_scope = make_new_scope (level, x);
1008 root = curr_scope;
1009 forest->trees = xrealloc (forest->trees,
1010 sizeof (scope) * (ntrees + 1));
1011 forest->trees[forest->num_trees++] = root;
1012 }
1013 curr_scope->bb_beg = curr_bb;
1014 }
1015 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1016 {
1017 curr_scope->bb_end = curr_bb;
1018 curr_scope->note_end = x;
1019 level--;
1020 curr_scope = curr_scope->outer;
1021 if (level == -1)
1022 root = NULL;
1023 }
1024 } /* if note */
1025
1026 if (curr_bb && curr_bb->end == x)
1027 {
1028 curr_bb = NULL;
1029 bbi++;
1030 }
1031
1032 } /* for */
1033
1034 for (i = 0; i < forest->num_trees; i++)
1035 relate_bbs_with_scopes (forest->trees[i]);
1036 }
1037
1038
1039 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1040 the insn chain. */
1041
1042 static void
1043 remove_scope_notes ()
1044 {
1045 rtx x, next;
1046 basic_block currbb = NULL;
1047
1048 for (x = get_insns (); x; x = next)
1049 {
1050 next = NEXT_INSN (x);
1051 if (NOTE_INSN_BASIC_BLOCK_P (x))
1052 currbb = NOTE_BASIC_BLOCK (x);
1053
1054 if (GET_CODE (x) == NOTE
1055 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1056 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1057 {
1058 /* Check if the scope note happens to be the end of a bb. */
1059 if (currbb && x == currbb->end)
1060 currbb->end = PREV_INSN (x);
1061 if (currbb && x == currbb->head)
1062 abort ();
1063
1064 if (PREV_INSN (x))
1065 {
1066 NEXT_INSN (PREV_INSN (x)) = next;
1067 PREV_INSN (next) = PREV_INSN (x);
1068
1069 NEXT_INSN (x) = NULL;
1070 PREV_INSN (x) = NULL;
1071 }
1072 else
1073 abort ();
1074 }
1075 }
1076 }
1077
1078
1079 /* Insert scope note pairs for a contained scope tree S after *IP, advancing *IP. */
1080
1081 static void
1082 insert_intra_1 (s, ip)
1083 scope s;
1084 rtx *ip;
1085 {
1086 scope p;
1087
1088 if (NOTE_BLOCK (s->note_beg))
1089 {
1090 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1091 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1092 }
1093
1094 for (p = s->inner; p; p = p->next)
1095 insert_intra_1 (p, ip);
1096
1097 if (NOTE_BLOCK (s->note_beg))
1098 {
1099 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1100 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1101 }
1102 }
1103
1104
1105 /* Insert NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END note pairs for
1106 scopes that are contained entirely within BB. */
1107
1108 static void
1109 insert_intra_bb_scope_notes (bb)
1110 basic_block bb;
1111 {
1112 scope s = RBI (bb)->scope;
1113 scope p;
1114 rtx ip;
1115
1116 if (! s)
1117 return;
1118
1119 ip = bb->head;
1120 if (GET_CODE (ip) == CODE_LABEL)
1121 ip = NEXT_INSN (ip);
1122
1123 for (p = s->inner; p; p = p->next)
1124 {
1125 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1126 insert_intra_1 (p, &ip);
1127 }
1128 }
1129
1130
1131 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1132 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1133 notes before BB2 such that the notes are correctly balanced. If BB1 or
1134 BB2 is NULL, we are inserting scope notes for the first and last basic
1135 blocks, respectively. */
1136
1137 static void
1138 insert_inter_bb_scope_notes (bb1, bb2)
1139 basic_block bb1;
1140 basic_block bb2;
1141 {
1142 rtx ip;
1143 scope com;
1144
1145 /* It is possible that a basic block is not contained in any scope.
1146 In that case, we either open or close a scope but not both. */
1147 if (bb1 && bb2)
1148 {
1149 scope s1 = RBI (bb1)->scope;
1150 scope s2 = RBI (bb2)->scope;
1151 if (! s1 && ! s2)
1152 return;
1153 if (! s1)
1154 bb1 = NULL;
1155 else if (! s2)
1156 bb2 = NULL;
1157 }
1158
1159 /* Find common ancestor scope. */
1160 if (bb1 && bb2)
1161 {
1162 scope s1 = RBI (bb1)->scope;
1163 scope s2 = RBI (bb2)->scope;
1164 while (s1 != s2)
1165 {
1166 if (! (s1 && s2))
1167 abort ();
1168 if (s1->level > s2->level)
1169 s1 = s1->outer;
1170 else if (s2->level > s1->level)
1171 s2 = s2->outer;
1172 else
1173 {
1174 s1 = s1->outer;
1175 s2 = s2->outer;
1176 }
1177 }
1178 com = s1;
1179 }
1180 else
1181 com = NULL;
1182
1183 /* Close scopes. */
1184 if (bb1)
1185 {
1186 scope s = RBI (bb1)->scope;
1187 ip = RBI (bb1)->eff_end;
1188 while (s != com)
1189 {
1190 if (NOTE_BLOCK (s->note_beg))
1191 {
1192 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1193 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1194 }
1195 s = s->outer;
1196 }
1197 }
1198
1199 /* Open scopes. */
1200 if (bb2)
1201 {
1202 scope s = RBI (bb2)->scope;
1203 ip = bb2->head;
1204 while (s != com)
1205 {
1206 if (NOTE_BLOCK (s->note_beg))
1207 {
1208 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1209 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1210 }
1211 s = s->outer;
1212 }
1213 }
1214 }
1215
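/* Example: if BB1 sits in scope S1 nested inside S0 while BB2 sits
   directly in S0, the common ancestor computed above is S0, so a single
   NOTE_INSN_BLOCK_END for S1 is emitted after BB1 (assuming S1 has an
   associated BLOCK) and nothing needs to be opened before BB2.  */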
1216
1217 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1218 on the scope forest and the newly reordered basic blocks. */
1219
1220 static void
1221 rebuild_scope_notes (forest)
1222 scope_forest_info *forest;
1223 {
1224 int i;
1225
1226 if (forest->num_trees == 0)
1227 return;
1228
1229 /* Start by opening the scopes before the first basic block. */
1230 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1231
1232 /* Then, open and close scopes as needed between blocks. */
1233 for (i = 0; i < n_basic_blocks - 1; i++)
1234 {
1235 basic_block bb1 = BASIC_BLOCK (i);
1236 basic_block bb2 = BASIC_BLOCK (i + 1);
1237 if (RBI (bb1)->scope != RBI (bb2)->scope)
1238 insert_inter_bb_scope_notes (bb1, bb2);
1239 insert_intra_bb_scope_notes (bb1);
1240 }
1241
1242 /* Finally, close the scopes after the last basic block. */
1243 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1244 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1245 }
1246
1247
1248 /* Free the storage associated with the scope tree at S. */
1249
1250 static void
1251 free_scope_forest_1 (s)
1252 scope s;
1253 {
1254 scope p, next;
1255
1256 for (p = s->inner; p; p = next)
1257 {
1258 next = p->next;
1259 free_scope_forest_1 (p);
1260 }
1261
1262 if (s->bbs)
1263 free (s->bbs);
1264 free (s);
1265 }
1266
1267
1268 /* Free the storage associated with the scope forest. */
1269
1270 static void
1271 free_scope_forest (forest)
1272 scope_forest_info *forest;
1273 {
1274 int i;
1275 for (i = 0; i < forest->num_trees; i++)
1276 free_scope_forest_1 (forest->trees[i]);
1277 }
1278
1279
1280 /* Visualize the scope forest. */
1281
1282 void
1283 dump_scope_forest (forest)
1284 scope_forest_info *forest;
1285 {
1286 if (forest->num_trees == 0)
1287 fprintf (stderr, "\n< Empty scope forest >\n");
1288 else
1289 {
1290 int i;
1291 fprintf (stderr, "\n< Scope forest >\n");
1292 for (i = 0; i < forest->num_trees; i++)
1293 dump_scope_forest_1 (forest->trees[i], 0);
1294 }
1295 }
1296
1297
1298 /* Recursive portion of dump_scope_forest. */
1299
1300 static void
1301 dump_scope_forest_1 (s, indent)
1302 scope s;
1303 int indent;
1304 {
1305 scope p;
1306 int i;
1307
1308 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1309 && RBI (s->bb_beg)->scope
1310 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1311 {
1312 fprintf (stderr, "%*s", indent, "");
1313 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1314 }
1315
1316 fprintf (stderr, "%*s", indent, "");
1317 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1318 (PTR) NOTE_BLOCK (s->note_beg));
1319
1320 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1321 for (i = 0; i < s->num_bbs; i++)
1322 fprintf (stderr, " %d", s->bbs[i]->index);
1323 fprintf (stderr, "\n");
1324
1325 for (p = s->inner; p; p = p->next)
1326 dump_scope_forest_1 (p, indent + 2);
1327
1328 fprintf (stderr, "%*s", indent, "");
1329 fprintf (stderr, "}\n");
1330 }
1331
1332
1333 /* Reorder basic blocks. The main entry point to this file. */
1334
1335 void
1336 reorder_basic_blocks ()
1337 {
1338 scope_forest_info forest;
1339 int i;
1340
1341 if (n_basic_blocks <= 1)
1342 return;
1343
1344 /* We do not currently handle correct re-placement of EH notes. */
1345 for (i = 0; i < n_basic_blocks; i++)
1346 {
1347 edge e;
1348 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1349 if (e->flags & EDGE_EH)
1350 return;
1351 }
1352
1353 for (i = 0; i < n_basic_blocks; i++)
1354 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1355
1356 build_scope_forest (&forest);
1357 remove_scope_notes ();
1358
1359 record_effective_endpoints ();
1360 make_reorder_chain ();
1361 fixup_reorder_chain ();
1362
1363 #ifdef ENABLE_CHECKING
1364 verify_insn_chain ();
1365 #endif
1366
1367 rebuild_scope_notes (&forest);
1368 free_scope_forest (&forest);
1369 reorder_blocks ();
1370
1371 for (i = 0; i < n_basic_blocks; i++)
1372 free (BASIC_BLOCK (i)->aux);
1373
1374 #ifdef ENABLE_CHECKING
1375 verify_flow_info ();
1376 #endif
1377 }
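/* A minimal driver sketch, assuming the usual rest_of_compilation setup
   (CFG built and life information up to date):

	if (optimize > 0 && flag_reorder_blocks)
	  reorder_basic_blocks ();

   The pass bails out early for functions with at most one basic block
   or with any EH edges, as checked at the top of reorder_basic_blocks.  */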