1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* References:
22
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
25
26 TODO:
27
28 (1) Consider:
29
30 if (p) goto A; // predict taken
31 foo ();
32 A:
33 if (q) goto B; // predict taken
34 bar ();
35 B:
36 baz ();
37 return;
38
39 We'll currently reorder this as
40
41 if (!p) goto C;
42 A:
43 if (!q) goto D;
44 B:
45 baz ();
46 return;
47 D:
48 bar ();
49 goto B;
50 C:
51 foo ();
52 goto A;
53
54 A better ordering is
55
56 if (!p) goto C;
57 if (!q) goto D;
58 B:
59 baz ();
60 return;
61 C:
62 foo ();
63 if (q) goto B;
64 D:
65 bar ();
66 goto B;
67
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
71
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
74
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
78
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
81 */
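
/* Overview: reorder_basic_blocks, the entry point at the end of this
   file, drives the pass roughly as follows:

	build_scope_forest (&forest);     -- snapshot the lexical scopes
	remove_scope_notes ();            -- pull BLOCK notes out of the way
	record_effective_endpoints ();    -- find each block's insn range
	make_reorder_chain ();            -- choose the new block order
	fixup_reorder_chain ();           -- rechain insns, patch jumps/labels
	rebuild_scope_notes (&forest);    -- re-emit balanced BLOCK notes
	free_scope_forest (&forest);
	reorder_blocks ();                -- rebuild the BLOCK tree

   This sketch omits the checking hooks; see reorder_basic_blocks for
   the exact sequence.  */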
82
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "except.h"
96 #include "toplev.h"
97 #include "recog.h"
98 #include "insn-flags.h"
99 #include "expr.h"
100 #include "obstack.h"
101
102
103 #ifndef HAVE_epilogue
104 #define HAVE_epilogue 0
105 #endif
106
107
108 /* The contents of the current function definition are allocated
109 in this obstack, and all are freed at the end of the function.
110 For top-level functions, this is temporary_obstack.
111 Separate obstacks are made for nested functions. */
112
113 extern struct obstack *function_obstack;
114
115
116 /* Structure to hold information about lexical scopes. */
117 typedef struct scope_def
118 {
119 int level;
120
121 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 rtx note_beg;
123
124 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 rtx note_end;
126
127 /* The bb containing note_beg (if any). */
128 basic_block bb_beg;
129
130 /* The bb containing note_end (if any). */
131 basic_block bb_end;
132
133 /* List of basic blocks contained within this scope. */
134 basic_block *bbs;
135
136 /* Number of blocks contained within this scope. */
137 int num_bbs;
138
139 /* The outer scope or NULL if outermost scope. */
140 struct scope_def *outer;
141
142 /* The first inner scope or NULL if innermost scope. */
143 struct scope_def *inner;
144
145 /* The last inner scope or NULL if innermost scope. */
146 struct scope_def *inner_last;
147
148 /* Link to the next (sibling) scope. */
149 struct scope_def *next;
150 } *scope;
151
152
153 /* Structure to hold information about the scope forest. */
154 typedef struct
155 {
156 /* Number of trees in forest. */
157 int num_trees;
158
159 /* List of tree roots. */
160 scope *trees;
161 } scope_forest_info;
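
/* Illustrative example of how the forest is built: for an insn stream
   containing

	NOTE_INSN_BLOCK_BEG	(scope A)
	  NOTE_INSN_BLOCK_BEG	(scope B)
	  NOTE_INSN_BLOCK_END
	NOTE_INSN_BLOCK_END
	NOTE_INSN_BLOCK_BEG	(scope C)
	NOTE_INSN_BLOCK_END

   build_scope_forest below produces a forest of two trees, A (with
   inner child B) and C; relate_bbs_with_scopes then records, for each
   scope, the basic blocks it immediately contains.  */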
162
163 /* Structure to hold information about the blocks during reordering. */
164 typedef struct reorder_block_def
165 {
166 rtx eff_head;
167 rtx eff_end;
168 scope scope;
169 basic_block next;
170 int index;
171 int visited;
172 } *reorder_block_def;
173
174 #define RBI(BB) ((reorder_block_def) (BB)->aux)
175
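/* Illustrative sketch of how this bookkeeping is used: the driver
   attaches a reorder_block_def to every block's ->aux field, and the
   chosen ordering is later read back by walking the ->next chain, e.g.

	for (i = 0; i < n_basic_blocks; i++)
	  BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
	...
	for (bb = BASIC_BLOCK (0); bb; bb = RBI (bb)->next)
	  bb->index = RBI (bb)->index;

   See reorder_basic_blocks and fixup_reorder_chain for the real code.  */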
176
177 /* Local function prototypes. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
199
200 void verify_insn_chain PARAMS ((void));
201 \f
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
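
/* For example, for a block ending in a tablejump the trailing insns
   might look like

	(jump_insn ...)			<- bb->end (the tablejump)
	(barrier)
	(code_label ...)		<- label of the dispatch table
	(jump_insn (addr_vec ...))	<- the dispatch table itself

   and we return the ADDR_VEC insn, so that the table is carried along
   when the block is moved.  */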
205
206 static rtx
207 skip_insns_after_block (bb)
208 basic_block bb;
209 {
210 rtx insn, last_insn, next_head;
211
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
215
216 for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
217 {
218 if (insn == next_head)
219 break;
220
221 switch (GET_CODE (insn))
222 {
223 case BARRIER:
224 continue;
225
226 case NOTE:
227 switch (NOTE_LINE_NUMBER (insn))
228 {
229 case NOTE_INSN_LOOP_END:
230 case NOTE_INSN_BLOCK_END:
231 case NOTE_INSN_DELETED:
232 case NOTE_INSN_DELETED_LABEL:
233 continue;
234
235 default:
236 break;
237 }
238 break;
239
240 case CODE_LABEL:
241 if (NEXT_INSN (insn)
242 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
243 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
244 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
245 {
246 insn = NEXT_INSN (insn);
247 continue;
248 }
249 break;
250
251 default:
252 break;
253 }
254
255 break;
256 }
257
258 return last_insn;
259 }
260
261
262 /* Locate the effective beginning and end of the insn chain for each
263 block, as defined by skip_insns_after_block above. */
264
265 static void
266 record_effective_endpoints ()
267 {
268 rtx next_insn = get_insns ();
269 int i;
270
271 for (i = 0; i < n_basic_blocks; ++i)
272 {
273 basic_block bb = BASIC_BLOCK (i);
274 rtx end;
275
276 RBI (bb)->eff_head = next_insn;
277 end = skip_insns_after_block (bb);
278 RBI (bb)->eff_end = end;
279 next_insn = NEXT_INSN (end);
280 }
281 }
282
283
284 /* Compute an ordering for the basic blocks of the current function.  Record
285 the ordering in RBI()->index and chain the blocks through RBI()->next.  */
286
287 static void
288 make_reorder_chain ()
289 {
290 basic_block last_block = NULL;
291 basic_block prev = NULL;
292 int nbb_m1 = n_basic_blocks - 1;
293
294 /* If the epilogue is not represented in RTL, we must fall through to the
295 exit.  Force the last block to be at the end.  */
296 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
297 end of the function for stack unwinding purposes. */
298 if (! HAVE_epilogue)
299 {
300 last_block = BASIC_BLOCK (nbb_m1);
301 RBI (last_block)->visited = 1;
302 nbb_m1 -= 1;
303 }
304
305 /* Loop until we've placed every block. */
306 do
307 {
308 int i;
309 basic_block next = NULL;
310
311 /* Find the next unplaced block. */
312 /* ??? Get rid of this loop, and track which blocks are not yet
313 placed more directly, so as to avoid the O(N^2) worst case.
314 Perhaps keep a doubly-linked list of all to-be-placed blocks;
315 remove from the list as we place. The head of that list is
316 what we're looking for here. */
317
318 for (i = 0; i <= nbb_m1; ++i)
319 {
320 basic_block bb = BASIC_BLOCK (i);
321 if (! RBI (bb)->visited)
322 {
323 next = bb;
324 break;
325 }
326 }
327 if (! next)
328 abort ();
329
330 prev = make_reorder_chain_1 (next, prev);
331 }
332 while (RBI (prev)->index < nbb_m1);
333
334 /* Terminate the chain. */
335 if (! HAVE_epilogue)
336 {
337 RBI (prev)->next = last_block;
338 RBI (last_block)->index = RBI (prev)->index + 1;
339 prev = last_block;
340 }
341 RBI (prev)->next = NULL;
342 }
343
344 /* A helper function for make_reorder_chain.
345
346 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
347 These are assumed to be error conditions, and we wish to cluster
348 all of them at the very end of the function for the benefit of cache
349 locality for the rest of the function.
350
351 ??? We could do slightly better by noticing earlier that some subgraph
352 has all paths leading to noreturn functions, but for there to be more
353 than one block in such a subgraph is rare. */
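
/* As an illustrative example, in

	bb0:  if (p) goto bb2;		-- non-fallthru edge to bb2
	bb1:  ... common case ...
	bb2:  abort ();			-- noreturn, no successor edges

   the successor loop below declines to recurse into bb2 (it has no
   successors), so unless a REG_BR_PROB note explicitly predicts the
   branch taken, bb2 is left for make_reorder_chain's scan of unvisited
   blocks and falls out of the main trace.  */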
354
355 static basic_block
356 make_reorder_chain_1 (bb, prev)
357 basic_block bb;
358 basic_block prev;
359 {
360 edge e;
361 basic_block next;
362 rtx note;
363
364 /* Mark this block visited. */
365 if (prev)
366 {
367 int new_index;
368
369 restart:
370 RBI (prev)->next = bb;
371 new_index = RBI (prev)->index + 1;
372 RBI (bb)->index = new_index;
373
374 if (rtl_dump_file && prev->index + 1 != bb->index)
375 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
376 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
377 }
378 else
379 RBI (bb)->index = 0;
380 RBI (bb)->visited = 1;
381 prev = bb;
382
383 if (bb->succ == NULL)
384 return prev;
385
386 /* Find the most probable block. */
387
388 next = NULL;
389 if (any_condjump_p (bb->end)
390 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
391 {
392 int taken, probability;
393 edge e_taken, e_fall;
394
395 probability = INTVAL (XEXP (note, 0));
396 taken = probability > REG_BR_PROB_BASE / 2;
397
398 /* Find the normal taken edge and the normal fallthru edge.
399 Note that there may in fact be other edges due to
400 asynchronous_exceptions. */
401
402 e_taken = e_fall = NULL;
403 for (e = bb->succ; e ; e = e->succ_next)
404 if (e->flags & EDGE_FALLTHRU)
405 e_fall = e;
406 else if (! (e->flags & EDGE_EH))
407 e_taken = e;
408
409 next = (taken ? e_taken : e_fall)->dest;
410 }
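
/* Worked example, assuming the usual REG_BR_PROB_BASE of 10000: a
   REG_BR_PROB note value of 8000 means the branch is predicted taken
   with probability 0.8, so 8000 > 10000 / 2 selects e_taken->dest as
   NEXT above, while a value of 3000 would select e_fall->dest.  */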
411
412 /* In the absence of a prediction, disturb things as little as possible
413 by selecting the old "next" block from the list of successors. If
414 there is a fallthru edge, that will be the one. */
415 if (! next)
416 {
417 for (e = bb->succ; e ; e = e->succ_next)
418 if (e->dest->index == bb->index + 1)
419 {
420 if ((e->flags & EDGE_FALLTHRU)
421 || (e->dest->succ
422 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
423 next = e->dest;
424 break;
425 }
426 }
427
428 /* Make sure we didn't select a silly next block. */
429 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
430 next = NULL;
431
432 /* Recurse on the successors. Unroll the last call, as the normal
433 case is exactly one or two edges, and we can tail recurse. */
434 for (e = bb->succ; e; e = e->succ_next)
435 if (e->dest != EXIT_BLOCK_PTR
436 && ! RBI (e->dest)->visited
437 && e->dest->succ
438 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
439 {
440 if (next)
441 {
442 prev = make_reorder_chain_1 (next, prev);
443 next = RBI (e->dest)->visited ? NULL : e->dest;
444 }
445 else
446 next = e->dest;
447 }
448 if (next)
449 {
450 bb = next;
451 goto restart;
452 }
453
454 return prev;
455 }
456
457
458 /* Locate or create a label for a given basic block. */
459
460 static rtx
461 label_for_bb (bb)
462 basic_block bb;
463 {
464 rtx label = bb->head;
465
466 if (GET_CODE (label) != CODE_LABEL)
467 {
468 if (rtl_dump_file)
469 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
470 bb->index, RBI (bb)->index);
471
472 label = emit_label_before (gen_label_rtx (), label);
473 if (bb->head == RBI (bb)->eff_head)
474 RBI (bb)->eff_head = label;
475 bb->head = label;
476 }
477
478 return label;
479 }
480
481
482 /* Emit a jump to BB after insn AFTER. */
483
484 static rtx
485 emit_jump_to_block_after (bb, after)
486 basic_block bb;
487 rtx after;
488 {
489 rtx jump;
490
491 if (bb != EXIT_BLOCK_PTR)
492 {
493 rtx label = label_for_bb (bb);
494 jump = emit_jump_insn_after (gen_jump (label), after);
495 JUMP_LABEL (jump) = label;
496 LABEL_NUSES (label) += 1;
497
498 if (rtl_dump_file)
499 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
500 bb->index, RBI (bb)->index);
501 }
502 else
503 {
504 #ifdef HAVE_return
505 if (! HAVE_return)
506 abort ();
507 jump = emit_jump_insn_after (gen_return (), after);
508
509 if (rtl_dump_file)
510 fprintf (rtl_dump_file, "Emitting return\n");
511 #else
512 abort ();
513 #endif
514 }
515
516 return jump;
517 }
518
519
520 /* Given a reorder chain, rearrange the code to match. */
521
522 static void
523 fixup_reorder_chain ()
524 {
525 basic_block bb, last_bb;
526
527 /* First do the bulk reordering -- rechain the blocks without regard to
528 the needed changes to jumps and labels. */
529
530 last_bb = BASIC_BLOCK (0);
531 bb = RBI (last_bb)->next;
532 while (bb)
533 {
534 rtx last_e = RBI (last_bb)->eff_end;
535 rtx curr_h = RBI (bb)->eff_head;
536
537 NEXT_INSN (last_e) = curr_h;
538 PREV_INSN (curr_h) = last_e;
539
540 last_bb = bb;
541 bb = RBI (bb)->next;
542 }
543 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
544 set_last_insn (RBI (last_bb)->eff_end);
545
546 /* Now add jumps and labels as needed to match the blocks' new
547 outgoing edges. */
548
549 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
550 {
551 edge e_fall, e_taken, e;
552 rtx jump_insn, barrier_insn, bb_end_insn;
553 basic_block nb;
554
555 if (bb->succ == NULL)
556 continue;
557
558 /* Find the old fallthru edge, and another non-EH edge for
559 a taken jump. */
560 e_taken = e_fall = NULL;
561 for (e = bb->succ; e ; e = e->succ_next)
562 if (e->flags & EDGE_FALLTHRU)
563 e_fall = e;
564 else if (! (e->flags & EDGE_EH))
565 e_taken = e;
566
567 bb_end_insn = bb->end;
568 if (GET_CODE (bb_end_insn) == JUMP_INSN)
569 {
570 if (any_uncondjump_p (bb_end_insn))
571 {
572 /* If the destination is still not next, nothing to do. */
573 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
574 continue;
575
576 /* Otherwise, we can remove the jump and clean up the edge. */
577 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
578 RBI (bb)->eff_end = skip_insns_after_block (bb);
579 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
580
581 if (rtl_dump_file)
582 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
583 bb->index, RBI (bb)->index);
584 continue;
585 }
586 else if (any_condjump_p (bb_end_insn))
587 {
588 /* If the old fallthru is still next, nothing to do. */
589 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
590 || (RBI (bb)->index == n_basic_blocks - 1
591 && e_fall->dest == EXIT_BLOCK_PTR))
592 continue;
593
594 /* There is one special case: if *neither* block is next,
595 such as happens at the very end of a function, then we'll
596 need to add a new unconditional jump. Choose the taken
597 edge based on known or assumed probability. */
598 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
599 {
600 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
601 if (note
602 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
603 && invert_jump (bb_end_insn,
604 label_for_bb (e_fall->dest), 0))
605 {
606 e_fall->flags &= ~EDGE_FALLTHRU;
607 e_taken->flags |= EDGE_FALLTHRU;
608 e = e_fall, e_fall = e_taken, e_taken = e;
609 }
610 }
611
612 /* Otherwise we can try to invert the jump. This will
613 basically never fail; however, keep up the pretense. */
614 else if (invert_jump (bb_end_insn,
615 label_for_bb (e_fall->dest), 0))
616 {
617 e_fall->flags &= ~EDGE_FALLTHRU;
618 e_taken->flags |= EDGE_FALLTHRU;
619 continue;
620 }
621 }
622 else if (returnjump_p (bb_end_insn))
623 continue;
624 else
625 {
626 /* Otherwise we have some switch or computed jump. In the
627 99% case, there should not have been a fallthru edge. */
628 if (! e_fall)
629 continue;
630 #ifdef CASE_DROPS_THROUGH
631 /* Except for VAX. Since we didn't have predication for the
632 tablejump, the fallthru block should not have moved. */
633 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
634 continue;
635 bb_end_insn = skip_insns_after_block (bb);
636 #else
637 abort ();
638 #endif
639 }
640 }
641 else
642 {
643 /* No fallthru implies a noreturn function with EH edges, or
644 something similarly bizarre. In any case, we don't need to
645 do anything. */
646 if (! e_fall)
647 continue;
648
649 /* If the fallthru block is still next, nothing to do. */
650 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
651 || (RBI (bb)->index == n_basic_blocks - 1
652 && e_fall->dest == EXIT_BLOCK_PTR))
653 continue;
654
655 /* We need a new jump insn. If the block has only one outgoing
656 edge, then we can stuff the new jump insn in directly. */
657 if (bb->succ->succ_next == NULL)
658 {
659 e_fall->flags &= ~EDGE_FALLTHRU;
660
661 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
662 bb->end = jump_insn;
663 barrier_insn = emit_barrier_after (jump_insn);
664 RBI (bb)->eff_end = barrier_insn;
665 continue;
666 }
667 }
668
669 /* If we get here, we need to add a new jump insn in a new block
670 across the edge e_fall. */
671
672 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
673 barrier_insn = emit_barrier_after (jump_insn);
674
675 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
676 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
677
678 nb = BASIC_BLOCK (n_basic_blocks - 1);
679 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
680 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
681 nb->local_set = 0;
682
683 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
684 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
685
686 nb->aux = xmalloc (sizeof (struct reorder_block_def));
687 RBI (nb)->eff_head = nb->head;
688 RBI (nb)->eff_end = barrier_insn;
689 RBI (nb)->scope = RBI (bb)->scope;
690 RBI (nb)->index = RBI (bb)->index + 1;
691 RBI (nb)->visited = 1;
692 RBI (nb)->next = RBI (bb)->next;
693 RBI (bb)->next = nb;
694
695 /* Link to new block. */
696 make_edge (NULL, nb, e_fall->dest, 0);
697 redirect_edge_succ (e_fall, nb);
698
699 /* Don't process this new block. */
700 bb = nb;
701
702 /* Fix subsequent reorder block indices to reflect the new block. */
703 while ((nb = RBI (nb)->next) != NULL)
704 RBI (nb)->index += 1;
705 }
706
707 /* Put basic_block_info in the new order. */
708 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
709 {
710 bb->index = RBI (bb)->index;
711 BASIC_BLOCK (bb->index) = bb;
712 }
713 }
714
715
716 /* Perform sanity checks on the insn chain.
717 1. Check that next/prev pointers are consistent in both the forward and
718 reverse direction.
719 2. Count insns in chain, going both directions, and check if equal.
720 3. Check that get_last_insn () returns the actual end of chain. */
721
722 void
723 verify_insn_chain ()
724 {
725 rtx x,
726 prevx,
727 nextx;
728 int insn_cnt1,
729 insn_cnt2;
730
731 prevx = NULL;
732 insn_cnt1 = 1;
733 for (x = get_insns (); x; x = NEXT_INSN (x))
734 {
735 if (PREV_INSN (x) != prevx)
736 {
737 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
738 fprintf (stderr, "previous insn:\n");
739 debug_rtx (prevx);
740 fprintf (stderr, "current insn:\n");
741 debug_rtx (x);
742 abort ();
743 }
744 ++insn_cnt1;
745 prevx = x;
746 }
747
748 if (prevx != get_last_insn ())
749 {
750 fprintf (stderr, "last_insn corrupt.\n");
751 abort ();
752 }
753
754 nextx = NULL;
755 insn_cnt2 = 1;
756 for (x = get_last_insn (); x; x = PREV_INSN (x))
757 {
758 if (NEXT_INSN (x) != nextx)
759 {
760 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
761 fprintf (stderr, "current insn:\n");
762 debug_rtx (x);
763 fprintf (stderr, "next insn:\n");
764 debug_rtx (nextx);
765 abort ();
766 }
767 ++insn_cnt2;
768 nextx = x;
769 }
770
771 if (insn_cnt1 != insn_cnt2)
772 {
773 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
774 insn_cnt1, insn_cnt2);
775 abort ();
776 }
777 }
778
779 static rtx
780 get_next_bb_note (x)
781 rtx x;
782 {
783 while (x)
784 {
785 if (NOTE_INSN_BASIC_BLOCK_P (x))
786 return x;
787 x = NEXT_INSN (x);
788 }
789 return NULL;
790 }
791
792
793 static rtx
794 get_prev_bb_note (x)
795 rtx x;
796 {
797 while (x)
798 {
799 if (NOTE_INSN_BASIC_BLOCK_P (x))
800 return x;
801 x = PREV_INSN (x);
802 }
803 return NULL;
804 }
805
806
807 /* Determine and record the relationships between basic blocks and
808 scopes in scope tree S. */
809
810 static void
811 relate_bbs_with_scopes (s)
812 scope s;
813 {
814 scope p;
815 int i, bbi1, bbi2, bbs_spanned;
816 rtx bbnote;
817
818 for (p = s->inner; p; p = p->next)
819 relate_bbs_with_scopes (p);
820
821 bbi1 = bbi2 = -1;
822 bbs_spanned = 0;
823
824 /* If the begin and end notes are both inside the same basic block,
825 or if they are both outside of basic blocks, then we know immediately
826 how they are related. Otherwise, we need to poke around to make the
827 determination. */
828 if (s->bb_beg != s->bb_end)
829 {
830 if (s->bb_beg && s->bb_end)
831 {
832 /* The two notes are in different bbs. This implies that all the
833 basic blocks spanned by the pair of notes are contained in
834 this scope. */
835 bbi1 = s->bb_beg->index;
836 bbi2 = s->bb_end->index;
837 bbs_spanned = 1;
838 }
839 else if (! s->bb_beg)
840 {
841 /* First note is outside of a bb. If the scope spans more than
842 one basic block, then they all are contained within this
843 scope. Otherwise, this scope is contained within the basic
844 block. */
845 bbnote = get_next_bb_note (s->note_beg);
846 if (! bbnote)
847 abort ();
848 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
849 {
850 bbs_spanned = 0;
851 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
852 }
853 else
854 {
855 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
856 bbi2 = s->bb_end->index;
857 s->bb_end = NULL;
858 bbs_spanned = 1;
859 }
860 }
861 else /* ! s->bb_end */
862 {
863 /* Second note is outside of a bb. If the scope spans more than
864 one basic block, then they all are contained within this
865 scope. Otherwise, this scope is contained within the basic
866 block. */
867 bbnote = get_prev_bb_note (s->note_end);
868 if (! bbnote)
869 abort ();
870 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
871 {
872 bbs_spanned = 0;
873 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
874 }
875 else
876 {
877 bbi1 = s->bb_beg->index;
878 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
879 s->bb_beg = NULL;
880 bbs_spanned = 1;
881 }
882 }
883 }
884 else
885 {
886 if (s->bb_beg)
887 /* Both notes are in the same bb, which implies the block
888 contains this scope. */
889 bbs_spanned = 0;
890 else
891 {
892 rtx x1, x2;
893 /* Both notes are outside of any bbs. This implies that all the
894 basic blocks spanned by the pair of notes are contained in
895 this scope.
896 There is a degenerate case to consider. If the notes do not
897 span any basic blocks, then it is an empty scope that can
898 safely be deleted or ignored. Mark these with level = -1. */
899
900 x1 = get_next_bb_note (s->note_beg);
901 x2 = get_prev_bb_note (s->note_end);
902 if (! (x1 && x2))
903 {
904 s->level = -1;
905 bbs_spanned = 0;
906 }
907 else
908 {
909 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
910 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
911 bbs_spanned = 1;
912 }
913 }
914 }
915
916 /* If the scope spans one or more basic blocks, we record them. We
917 only record the bbs that are immediately contained within this
918 scope. Note that if a scope is contained within a bb, we can tell
919 by checking that bb_beg = bb_end and that they are non-null. */
920 if (bbs_spanned)
921 {
922 int j = 0;
923
924 s->num_bbs = 0;
925 for (i = bbi1; i <= bbi2; i++)
926 if (! RBI (BASIC_BLOCK (i))->scope)
927 s->num_bbs++;
928
929 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
930 for (i = bbi1; i <= bbi2; i++)
931 {
932 basic_block curr_bb = BASIC_BLOCK (i);
933 if (! RBI (curr_bb)->scope)
934 {
935 s->bbs[j++] = curr_bb;
936 RBI (curr_bb)->scope = s;
937 }
938 }
939 }
940 else
941 s->num_bbs = 0;
942 }
943
944
945 /* Allocate and initialize a new scope structure with scope level LEVEL,
946 and record the NOTE beginning the scope. */
947
948 static scope
949 make_new_scope (level, note)
950 int level;
951 rtx note;
952 {
953 scope new_scope = xcalloc (1, sizeof (struct scope_def));
954 new_scope->level = level;
955 new_scope->note_beg = note;
956 return new_scope;
957 }
958
959
960 /* Build a forest representing the scope structure of the function.
961 Record it in the structure pointed to by FOREST. */
962
963 static void
964 build_scope_forest (forest)
965 scope_forest_info *forest;
966 {
967 rtx x;
968 int level, bbi, i;
969 basic_block curr_bb;
970 scope root, curr_scope = 0;
971
972 forest->num_trees = 0;
973 forest->trees = NULL;
974 level = -1;
975 root = NULL;
976 curr_bb = NULL;
977 bbi = 0;
978 for (x = get_insns (); x; x = NEXT_INSN (x))
979 {
980 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
981 curr_bb = BASIC_BLOCK (bbi);
982
983 if (GET_CODE (x) == NOTE)
984 {
985 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
986 {
987 if (root)
988 {
989 scope new_scope;
990 if (! curr_scope)
991 abort();
992 level++;
993 new_scope = make_new_scope (level, x);
994 new_scope->outer = curr_scope;
995 new_scope->next = NULL;
996 if (! curr_scope->inner)
997 {
998 curr_scope->inner = new_scope;
999 curr_scope->inner_last = new_scope;
1000 }
1001 else
1002 {
1003 curr_scope->inner_last->next = new_scope;
1004 curr_scope->inner_last = new_scope;
1005 }
1006 curr_scope = curr_scope->inner_last;
1007 }
1008 else
1009 {
1010 int ntrees = forest->num_trees;
1011 level++;
1012 curr_scope = make_new_scope (level, x);
1013 root = curr_scope;
1014 forest->trees = xrealloc (forest->trees,
1015 sizeof (scope) * (ntrees + 1));
1016 forest->trees[forest->num_trees++] = root;
1017 }
1018 curr_scope->bb_beg = curr_bb;
1019 }
1020 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1021 {
1022 curr_scope->bb_end = curr_bb;
1023 curr_scope->note_end = x;
1024 level--;
1025 curr_scope = curr_scope->outer;
1026 if (level == -1)
1027 root = NULL;
1028 }
1029 } /* if note */
1030
1031 if (curr_bb && curr_bb->end == x)
1032 {
1033 curr_bb = NULL;
1034 bbi++;
1035 }
1036
1037 } /* for */
1038
1039 for (i = 0; i < forest->num_trees; i++)
1040 relate_bbs_with_scopes (forest->trees[i]);
1041 }
1042
1043
1044 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1045 the insn chain. */
1046
1047 static void
1048 remove_scope_notes ()
1049 {
1050 rtx x, next;
1051 basic_block currbb = NULL;
1052
1053 for (x = get_insns (); x; x = next)
1054 {
1055 next = NEXT_INSN (x);
1056 if (NOTE_INSN_BASIC_BLOCK_P (x))
1057 currbb = NOTE_BASIC_BLOCK (x);
1058
1059 if (GET_CODE (x) == NOTE
1060 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1061 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1062 {
1063 /* Check if the scope note happens to be the end of a bb. */
1064 if (currbb && x == currbb->end)
1065 currbb->end = PREV_INSN (x);
1066 if (currbb && x == currbb->head)
1067 abort ();
1068
1069 if (PREV_INSN (x))
1070 {
1071 NEXT_INSN (PREV_INSN (x)) = next;
1072 PREV_INSN (next) = PREV_INSN (x);
1073
1074 NEXT_INSN (x) = NULL;
1075 PREV_INSN (x) = NULL;
1076 }
1077 else
1078 abort ();
1079 }
1080 }
1081 }
1082
1083
1084 /* Insert scope note pairs for a contained scope tree S after insn *IP, advancing *IP. */
1085
1086 static void
1087 insert_intra_1 (s, ip)
1088 scope s;
1089 rtx *ip;
1090 {
1091 scope p;
1092
1093 if (NOTE_BLOCK (s->note_beg))
1094 {
1095 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1096 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1097 }
1098
1099 for (p = s->inner; p; p = p->next)
1100 insert_intra_1 (p, ip);
1101
1102 if (NOTE_BLOCK (s->note_beg))
1103 {
1104 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1105 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1106 }
1107 }
1108
1109
1110 /* Insert NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes for
1111 scopes that are contained within BB. */
1112
1113 static void
1114 insert_intra_bb_scope_notes (bb)
1115 basic_block bb;
1116 {
1117 scope s = RBI (bb)->scope;
1118 scope p;
1119 rtx ip;
1120
1121 if (! s)
1122 return;
1123
1124 ip = bb->head;
1125 if (GET_CODE (ip) == CODE_LABEL)
1126 ip = NEXT_INSN (ip);
1127
1128 for (p = s->inner; p; p = p->next)
1129 {
1130 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1131 insert_intra_1 (p, &ip);
1132 }
1133 }
1134
1135
1136 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1137 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1138 notes before BB2 such that the notes are correctly balanced. If BB1 or
1139 BB2 is NULL, we are inserting scope notes for the first and last basic
1140 blocks, respectively. */
1141
1142 static void
1143 insert_inter_bb_scope_notes (bb1, bb2)
1144 basic_block bb1;
1145 basic_block bb2;
1146 {
1147 rtx ip;
1148 scope com;
1149
1150 /* It is possible that a basic block is not contained in any scope.
1151 In that case, we either open or close a scope but not both. */
1152 if (bb1 && bb2)
1153 {
1154 scope s1 = RBI (bb1)->scope;
1155 scope s2 = RBI (bb2)->scope;
1156 if (! s1 && ! s2)
1157 return;
1158 if (! s1)
1159 bb1 = NULL;
1160 else if (! s2)
1161 bb2 = NULL;
1162 }
1163
1164 /* Find common ancestor scope. */
1165 if (bb1 && bb2)
1166 {
1167 scope s1 = RBI (bb1)->scope;
1168 scope s2 = RBI (bb2)->scope;
1169 while (s1 != s2)
1170 {
1171 if (! (s1 && s2))
1172 abort ();
1173 if (s1->level > s2->level)
1174 s1 = s1->outer;
1175 else if (s2->level > s1->level)
1176 s2 = s2->outer;
1177 else
1178 {
1179 s1 = s1->outer;
1180 s2 = s2->outer;
1181 }
1182 }
1183 com = s1;
1184 }
1185 else
1186 com = NULL;
1187
1188 /* Close scopes. */
1189 if (bb1)
1190 {
1191 scope s = RBI (bb1)->scope;
1192 ip = RBI (bb1)->eff_end;
1193 while (s != com)
1194 {
1195 if (NOTE_BLOCK (s->note_beg))
1196 {
1197 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1198 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1199 }
1200 s = s->outer;
1201 }
1202 }
1203
1204 /* Open scopes. */
1205 if (bb2)
1206 {
1207 scope s = RBI (bb2)->scope;
1208 ip = bb2->head;
1209 while (s != com)
1210 {
1211 if (NOTE_BLOCK (s->note_beg))
1212 {
1213 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1214 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1215 }
1216 s = s->outer;
1217 }
1218 }
1219 }
1220
1221
1222 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1223 on the scope forest and the newly reordered basic blocks. */
1224
1225 static void
1226 rebuild_scope_notes (forest)
1227 scope_forest_info *forest;
1228 {
1229 int i;
1230
1231 if (forest->num_trees == 0)
1232 return;
1233
1234 /* Start by opening the scopes before the first basic block. */
1235 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1236
1237 /* Then, open and close scopes as needed between blocks. */
1238 for (i = 0; i < n_basic_blocks - 1; i++)
1239 {
1240 basic_block bb1 = BASIC_BLOCK (i);
1241 basic_block bb2 = BASIC_BLOCK (i + 1);
1242 if (RBI (bb1)->scope != RBI (bb2)->scope)
1243 insert_inter_bb_scope_notes (bb1, bb2);
1244 insert_intra_bb_scope_notes (bb1);
1245 }
1246
1247 /* Finally, close the scopes after the last basic block. */
1248 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1249 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1250 }
1251
1252
1253 /* Free the storage associated with the scope tree at S. */
1254
1255 static void
1256 free_scope_forest_1 (s)
1257 scope s;
1258 {
1259 scope p, next;
1260
1261 for (p = s->inner; p; p = next)
1262 {
1263 next = p->next;
1264 free_scope_forest_1 (p);
1265 }
1266
1267 if (s->bbs)
1268 free (s->bbs);
1269 free (s);
1270 }
1271
1272
1273 /* Free the storage associated with the scope forest. */
1274
1275 static void
1276 free_scope_forest (forest)
1277 scope_forest_info *forest;
1278 {
1279 int i;
1280 for (i = 0; i < forest->num_trees; i++)
1281 free_scope_forest_1 (forest->trees[i]);
1282 }
1283
1284
1285 /* Visualize the scope forest. */
1286
1287 void
1288 dump_scope_forest (forest)
1289 scope_forest_info *forest;
1290 {
1291 if (forest->num_trees == 0)
1292 fprintf (stderr, "\n< Empty scope forest >\n");
1293 else
1294 {
1295 int i;
1296 fprintf (stderr, "\n< Scope forest >\n");
1297 for (i = 0; i < forest->num_trees; i++)
1298 dump_scope_forest_1 (forest->trees[i], 0);
1299 }
1300 }
1301
1302
1303 /* Recursive portion of dump_scope_forest. */
1304
1305 static void
1306 dump_scope_forest_1 (s, indent)
1307 scope s;
1308 int indent;
1309 {
1310 scope p;
1311 int i;
1312
1313 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1314 && RBI (s->bb_beg)->scope
1315 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1316 {
1317 fprintf (stderr, "%*s", indent, "");
1318 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1319 }
1320
1321 fprintf (stderr, "%*s", indent, "");
1322 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1323 (PTR) NOTE_BLOCK (s->note_beg));
1324
1325 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1326 for (i = 0; i < s->num_bbs; i++)
1327 fprintf (stderr, " %d", s->bbs[i]->index);
1328 fprintf (stderr, "\n");
1329
1330 for (p = s->inner; p; p = p->next)
1331 dump_scope_forest_1 (p, indent + 2);
1332
1333 fprintf (stderr, "%*s", indent, "");
1334 fprintf (stderr, "}\n");
1335 }
1336
1337
1338 /* Reorder basic blocks. The main entry point to this file. */
1339
1340 void
1341 reorder_basic_blocks ()
1342 {
1343 scope_forest_info forest;
1344 int i;
1345
1346 if (n_basic_blocks <= 1)
1347 return;
1348
1349 /* We do not currently handle correct re-placement of EH notes. */
1350 for (i = 0; i < n_basic_blocks; i++)
1351 {
1352 edge e;
1353 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1354 if (e->flags & EDGE_EH)
1355 return;
1356 }
1357
1358 for (i = 0; i < n_basic_blocks; i++)
1359 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1360
1361 build_scope_forest (&forest);
1362 remove_scope_notes ();
1363
1364 record_effective_endpoints ();
1365 make_reorder_chain ();
1366 fixup_reorder_chain ();
1367
1368 #ifdef ENABLE_CHECKING
1369 verify_insn_chain ();
1370 #endif
1371
1372 rebuild_scope_notes (&forest);
1373 free_scope_forest (&forest);
1374 reorder_blocks ();
1375
1376 for (i = 0; i < n_basic_blocks; i++)
1377 free (BASIC_BLOCK (i)->aux);
1378
1379 #ifdef ENABLE_CHECKING
1380 verify_flow_info ();
1381 #endif
1382 }