IA-64 ABI Exception Handling.
[gcc.git] / gcc / bb-reorder.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* References:
22
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
25
26 TODO:
27
28 (1) Consider:
29
30 if (p) goto A; // predict taken
31 foo ();
32 A:
33 if (q) goto B; // predict taken
34 bar ();
35 B:
36 baz ();
37 return;
38
39 We'll currently reorder this as
40
41 if (!p) goto C;
42 A:
43 if (!q) goto D;
44 B:
45 baz ();
46 return;
47 D:
48 bar ();
49 goto B;
50 C:
51 foo ();
52 goto A;
53
54 A better ordering is
55
56 if (!p) goto C;
57 if (!q) goto D;
58 B:
59 baz ();
60 return;
61 C:
62 foo ();
63 if (q) goto B;
64 D:
65 bar ();
66 goto B;
67
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
71
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
74
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
78
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
81 */
82
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "toplev.h"
96 #include "recog.h"
97 #include "expr.h"
98 #include "obstack.h"
99
100
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
103 #endif
104
105
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
110
111 extern struct obstack flow_obstack;
112
113
/* Structure to hold information about lexical scopes.
   A scope corresponds to one NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END
   pair; scopes form trees, collected into a forest.  */
typedef struct scope_def
{
  /* Nesting depth of this scope; a tree root has level 0.
     Set to -1 by relate_bbs_with_scopes for degenerate empty scopes.  */
  int level;

  /* The NOTE_INSN_BLOCK_BEG that started this scope.  */
  rtx note_beg;

  /* The NOTE_INSN_BLOCK_END that ended this scope.  */
  rtx note_end;

  /* The bb containing note_beg (if any).  */
  basic_block bb_beg;

  /* The bb containing note_end (if any).  */
  basic_block bb_end;

  /* List of basic blocks contained within this scope.  */
  basic_block *bbs;

  /* Number of blocks contained within this scope.  */
  int num_bbs;

  /* The outer scope or NULL if outermost scope.  */
  struct scope_def *outer;

  /* The first inner scope or NULL if innermost scope.  */
  struct scope_def *inner;

  /* The last inner scope or NULL if innermost scope.  */
  struct scope_def *inner_last;

  /* Link to the next (sibling) scope.  */
  struct scope_def *next;
} *scope;
149
150
/* Structure to hold information about the scope forest: the set of
   scope trees for the whole function, one tree per outermost scope.  */
typedef struct
{
  /* Number of trees in forest.  */
  int num_trees;

  /* Array of tree roots, one per outermost lexical scope.  */
  scope *trees;
} scope_forest_info;
160
/* Structure to hold information about the blocks during reordering.
   One of these is hung off each basic block's aux field.  */
typedef struct reorder_block_def
{
  /* Effective first and last insns of the block's chain, including
     surrounding inter-block insns (barriers, certain notes) that travel
     with the block; computed by record_effective_endpoints.  */
  rtx eff_head;
  rtx eff_end;

  /* Innermost lexical scope containing this block, or NULL; set by
     relate_bbs_with_scopes.  */
  scope scope;

  /* Next block in the computed reorder chain, or NULL at the end.  */
  basic_block next;

  /* Position of this block in the reorder chain (0-based).  */
  int index;

  /* Nonzero once the block has been placed in the chain.  */
  int visited;
} *reorder_block_def;

/* Access the reordering information hung off BB's aux field.  */
#define RBI(BB) ((reorder_block_def) (BB)->aux)
173
174
175 /* Local function prototypes. */
176 static rtx skip_insns_after_block PARAMS ((basic_block));
177 static void record_effective_endpoints PARAMS ((void));
178 static void make_reorder_chain PARAMS ((void));
179 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
180 static rtx label_for_bb PARAMS ((basic_block));
181 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
182 static void fixup_reorder_chain PARAMS ((void));
183 static void relate_bbs_with_scopes PARAMS ((scope));
184 static scope make_new_scope PARAMS ((int, rtx));
185 static void build_scope_forest PARAMS ((scope_forest_info *));
186 static void remove_scope_notes PARAMS ((void));
187 static void insert_intra_1 PARAMS ((scope, rtx *));
188 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
189 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
190 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
191 static void free_scope_forest_1 PARAMS ((scope));
192 static void free_scope_forest PARAMS ((scope_forest_info *));
193 void dump_scope_forest PARAMS ((scope_forest_info *));
194 static void dump_scope_forest_1 PARAMS ((scope, int));
195 static rtx get_next_bb_note PARAMS ((rtx));
196 static rtx get_prev_bb_note PARAMS ((rtx));
197
198 void verify_insn_chain PARAMS ((void));
199 \f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head;

  /* Never walk into the next block's insns.  */
  next_head = NULL_RTX;
  if (bb->index + 1 != n_basic_blocks)
    next_head = BASIC_BLOCK (bb->index + 1)->head;

  for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong with the preceding block.  */
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* These notes also travel with the block.  */
	      continue;

	    default:
	      /* Any other note terminates the scan.  */
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by a jump table is part of
	     the table and stays with this block; skip both.  */
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Anything not handled above stops the scan.  */
      break;
    }

  return last_insn;
}
258
259
260 /* Locate the effective beginning and end of the insn chain for each
261 block, as defined by skip_insns_after_block above. */
262
263 static void
264 record_effective_endpoints ()
265 {
266 rtx next_insn = get_insns ();
267 int i;
268
269 for (i = 0; i < n_basic_blocks; ++i)
270 {
271 basic_block bb = BASIC_BLOCK (i);
272 rtx end;
273
274 RBI (bb)->eff_head = next_insn;
275 end = skip_insns_after_block (bb);
276 RBI (bb)->eff_end = end;
277 next_insn = NEXT_INSN (end);
278 }
279 }
280
281
/* Compute an ordering for a subgraph beginning with block BB.  Record the
   ordering in RBI()->index and chained through RBI()->next.  */

static void
make_reorder_chain ()
{
  basic_block last_block = NULL;
  basic_block prev = NULL;
  /* Index of the last block that participates in normal placement.  */
  int nbb_m1 = n_basic_blocks - 1;

  /* If we've not got epilogue in RTL, we must fallthru to the exit.
     Force the last block to be at the end.  */
  /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
     end of the function for stack unwinding purposes.  */
  if (! HAVE_epilogue)
    {
      last_block = BASIC_BLOCK (nbb_m1);
      RBI (last_block)->visited = 1;
      nbb_m1 -= 1;
    }

  /* Loop until we've placed every block.  */
  do
    {
      int i;
      basic_block next = NULL;

      /* Find the next unplaced block.  */
      /* ??? Get rid of this loop, and track which blocks are not yet
	 placed more directly, so as to avoid the O(N^2) worst case.
	 Perhaps keep a doubly-linked list of all to-be-placed blocks;
	 remove from the list as we place.  The head of that list is
	 what we're looking for here.  */

      for (i = 0; i <= nbb_m1; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  if (! RBI (bb)->visited)
	    {
	      next = bb;
	      break;
	    }
	}
      /* The loop condition guarantees an unvisited block exists.  */
      if (! next)
	abort ();

      prev = make_reorder_chain_1 (next, prev);
    }
  while (RBI (prev)->index < nbb_m1);

  /* Terminate the chain, appending the held-back final block if we
     reserved one above.  */
  if (! HAVE_epilogue)
    {
      RBI (prev)->next = last_block;
      RBI (last_block)->index = RBI (prev)->index + 1;
      prev = last_block;
    }
  RBI (prev)->next = NULL;
}
341
/* A helper function for make_reorder_chain.

   We do not follow EH edges, or non-fallthru edges to noreturn blocks.
   These are assumed to be the error condition and we wish to cluster
   all of them at the very end of the function for the benefit of cache
   locality for the rest of the function.

   ??? We could do slightly better by noticing earlier that some subgraph
   has all paths leading to noreturn functions, but for there to be more
   than one block in such a subgraph is rare.  */

static basic_block
make_reorder_chain_1 (bb, prev)
     basic_block bb;
     basic_block prev;
{
  edge e;
  basic_block next;
  rtx note;

  /* Mark this block visited.  */
  if (prev)
    {
      int new_index;

 restart:
      RBI (prev)->next = bb;
      new_index = RBI (prev)->index + 1;
      RBI (bb)->index = new_index;

      if (rtl_dump_file && prev->index + 1 != bb->index)
	fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
		 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
    }
  else
    RBI (bb)->index = 0;
  RBI (bb)->visited = 1;
  prev = bb;

  if (bb->succ == NULL)
    return prev;

  /* Find the most probable block.  */

  next = NULL;
  if (any_condjump_p (bb->end)
      && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
    {
      int taken, probability;
      edge e_taken, e_fall;

      probability = INTVAL (XEXP (note, 0));
      taken = probability > REG_BR_PROB_BASE / 2;

      /* Find the normal taken edge and the normal fallthru edge.

	 Note, conditional jumps with other side effects may not
	 be fully optimized.  In this case it is possible for
	 the conditional jump to branch to the same location as
	 the fallthru path.

	 We should probably work to improve optimization of that
	 case; however, it seems silly not to also deal with such
	 problems here if they happen to occur.  */

      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	{
	  /* NOTE(review): the two tests are not exclusive, so a non-EH
	     fallthru edge can end up recorded as e_taken too if it is
	     last on the list -- presumably intentional per the comment
	     above; confirm.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e_fall = e;
	  if (! (e->flags & EDGE_EH))
	    e_taken = e;
	}

      next = (taken ? e_taken : e_fall)->dest;
    }

  /* In the absence of a prediction, disturb things as little as possible
     by selecting the old "next" block from the list of successors.  If
     there had been a fallthru edge, that will be the one.  */
  if (! next)
    {
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->dest->index == bb->index + 1)
	  {
	    if ((e->flags & EDGE_FALLTHRU)
		|| (e->dest->succ
		    && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
	      next = e->dest;
	    break;
	  }
    }

  /* Make sure we didn't select a silly next block.  */
  if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
    next = NULL;

  /* Recurse on the successors.  Unroll the last call, as the normal
     case is exactly one or two edges, and we can tail recurse.  */
  for (e = bb->succ; e; e = e->succ_next)
    if (e->dest != EXIT_BLOCK_PTR
	&& ! RBI (e->dest)->visited
	&& e->dest->succ
	&& ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
      {
	if (next)
	  {
	    prev = make_reorder_chain_1 (next, prev);
	    /* The recursion may have placed e->dest; re-check.  */
	    next = RBI (e->dest)->visited ? NULL : e->dest;
	  }
	else
	  next = e->dest;
      }
  /* Tail-recurse on the chosen block by looping, not calling.  */
  if (next)
    {
      bb = next;
      goto restart;
    }

  return prev;
}
463
464
465 /* Locate or create a label for a given basic block. */
466
467 static rtx
468 label_for_bb (bb)
469 basic_block bb;
470 {
471 rtx label = bb->head;
472
473 if (GET_CODE (label) != CODE_LABEL)
474 {
475 if (rtl_dump_file)
476 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
477 bb->index, RBI (bb)->index);
478
479 label = emit_label_before (gen_label_rtx (), label);
480 if (bb->head == RBI (bb)->eff_head)
481 RBI (bb)->eff_head = label;
482 bb->head = label;
483 }
484
485 return label;
486 }
487
488
/* Emit a jump to BB after insn AFTER and return the new jump insn.
   If BB is the exit block, emit a return insn instead (the target must
   provide a return pattern, or we abort).  */

static rtx
emit_jump_to_block_after (bb, after)
     basic_block bb;
     rtx after;
{
  rtx jump;

  if (bb != EXIT_BLOCK_PTR)
    {
      /* Jump to the label at the head of BB, creating it if needed,
	 and keep the label's use count accurate.  */
      rtx label = label_for_bb (bb);
      jump = emit_jump_insn_after (gen_jump (label), after);
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label) += 1;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
		 bb->index, RBI (bb)->index);
    }
  else
    {
      /* A "jump" to the exit block is a return insn; only possible
	 when the target defines one.  */
#ifdef HAVE_return
      if (! HAVE_return)
	abort ();
      jump = emit_jump_insn_after (gen_return (), after);

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting return\n");
#else
      abort ();
#endif
    }

  return jump;
}
525
526
/* Given a reorder chain, rearrange the code to match.
   First the insn chain is rethreaded into the new order; then jumps are
   added, removed, or inverted so that each block's fallthru edge really
   does fall through to the following block.  Finally basic_block_info
   is updated to the new order.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, last_bb;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  last_bb = BASIC_BLOCK (0);
  bb = RBI (last_bb)->next;
  while (bb)
    {
      rtx last_e = RBI (last_bb)->eff_end;
      rtx curr_h = RBI (bb)->eff_head;

      /* Splice this block's insns after the previous block's.  */
      NEXT_INSN (last_e) = curr_h;
      PREV_INSN (curr_h) = last_e;

      last_bb = bb;
      bb = RBI (bb)->next;
    }
  NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
  set_last_insn (RBI (last_bb)->eff_end);

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx jump_insn, barrier_insn, bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_uncondjump_p (bb_end_insn))
	    {
	      /* If the destination is still not next, nothing to do.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		continue;

	      /* Otherwise, we can remove the jump and cleanup the edge.  */
	      tidy_fallthru_edge (e_taken, bb, e_taken->dest);
	      RBI (bb)->eff_end = skip_insns_after_block (bb);
	      RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
			 bb->index, RBI (bb)->index);
	      continue;
	    }
	  else if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
		  || (RBI (bb)->index == n_basic_blocks - 1
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      /* The inversion succeeded; swap the edge roles so
			 the jump added below targets the right block.  */
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;
#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
	      || (RBI (bb)->index == n_basic_blocks - 1
		  && e_fall->dest == EXIT_BLOCK_PTR))
	    continue;

	  /* We need a new jump insn.  If the block has only one outgoing
	     edge, then we can stuff the new jump insn in directly.  */
	  if (bb->succ->succ_next == NULL)
	    {
	      e_fall->flags &= ~EDGE_FALLTHRU;

	      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
	      bb->end = jump_insn;
	      barrier_insn = emit_barrier_after (jump_insn);
	      RBI (bb)->eff_end = barrier_insn;
	      continue;
	    }
	}

      /* We got here if we need to add a new jump insn in a new block
	 across the edge e_fall.  */

      jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
      barrier_insn = emit_barrier_after (jump_insn);

      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
      create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);

      /* Give the new block the same life information as BB; it is on
	 a path out of BB and contains no register references.  */
      nb = BASIC_BLOCK (n_basic_blocks - 1);
      nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      nb->local_set = 0;

      COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);

      /* Splice the new block into the reorder chain right after BB.  */
      nb->aux = xmalloc (sizeof (struct reorder_block_def));
      RBI (nb)->eff_head = nb->head;
      RBI (nb)->eff_end = barrier_insn;
      RBI (nb)->scope = RBI (bb)->scope;
      RBI (nb)->index = RBI (bb)->index + 1;
      RBI (nb)->visited = 1;
      RBI (nb)->next = RBI (bb)->next;
      RBI (bb)->next = nb;

      /* Link to new block.  */
      make_edge (NULL, nb, e_fall->dest, 0);
      redirect_edge_succ (e_fall, nb);

      /* Don't process this new block.  */
      bb = nb;

      /* Fix subsequent reorder block indices to reflect new block.  */
      while ((nb = RBI (nb)->next) != NULL)
	RBI (nb)->index += 1;
    }

  /* Put basic_block_info in the new order.  */
  for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
    {
      bb->index = RBI (bb)->index;
      BASIC_BLOCK (bb->index) = bb;
    }
}
721
722
723 /* Perform sanity checks on the insn chain.
724 1. Check that next/prev pointers are consistent in both the forward and
725 reverse direction.
726 2. Count insns in chain, going both directions, and check if equal.
727 3. Check that get_last_insn () returns the actual end of chain. */
728
729 void
730 verify_insn_chain ()
731 {
732 rtx x,
733 prevx,
734 nextx;
735 int insn_cnt1,
736 insn_cnt2;
737
738 prevx = NULL;
739 insn_cnt1 = 1;
740 for (x = get_insns (); x; x = NEXT_INSN (x))
741 {
742 if (PREV_INSN (x) != prevx)
743 {
744 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
745 fprintf (stderr, "previous insn:\n");
746 debug_rtx (prevx);
747 fprintf (stderr, "current insn:\n");
748 debug_rtx (x);
749 abort ();
750 }
751 ++insn_cnt1;
752 prevx = x;
753 }
754
755 if (prevx != get_last_insn ())
756 {
757 fprintf (stderr, "last_insn corrupt.\n");
758 abort ();
759 }
760
761 nextx = NULL;
762 insn_cnt2 = 1;
763 for (x = get_last_insn (); x; x = PREV_INSN (x))
764 {
765 if (NEXT_INSN (x) != nextx)
766 {
767 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
768 fprintf (stderr, "current insn:\n");
769 debug_rtx (x);
770 fprintf (stderr, "next insn:\n");
771 debug_rtx (nextx);
772 abort ();
773 }
774 ++insn_cnt2;
775 nextx = x;
776 }
777
778 if (insn_cnt1 != insn_cnt2)
779 {
780 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
781 insn_cnt1, insn_cnt2);
782 abort ();
783 }
784 }
785
786 static rtx
787 get_next_bb_note (x)
788 rtx x;
789 {
790 while (x)
791 {
792 if (NOTE_INSN_BASIC_BLOCK_P (x))
793 return x;
794 x = NEXT_INSN (x);
795 }
796 return NULL;
797 }
798
799
800 static rtx
801 get_prev_bb_note (x)
802 rtx x;
803 {
804 while (x)
805 {
806 if (NOTE_INSN_BASIC_BLOCK_P (x))
807 return x;
808 x = PREV_INSN (x);
809 }
810 return NULL;
811 }
812
813
/* Determine and record the relationships between basic blocks and
   scopes in scope tree S: which blocks are spanned by S, and in which
   block (if any) its begin/end notes live.  Recurses over inner scopes
   first so that a block is claimed by its innermost enclosing scope.  */

static void
relate_bbs_with_scopes (s)
     scope s;
{
  scope p;
  int i, bbi1, bbi2, bbs_spanned;
  rtx bbnote;

  for (p = s->inner; p; p = p->next)
    relate_bbs_with_scopes (p);

  bbi1 = bbi2 = -1;
  bbs_spanned = 0;

  /* If the begin and end notes are both inside the same basic block,
     or if they are both outside of basic blocks, then we know immediately
     how they are related.  Otherwise, we need to poke around to make the
     determination.  */
  if (s->bb_beg != s->bb_end)
    {
      if (s->bb_beg && s->bb_end)
	{
	  /* Both notes are in different bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.  */
	  bbi1 = s->bb_beg->index;
	  bbi2 = s->bb_end->index;
	  bbs_spanned = 1;
	}
      else if (! s->bb_beg)
	{
	  /* First note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_next_bb_note (s->note_beg);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
	    {
	      bbs_spanned = 0;
	      s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
	      bbi2 = s->bb_end->index;
	      s->bb_end = NULL;
	      bbs_spanned = 1;
	    }
	}
      else /* ! s->bb_end */
	{
	  /* Second note is outside of a bb.  If the scope spans more than
	     one basic block, then they all are contained within this
	     scope.  Otherwise, this scope is contained within the basic
	     block.  */
	  bbnote = get_prev_bb_note (s->note_end);
	  if (! bbnote)
	    abort ();
	  if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
	    {
	      bbs_spanned = 0;
	      s->bb_end = NOTE_BASIC_BLOCK (bbnote);
	    }
	  else
	    {
	      bbi1 = s->bb_beg->index;
	      bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
	      s->bb_beg = NULL;
	      bbs_spanned = 1;
	    }
	}
    }
  else
    {
      if (s->bb_beg)
	/* Both notes are in the same bb, which implies the block
	   contains this scope.  */
	bbs_spanned = 0;
      else
	{
	  rtx x1, x2;
	  /* Both notes are outside of any bbs.  This implies that all the
	     basic blocks spanned by the pair of notes are contained in
	     this scope.
	     There is a degenerate case to consider.  If the notes do not
	     span any basic blocks, then it is an empty scope that can
	     safely be deleted or ignored.  Mark these with level = -1.  */

	  x1 = get_next_bb_note (s->note_beg);
	  x2 = get_prev_bb_note (s->note_end);
	  if (! (x1 && x2))
	    {
	      s->level = -1;
	      bbs_spanned = 0;
	    }
	  else
	    {
	      bbi1 = NOTE_BASIC_BLOCK (x1)->index;
	      bbi2 = NOTE_BASIC_BLOCK (x2)->index;
	      bbs_spanned = 1;
	    }
	}
    }

  /* If the scope spans one or more basic blocks, we record them.  We
     only record the bbs that are immediately contained within this
     scope.  Note that if a scope is contained within a bb, we can tell
     by checking that bb_beg = bb_end and that they are non-null.  */
  if (bbs_spanned)
    {
      int j = 0;

      /* First count the blocks not yet claimed by an inner scope...  */
      s->num_bbs = 0;
      for (i = bbi1; i <= bbi2; i++)
	if (! RBI (BASIC_BLOCK (i))->scope)
	  s->num_bbs++;

      /* ...then record them and mark them as belonging to S.  */
      s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
      for (i = bbi1; i <= bbi2; i++)
	{
	  basic_block curr_bb = BASIC_BLOCK (i);
	  if (! RBI (curr_bb)->scope)
	    {
	      s->bbs[j++] = curr_bb;
	      RBI (curr_bb)->scope = s;
	    }
	}
    }
  else
    s->num_bbs = 0;
}
950
951
952 /* Allocate and initialize a new scope structure with scope level LEVEL,
953 and record the NOTE beginning the scope. */
954
955 static scope
956 make_new_scope (level, note)
957 int level;
958 rtx note;
959 {
960 scope new_scope = xcalloc (1, sizeof (struct scope_def));
961 new_scope->level = level;
962 new_scope->note_beg = note;
963 return new_scope;
964 }
965
966
/* Build a forest representing the scope structure of the function,
   by scanning the insn stream for NOTE_INSN_BLOCK_BEG/END pairs.
   Return a pointer to a structure describing the forest.  */

static void
build_scope_forest (forest)
     scope_forest_info *forest;
{
  rtx x;
  int level, bbi, i;
  basic_block curr_bb;
  scope root, curr_scope = 0;

  forest->num_trees = 0;
  forest->trees = NULL;
  level = -1;
  root = NULL;
  curr_bb = NULL;
  bbi = 0;
  for (x = get_insns (); x; x = NEXT_INSN (x))
    {
      /* Track which basic block, if any, insn X lies in.  */
      if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
	curr_bb = BASIC_BLOCK (bbi);

      if (GET_CODE (x) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
	    {
	      if (root)
		{
		  /* Open a new scope nested within the current one,
		     appending it to the current scope's child list.  */
		  scope new_scope;
		  if (! curr_scope)
		    abort();
		  level++;
		  new_scope = make_new_scope (level, x);
		  new_scope->outer = curr_scope;
		  new_scope->next = NULL;
		  if (! curr_scope->inner)
		    {
		      curr_scope->inner = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  else
		    {
		      curr_scope->inner_last->next = new_scope;
		      curr_scope->inner_last = new_scope;
		    }
		  curr_scope = curr_scope->inner_last;
		}
	      else
		{
		  /* No scope is open; this note starts a new tree in
		     the forest.  */
		  int ntrees = forest->num_trees;
		  level++;
		  curr_scope = make_new_scope (level, x);
		  root = curr_scope;
		  forest->trees = xrealloc (forest->trees,
					    sizeof (scope) * (ntrees + 1));
		  forest->trees[forest->num_trees++] = root;
		}
	      curr_scope->bb_beg = curr_bb;
	    }
	  else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
	    {
	      /* Close the current scope and pop back to its parent;
		 closing the outermost scope ends the current tree.  */
	      curr_scope->bb_end = curr_bb;
	      curr_scope->note_end = x;
	      level--;
	      curr_scope = curr_scope->outer;
	      if (level == -1)
		root = NULL;
	    }
	} /* if note */

      if (curr_bb && curr_bb->end == x)
	{
	  curr_bb = NULL;
	  bbi++;
	}

    } /* for */

  for (i = 0; i < forest->num_trees; i++)
    relate_bbs_with_scopes (forest->trees[i]);
}
1049
1050
/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
   the insn chain, adjusting any block whose end insn was such a note.
   They are rebuilt after reordering by rebuild_scope_notes.  */

static void
remove_scope_notes ()
{
  rtx x, next;
  basic_block currbb = NULL;

  for (x = get_insns (); x; x = next)
    {
      next = NEXT_INSN (x);
      /* Track which basic block we are currently inside.  */
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	currbb = NOTE_BASIC_BLOCK (x);

      if (GET_CODE (x) == NOTE
	  && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
	{
	  /* Check if the scope note happens to be the end of a bb.  */
	  if (currbb && x == currbb->end)
	    currbb->end = PREV_INSN (x);
	  /* A scope note heading a block is unexpected.  */
	  if (currbb && x == currbb->head)
	    abort ();

	  /* Unlink the note from the insn chain.  A scope note with
	     no predecessor insn is unexpected.  */
	  if (PREV_INSN (x))
	    {
	      NEXT_INSN (PREV_INSN (x)) = next;
	      PREV_INSN (next) = PREV_INSN (x);

	      NEXT_INSN (x) = NULL;
	      PREV_INSN (x) = NULL;
	    }
	  else
	    abort ();
	}
    }
}
1089
1090
/* Insert scope note pairs for a contained scope tree S after insn IP,
   advancing *IP past each emitted note so subsequent notes follow it.  */

static void
insert_intra_1 (s, ip)
     scope s;
     rtx *ip;
{
  scope p;

  /* Only scopes whose begin note carries a BLOCK get notes emitted.  */
  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
    }

  /* Emit all inner scopes between the begin and end notes.  */
  for (p = s->inner; p; p = p->next)
    insert_intra_1 (p, ip);

  /* NOTE(review): the guard tests note_beg while the emitted end note
     takes its block from note_end -- presumably both notes of a pair
     share the same BLOCK; verify against build_scope_forest.  */
  if (NOTE_BLOCK (s->note_beg))
    {
      *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
    }
}
1115
1116
1117 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1118 scopes that are contained within BB. */
1119
1120 static void
1121 insert_intra_bb_scope_notes (bb)
1122 basic_block bb;
1123 {
1124 scope s = RBI (bb)->scope;
1125 scope p;
1126 rtx ip;
1127
1128 if (! s)
1129 return;
1130
1131 ip = bb->head;
1132 if (GET_CODE (ip) == CODE_LABEL)
1133 ip = NEXT_INSN (ip);
1134
1135 for (p = s->inner; p; p = p->next)
1136 {
1137 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1138 insert_intra_1 (p, &ip);
1139 }
1140 }
1141
1142
/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
   insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
   notes before BB2 such that the notes are correctly balanced.  If BB1 or
   BB2 is NULL, we are inserting scope notes for the first and last basic
   blocks, respectively.  */

static void
insert_inter_bb_scope_notes (bb1, bb2)
     basic_block bb1;
     basic_block bb2;
{
  rtx ip;
  scope com;

  /* It is possible that a basic block is not contained in any scope.
     In that case, we either open or close a scope but not both.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      if (! s1 && ! s2)
	return;
      if (! s1)
	bb1 = NULL;
      else if (! s2)
	bb2 = NULL;
    }

  /* Find common ancestor scope: walk both scopes up the tree, leveling
     first, then stepping outward in lockstep until they meet.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      while (s1 != s2)
	{
	  if (! (s1 && s2))
	    abort ();
	  if (s1->level > s2->level)
	    s1 = s1->outer;
	  else if (s2->level > s1->level)
	    s2 = s2->outer;
	  else
	    {
	      s1 = s1->outer;
	      s2 = s2->outer;
	    }
	}
      com = s1;
    }
  else
    com = NULL;

  /* Close scopes: emit an END note after BB1 for every scope from
     BB1's scope up to (but not including) the common ancestor.  */
  if (bb1)
    {
      scope s = RBI (bb1)->scope;
      ip = RBI (bb1)->eff_end;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
	    }
	  s = s->outer;
	}
    }

  /* Open scopes: emit a BEG note before BB2's head for every scope from
     BB2's scope up to (but not including) the common ancestor.  */
  if (bb2)
    {
      scope s = RBI (bb2)->scope;
      ip = bb2->head;
      while (s != com)
	{
	  if (NOTE_BLOCK (s->note_beg))
	    {
	      ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
	    }
	  s = s->outer;
	}
    }
}
1227
1228
1229 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1230 on the scope forest and the newly reordered basic blocks. */
1231
1232 static void
1233 rebuild_scope_notes (forest)
1234 scope_forest_info *forest;
1235 {
1236 int i;
1237
1238 if (forest->num_trees == 0)
1239 return;
1240
1241 /* Start by opening the scopes before the first basic block. */
1242 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1243
1244 /* Then, open and close scopes as needed between blocks. */
1245 for (i = 0; i < n_basic_blocks - 1; i++)
1246 {
1247 basic_block bb1 = BASIC_BLOCK (i);
1248 basic_block bb2 = BASIC_BLOCK (i + 1);
1249 if (RBI (bb1)->scope != RBI (bb2)->scope)
1250 insert_inter_bb_scope_notes (bb1, bb2);
1251 insert_intra_bb_scope_notes (bb1);
1252 }
1253
1254 /* Finally, close the scopes after the last basic block. */
1255 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1256 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1257 }
1258
1259
1260 /* Free the storage associated with the scope tree at S. */
1261
1262 static void
1263 free_scope_forest_1 (s)
1264 scope s;
1265 {
1266 scope p, next;
1267
1268 for (p = s->inner; p; p = next)
1269 {
1270 next = p->next;
1271 free_scope_forest_1 (p);
1272 }
1273
1274 if (s->bbs)
1275 free (s->bbs);
1276 free (s);
1277 }
1278
1279
1280 /* Free the storage associated with the scope forest. */
1281
1282 static void
1283 free_scope_forest (forest)
1284 scope_forest_info *forest;
1285 {
1286 int i;
1287 for (i = 0; i < forest->num_trees; i++)
1288 free_scope_forest_1 (forest->trees[i]);
1289 }
1290
1291
1292 /* Visualize the scope forest. */
1293
1294 void
1295 dump_scope_forest (forest)
1296 scope_forest_info *forest;
1297 {
1298 if (forest->num_trees == 0)
1299 fprintf (stderr, "\n< Empty scope forest >\n");
1300 else
1301 {
1302 int i;
1303 fprintf (stderr, "\n< Scope forest >\n");
1304 for (i = 0; i < forest->num_trees; i++)
1305 dump_scope_forest_1 (forest->trees[i], 0);
1306 }
1307 }
1308
1309
1310 /* Recursive portion of dump_scope_forest. */
1311
1312 static void
1313 dump_scope_forest_1 (s, indent)
1314 scope s;
1315 int indent;
1316 {
1317 scope p;
1318 int i;
1319
1320 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1321 && RBI (s->bb_beg)->scope
1322 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1323 {
1324 fprintf (stderr, "%*s", indent, "");
1325 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1326 }
1327
1328 fprintf (stderr, "%*s", indent, "");
1329 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1330 (PTR) NOTE_BLOCK (s->note_beg));
1331
1332 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1333 for (i = 0; i < s->num_bbs; i++)
1334 fprintf (stderr, " %d", s->bbs[i]->index);
1335 fprintf (stderr, "\n");
1336
1337 for (p = s->inner; p; p = p->next)
1338 dump_scope_forest_1 (p, indent + 2);
1339
1340 fprintf (stderr, "%*s", indent, "");
1341 fprintf (stderr, "}\n");
1342 }
1343
1344
/* Reorder basic blocks.  The main entry point to this file.
   Records the scope structure, strips scope notes, computes and applies
   a new block ordering, then rebuilds the scope notes to match.  */

void
reorder_basic_blocks ()
{
  scope_forest_info forest;
  int i;

  if (n_basic_blocks <= 1)
    return;

  /* Attach per-block reordering info to every block's aux field.  */
  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));

  /* Scope notes would be invalidated by reordering; capture them in a
     forest first, then remove them from the insn stream.  */
  build_scope_forest (&forest);
  remove_scope_notes ();

  record_effective_endpoints ();
  make_reorder_chain ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Re-emit the scope notes for the new block order, and rebuild the
     BLOCK tree to match.  */
  rebuild_scope_notes (&forest);
  free_scope_forest (&forest);
  reorder_blocks ();

  /* Note that fixup_reorder_chain may have grown n_basic_blocks, so
     this also frees the aux info of any newly created blocks.  */
  for (i = 0; i < n_basic_blocks; i++)
    free (BASIC_BLOCK (i)->aux);

  free (EXIT_BLOCK_PTR->aux);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}