cfglayout.c (choose_inner_scope): Make it static.
[gcc.git] / gcc / cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
30 #include "output.h"
31 #include "function.h"
32 #include "obstack.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39
40 /* The contents of the current function definition are allocated
41 in this obstack, and all are freed at the end of the function. */
42 extern struct obstack flow_obstack;
43
44 /* Holds the interesting trailing notes for the function. */
45 rtx cfg_layout_function_footer, cfg_layout_function_header;
46
47 static rtx skip_insns_after_block (basic_block);
48 static void record_effective_endpoints (void);
49 static rtx label_for_bb (basic_block);
50 static void fixup_reorder_chain (void);
51
52 static void set_block_levels (tree, int);
53 static void change_scope (rtx, tree, tree);
54
55 void verify_insn_chain (void);
56 static void fixup_fallthru_exit_predecessor (void);
57 static tree insn_scope (rtx);
58 static void update_unlikely_executed_notes (basic_block);
59 \f
/* Unlink the insns from FIRST to LAST inclusive out of the global insn
   stream and return FIRST.  The neighbors of the removed span are
   stitched together; the global first/last insn pointers are updated
   when the span touches either end of the stream.  The unlinked chain
   keeps its internal NEXT/PREV links but is NULL-terminated at both
   ends.  FIRST must not follow LAST in the stream.  */

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    /* LAST was the final insn of the function, so the stream now ends
       at PREVFIRST (which may be NULL).  */
    set_last_insn (prevfirst);
  if (!prevfirst)
    /* FIRST was the first insn; the stream now begins at NEXTLAST.  */
    set_first_insn (nextlast);
  return first;
}
78 \f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* NEXT_HEAD marks where the following block starts; we must never
     walk past it.  It stays NULL when BB is the last real block.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers trail the block that cannot fall through.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      continue;

	    default:
	      /* All other notes are skipped without extending
		 LAST_INSN.  NOTE(review): the `break' below is
		 unreachable because of the `continue'; the explicit
		 cases above are therefore only documentation.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by its jump table belongs to
	     the preceding tablejump; keep both with BB.  */
	  if (NEXT_INSN (insn)
	      && JUMP_P (NEXT_INSN (insn))
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    /* Move any other note past LAST_INSN so that BB's trailing
	       span stays contiguous.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
167
168 /* Locate or create a label for a given basic block. */
169
170 static rtx
171 label_for_bb (basic_block bb)
172 {
173 rtx label = BB_HEAD (bb);
174
175 if (!LABEL_P (label))
176 {
177 if (dump_file)
178 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
179
180 label = block_label (bb);
181 }
182
183 return label;
184 }
185
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Inter-block insns
   are unlinked from the stream and parked in the per-block
   rbi->header/rbi->footer chains (and in cfg_layout_function_header and
   cfg_layout_function_footer for insns before the first and after the
   last block), so that blocks can later be reordered freely.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Everything before the first NOTE_INSN_BASIC_BLOCK is the function
     header; it may only consist of notes.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all? */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->rbi->header = unlink_insn_chain (next_insn,
					     PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers and notes belonging to BB become its
	 footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
229 \f
230 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
231 numbers and files. In order to be GGC friendly we need to use separate
232 varrays. This also slightly improves the memory locality in binary search.
233 The _locs array contains locators where the given property change. The
234 block_locators_blocks contains the scope block that is used for all insn
235 locator greater than corresponding block_locators_locs value and smaller
236 than the following one. Similarly for the other properties. */
237 static GTY(()) varray_type block_locators_locs;
238 static GTY(()) varray_type block_locators_blocks;
239 static GTY(()) varray_type line_locators_locs;
240 static GTY(()) varray_type line_locators_lines;
241 static GTY(()) varray_type file_locators_locs;
242 static GTY(()) varray_type file_locators_files;
243 int prologue_locator;
244 int epilogue_locator;
245
/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  Walks the whole insn stream once, assigning a fresh
   locator whenever the scope block, line number, or file changes, and
   recording the prologue/epilogue locators as a side effect.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
	{
	  /* Block notes must already have been lowered to the
	     check_block_change mechanism at this point.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
	  if (NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* A source-position note: remember the line/file it
		 establishes for following insns.  */
	      expanded_location xloc;
	      NOTE_EXPANDED_LOCATION (xloc, insn);
	      line_number = xloc.line;
	      file_name = xloc.file;
	    }
	}
      else
	/* Dispatch tables never get locators of their own.  */
	active = (active_insn_p (insn)
		  && GET_CODE (PATTERN (insn)) != ADDR_VEC
		  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      /* Emit new locator entries when something changed; also force an
	 entry at the last insn (for the epilogue locator) and at the
	 first insn carrying file info (for the prologue locator).  */
      if (active
	  || !next
	  || (!prologue_locator && file_name))
	{
	  if (last_block != block)
	    {
	      loc++;
	      VARRAY_PUSH_INT (block_locators_locs, loc);
	      VARRAY_PUSH_TREE (block_locators_blocks, block);
	      last_block = block;
	    }
	  if (last_line_number != line_number)
	    {
	      loc++;
	      VARRAY_PUSH_INT (line_locators_locs, loc);
	      VARRAY_PUSH_INT (line_locators_lines, line_number);
	      last_line_number = line_number;
	    }
	  if (last_file_name != file_name)
	    {
	      loc++;
	      VARRAY_PUSH_INT (file_locators_locs, loc);
	      VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
	      last_file_name = file_name;
	    }
	  if (!prologue_locator && file_name)
	    prologue_locator = loc;
	  if (!next)
	    epilogue_locator = loc;
	  if (active)
	    INSN_LOCATOR (insn) = loc;
	}
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
}
334
335 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
336 found in the block tree. */
337
338 static void
339 set_block_levels (tree block, int level)
340 {
341 while (block)
342 {
343 BLOCK_NUMBER (block) = level;
344 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
345 block = BLOCK_CHAIN (block);
346 }
347 }
348 \f
349 /* Return sope resulting from combination of S1 and S2. */
350 static tree
351 choose_inner_scope (tree s1, tree s2)
352 {
353 if (!s1)
354 return s2;
355 if (!s2)
356 return s1;
357 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
358 return s1;
359 return s2;
360 }
361 \f
/* Emit lexical block notes needed to change scope from S1 to S2 before
   insn ORIG_INSN.  Emits NOTE_INSN_BLOCK_END notes for every scope from
   S1 up to (not including) the nearest common ancestor, then
   NOTE_INSN_BLOCK_BEG notes down to S2.  Block depths must already be
   set by set_block_levels.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the common ancestor by walking the deeper scope upward until
     both cursors meet; equal depths move in lockstep.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Each BLOCK_BEG note is emitted before the previous
     one so outer scopes end up first in the insn stream.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
405
/* Return the lexical scope block INSN belongs to, or NULL if the insn
   carries no locator.  Binary-searches block_locators_locs for the
   greatest recorded locator not exceeding INSN's locator.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Invariant: entry MIN has locator <= LOC; MAX is an exclusive upper
     bound.  Terminates when the midpoint can move neither bound.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_TREE (block_locators_blocks, min);
}
445
/* Return line number of the statement specified by the locator LOC, or
   0 when LOC is 0 or no line information was recorded.  Same binary
   search scheme as insn_scope, over line_locators_locs.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  /* Find the greatest recorded locator <= LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_INT (line_locators_lines, min);
}
472
473 /* Return line number of the statement that produced this insn. */
474 int
475 insn_line (rtx insn)
476 {
477 return locator_line (INSN_LOCATOR (insn));
478 }
479
/* Return source file of the statement specified by LOC, or NULL when
   LOC is 0 or no file information was recorded.  Same binary search
   scheme as insn_scope, over file_locators_locs.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  /* Find the greatest recorded locator <= LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}
506
507 /* Return source file of the statement that produced this insn. */
508 const char *
509 insn_file (rtx insn)
510 {
511 return locator_file (INSN_LOCATOR (insn));
512 }
513
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  Walks the
   active insns in order, emitting scope-change notes wherever the
   lexical block differs from the previous insn's block, and finally
   closes back out to the function's outermost block.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Use a throwaway
     note at the end of the stream as the insertion anchor.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
560 \f
/* Given a reorder chain (the rbi->next links set up by the reordering
   pass), rearrange the insn stream to match: splice the per-block
   header/footer chains and block bodies into the new order, then patch
   jumps and insert compensation blocks so that every fallthru edge
   really falls through, and finally renumber basic_block_info.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* Start the stream with the saved function header, if any; INSN
     tracks the tail of the chain built so far.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->rbi->header;
	  else
	    set_first_insn (bb->rbi->header);
	  PREV_INSN (bb->rbi->header) = insn;
	  insn = bb->rbi->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
	{
	  NEXT_INSN (insn) = bb->rbi->footer;
	  PREV_INSN (bb->rbi->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* Every block must have been visited exactly once.  */
  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      basic_block old_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->rbi->next == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen on target having jumps with side
		 effects.

		 Create temporarily the duplicated edge representing branch.
		 It will get removed by force_nonfallthru_and_redirect
		 that would otherwise get confused by fallthru edge not pointing
		 to the next basic block.  */
	      if (!e_taken)
		{
		  rtx note;
		  edge e_fake;
		  bool redirected;

		  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

		  redirected = redirect_jump (BB_END (bb),
					      block_label (bb), 0);
		  gcc_assert (redirected);

		  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
		  if (note)
		    {
		      int prob = INTVAL (XEXP (note, 0));

		      /* Transfer the branch probability/count to the fake
			 edge; the fallthru edge keeps only the residue.  */
		      e_fake->probability = prob;
		      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
		      e_fall->probability -= e_fall->probability;
		      e_fall->count -= e_fake->count;
		      if (e_fall->probability < 0)
			e_fall->probability = 0;
		      if (e_fall->count < 0)
			e_fall->count = 0;
		    }
		}
	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->rbi->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  /* Invert the jump when the branch is the less likely
		     direction, so the probable path falls through.  */
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
#ifndef CASE_DROPS_THROUGH
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
#else
	      if (returnjump_p (bb_end_insn) || !e_fall)
		continue;
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (bb->rbi->next == e_fall->dest)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->rbi->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  initialize_bb_rbi (nb);
	  nb->rbi->visited = 1;
	  nb->rbi->next = bb->rbi->next;
	  bb->rbi->next = nb;
	  /* Don't process this new block.  */
	  old_bb = bb;
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-through across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, EDGE_PRED (bb, 0)->src);
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections)
	    {
	      if (BB_PARTITION (EDGE_PRED (bb, 0)->src) == BB_COLD_PARTITION)
		{
		  rtx new_note;
		  rtx note = BB_HEAD (e_fall->src);

		  while (!INSN_P (note)
			 && note != BB_END (e_fall->src))
		    note = NEXT_INSN (note);

		  new_note = emit_note_before
					(NOTE_INSN_UNLIKELY_EXECUTED_CODE,
					 note);
		  NOTE_BASIC_BLOCK (new_note) = bb;
		}
	      if (JUMP_P (BB_END (bb))
		  && !any_condjump_p (BB_END (bb))
		  && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
		REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
		  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	    }
	}
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
	   bb;
	   bb = bb->rbi->next, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (bb->rbi->original)
	    fprintf (dump_file, "duplicate of %i ",
		     bb->rbi->original->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  /* Renumber the blocks and relink prev_bb/next_bb to the new order.  */
  for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      update_unlikely_executed_notes (bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }
}
875 \f
876 /* Update the basic block number information in any
877 NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block. */
878
879 static void
880 update_unlikely_executed_notes (basic_block bb)
881 {
882 rtx cur_insn;
883
884 for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
885 cur_insn = NEXT_INSN (cur_insn))
886 if (NOTE_P (cur_insn)
887 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
888 NOTE_BASIC_BLOCK (cur_insn) = bb;
889 }
890 \f
891 /* Perform sanity checks on the insn chain.
892 1. Check that next/prev pointers are consistent in both the forward and
893 reverse direction.
894 2. Count insns in chain, going both directions, and check if equal.
895 3. Check that get_last_insn () returns the actual end of chain. */
896
897 void
898 verify_insn_chain (void)
899 {
900 rtx x, prevx, nextx;
901 int insn_cnt1, insn_cnt2;
902
903 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
904 x != 0;
905 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
906 gcc_assert (PREV_INSN (x) == prevx);
907
908 gcc_assert (prevx == get_last_insn ());
909
910 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
911 x != 0;
912 nextx = x, insn_cnt2++, x = PREV_INSN (x))
913 gcc_assert (NEXT_INSN (x) == nextx);
914
915 gcc_assert (insn_cnt1 == insn_cnt2);
916 }
917 \f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met by moving that block (BB) to the end of
   the rbi->next chain.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  initialize_bb_rbi (bb);
	  bb->rbi->next = c->rbi->next;
	  c->rbi->next = bb;
	  /* The footer belongs with the new tail half.  */
	  bb->rbi->footer = c->rbi->footer;
	  c->rbi->footer = NULL;
	}

      /* Unlink BB from the chain...  */
      while (c->rbi->next != bb)
	c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
	c = c->rbi->next;

      /* ... and relink it at the end.  */
      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}
964 \f
965 /* Return true in case it is possible to duplicate the basic block BB. */
966
967 /* We do not want to declare the function in a header file, since it should
968 only be used through the cfghooks interface, and we do not want to move
969 it to cfgrtl.c since it would require also moving quite a lot of related
970 code. */
971 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
972
973 bool
974 cfg_layout_can_duplicate_bb_p (basic_block bb)
975 {
976 /* Do not attempt to duplicate tablejumps, as we need to unshare
977 the dispatch table. This is difficult to do, as the instructions
978 computing jump destination may be hoisted outside the basic block. */
979 if (tablejump_p (BB_END (bb), NULL, NULL))
980 return false;
981
982 /* Do not duplicate blocks containing insns that can't be copied. */
983 if (targetm.cannot_copy_insn_p)
984 {
985 rtx insn = BB_HEAD (bb);
986 while (1)
987 {
988 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
989 return false;
990 if (insn == BB_END (bb))
991 break;
992 insn = NEXT_INSN (insn);
993 }
994 }
995
996 return true;
997 }
998
/* Duplicate the insns from FROM to TO inclusive, emitting the copies at
   the end of the current insn chain, and return the first copied insn.
   Labels and dispatch tables are not copied; notes are copied or
   dropped depending on their kind (see the switch below).  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  /* Labels are never copied; the duplicate block gets its own
	     label (if needed) later.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_LOOP_BEG:
	    case NOTE_INSN_LOOP_END:
	      /* Strip down the loop notes - we don't really want to keep
		 them consistent in loop copies.  */
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_FUNCTION_END:
	      /* Debug code expect these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_REPEATED_LINE_NUMBER:
	    case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

	      /* It is possible that no_line_number is set and the note
		 won't be emitted.  */
	      emit_note_copy (insn);
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the anchor note and return the first real copy after it.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB: copy its insns (plus any
   rbi header/footer chains and liveness sets) into a fresh block placed
   just before the exit block, and return the new block.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->rbi->header)
    {
      /* Duplicate the header chain; the copies land at the end of the
	 stream and are unlinked into the new block's header.  */
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
	new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      /* Likewise for the footer chain.  */
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
	new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  return new_bb;
}
1131 \f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_rbi_pool ();

  /* Give every block (including the entry block) an rbi record.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    initialize_bb_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  /* Park inter-block insns in per-block header/footer chains.  */
  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
1157
1158 /* Splits superblocks. */
1159 void
1160 break_superblocks (void)
1161 {
1162 sbitmap superblocks;
1163 bool need = false;
1164 basic_block bb;
1165
1166 superblocks = sbitmap_alloc (last_basic_block);
1167 sbitmap_zero (superblocks);
1168
1169 FOR_EACH_BB (bb)
1170 if (bb->flags & BB_SUPERBLOCK)
1171 {
1172 bb->flags &= ~BB_SUPERBLOCK;
1173 SET_BIT (superblocks, bb->index);
1174 need = true;
1175 }
1176
1177 if (need)
1178 {
1179 rebuild_jump_labels (get_insns ());
1180 find_many_sub_basic_blocks (superblocks);
1181 }
1182
1183 free (superblocks);
1184 }
1185
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Leave cfglayout mode: ordinary RTL CFG hooks apply from here on.  */
  rtl_register_cfg_hooks ();
  /* After reload on a target without an epilogue, the block that falls
     through to the exit block presumably must stay physically last --
     see fixup_fallthru_exit_predecessor.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Re-link the insn chain to the chosen block order and enter the
     compensation code mentioned above.  */
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* The rbi structures are no longer needed once the layout is final;
     release the pool and clear the now-dangling per-block pointers.  */
  free_rbi_pool ();
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1220
1221 /* Checks whether all N blocks in BBS array can be copied. */
1222 bool
1223 can_copy_bbs_p (basic_block *bbs, unsigned n)
1224 {
1225 unsigned i;
1226 edge e;
1227 int ret = true;
1228
1229 for (i = 0; i < n; i++)
1230 bbs[i]->rbi->duplicated = 1;
1231
1232 for (i = 0; i < n; i++)
1233 {
1234 /* In case we should redirect abnormal edge during duplication, fail. */
1235 edge_iterator ei;
1236 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1237 if ((e->flags & EDGE_ABNORMAL)
1238 && e->dest->rbi->duplicated)
1239 {
1240 ret = false;
1241 goto end;
1242 }
1243
1244 if (!can_duplicate_block_p (bbs[i]))
1245 {
1246 ret = false;
1247 break;
1248 }
1249 }
1250
1251 end:
1252 for (i = 0; i < n; i++)
1253 bbs[i]->rbi->duplicated = 0;
1254
1255 return ret;
1256 }
1257
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to the appropriate newly created block.
   The function assigns bbs into loops (the copy of basic block bb is
   assigned to the bb->loop_father->copy loop, so this must be set up
   correctly in advance) and updates dominators locally.

   BASE is the superloop to which the basic blocks belong; if the header or
   latch of BASE itself is copied, we do not make the new block the header
   or latch of the copy.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned n_edges, edge *new_edges,
	  struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL);
      /* Mark BB so that edges into the copied region can be recognized
	 in the redirection loop below.  */
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  A copy's immediate dominator is updated only when
     the original's dominator was itself copied; otherwise the dominator
     lies outside the copied region and is left for the caller.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
	{
	  dom_bb = dom_bb->rbi->copy;
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  /* Record the copy of each requested edge: the successor of
	     NEW_BB with the same destination as the original EDGES[J].  */
	  for (j = 0; j < n_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  /* Edges leaving the copied region keep their destination;
	     edges whose destination was copied are redirected to the
	     corresponding copy.  */
	  if (!e->dest->rbi->duplicated)
	    continue;
	  redirect_edge_and_branch_force (e, e->dest->rbi->copy);
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}
1338
1339 #include "gt-cfglayout.h"