1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "vecprim.h"
41
43 /* Holds the interesting leading and trailing notes for the function. */
43 rtx cfg_layout_function_footer, cfg_layout_function_header;
44
45 static rtx skip_insns_after_block (basic_block);
46 static void record_effective_endpoints (void);
47 static rtx label_for_bb (basic_block);
48 static void fixup_reorder_chain (void);
49
50 static void set_block_levels (tree, int);
51 static void change_scope (rtx, tree, tree);
52
53 void verify_insn_chain (void);
54 static void fixup_fallthru_exit_predecessor (void);
55 static tree insn_scope (rtx);
56 \f
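/* Unlink the insns from FIRST to LAST inclusive from the insn chain,
   reconnecting their former neighbors and updating the function's first
   and last insn pointers as needed.  Return FIRST, which now heads the
   detached chain.  */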
57 rtx
58 unlink_insn_chain (rtx first, rtx last)
59 {
60 rtx prevfirst = PREV_INSN (first);
61 rtx nextlast = NEXT_INSN (last);
62
63 PREV_INSN (first) = NULL;
64 NEXT_INSN (last) = NULL;
65 if (prevfirst)
66 NEXT_INSN (prevfirst) = nextlast;
67 if (nextlast)
68 PREV_INSN (nextlast) = prevfirst;
69 else
70 set_last_insn (prevfirst);
71 if (!prevfirst)
72 set_first_insn (nextlast);
73 return first;
74 }
75 \f
76 /* Skip over inter-block insns occurring after BB which are typically
77 associated with BB (e.g., barriers). If there are any such insns,
78 we return the last one. Otherwise, we return the end of BB. */
79
80 static rtx
81 skip_insns_after_block (basic_block bb)
82 {
83 rtx insn, last_insn, next_head, prev;
84
85 next_head = NULL_RTX;
86 if (bb->next_bb != EXIT_BLOCK_PTR)
87 next_head = BB_HEAD (bb->next_bb);
88
89 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
90 {
91 if (insn == next_head)
92 break;
93
94 switch (GET_CODE (insn))
95 {
96 case BARRIER:
97 last_insn = insn;
98 continue;
99
100 case NOTE:
101 switch (NOTE_LINE_NUMBER (insn))
102 {
103 case NOTE_INSN_BLOCK_END:
104 last_insn = insn;
105 continue;
106 case NOTE_INSN_DELETED:
107 case NOTE_INSN_DELETED_LABEL:
108 continue;
109
110 default:
111 continue;
112 break;
113 }
114 break;
115
116 case CODE_LABEL:
117 if (NEXT_INSN (insn)
118 && JUMP_P (NEXT_INSN (insn))
119 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
120 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
121 {
122 insn = NEXT_INSN (insn);
123 last_insn = insn;
124 continue;
125 }
126 break;
127
128 default:
129 break;
130 }
131
132 break;
133 }
134
135 /* It is possible to hit a contradictory sequence. For instance:
136
137 jump_insn
138 NOTE_INSN_BLOCK_BEG
139 barrier
140
141 where the barrier belongs to the jump_insn, but the note does not. This can be
142 created by removing the basic block originally following
143 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
144
145 for (insn = last_insn; insn != BB_END (bb); insn = prev)
146 {
147 prev = PREV_INSN (insn);
148 if (NOTE_P (insn))
149 switch (NOTE_LINE_NUMBER (insn))
150 {
151 case NOTE_INSN_BLOCK_END:
152 case NOTE_INSN_DELETED:
153 case NOTE_INSN_DELETED_LABEL:
154 continue;
155 default:
156 reorder_insns (insn, insn, last_insn);
157 }
158 }
159
160 return last_insn;
161 }
162
163 /* Locate or create a label for a given basic block. */
164
165 static rtx
166 label_for_bb (basic_block bb)
167 {
168 rtx label = BB_HEAD (bb);
169
170 if (!LABEL_P (label))
171 {
172 if (dump_file)
173 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
174
175 label = block_label (bb);
176 }
177
178 return label;
179 }
180
181 /* Locate the effective beginning and end of the insn chain for each
182 block, as defined by skip_insns_after_block above. */
183
184 static void
185 record_effective_endpoints (void)
186 {
187 rtx next_insn;
188 basic_block bb;
189 rtx insn;
190
191 for (insn = get_insns ();
192 insn
193 && NOTE_P (insn)
194 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
195 insn = NEXT_INSN (insn))
196 continue;
197 /* No basic blocks at all? */
198 gcc_assert (insn);
199
200 if (PREV_INSN (insn))
201 cfg_layout_function_header =
202 unlink_insn_chain (get_insns (), PREV_INSN (insn));
203 else
204 cfg_layout_function_header = NULL_RTX;
205
206 next_insn = get_insns ();
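  /* For each block, detach the insns between the previous block and this
     block's head into the block's header, and the trailing insns located by
     skip_insns_after_block into its footer, so that they travel together
     with the block when it is moved.  */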
207 FOR_EACH_BB (bb)
208 {
209 rtx end;
210
211 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
212 bb->il.rtl->header = unlink_insn_chain (next_insn,
213 PREV_INSN (BB_HEAD (bb)));
214 end = skip_insns_after_block (bb);
215 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
216 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
217 next_insn = NEXT_INSN (BB_END (bb));
218 }
219
220 cfg_layout_function_footer = next_insn;
221 if (cfg_layout_function_footer)
222 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
223 }
224 \f
225 /* Data structures representing the mapping of INSN_LOCATORs to scope blocks,
226 line numbers and files. In order to be GGC friendly we need to use separate
227 varrays. This also slightly improves the memory locality in the binary search.
228 The _locs arrays contain the locators at which the given property changes.
229 block_locators_blocks contains the scope block that is used for all insn
230 locators greater than or equal to the corresponding block_locators_locs value
231 and smaller than the following one. Similarly for the other properties. */
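/* For example, if block_locators_locs contains {3, 7} and
   block_locators_blocks contains {A, B}, then locators 3 through 6 resolve
   to scope block A and locators from 7 on resolve to B.  */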
232 static VEC(int,heap) *block_locators_locs;
233 static GTY(()) VEC(tree,gc) *block_locators_blocks;
234 static VEC(int,heap) *line_locators_locs;
235 static VEC(int,heap) *line_locators_lines;
236 static VEC(int,heap) *file_locators_locs;
237 static GTY(()) varray_type file_locators_files;
238 int prologue_locator;
239 int epilogue_locator;
240
241 /* During RTL expansion the lexical blocks and line numbers are
242 represented via INSN_NOTEs. Replace them with a representation using
243 INSN_LOCATORs. */
244
245 unsigned int
246 insn_locators_initialize (void)
247 {
248 tree block = NULL;
249 tree last_block = NULL;
250 rtx insn, next;
251 int loc = 0;
252 int line_number = 0, last_line_number = 0;
253 const char *file_name = NULL, *last_file_name = NULL;
254
255 prologue_locator = epilogue_locator = 0;
256
257 block_locators_locs = VEC_alloc (int, heap, 32);
258 block_locators_blocks = VEC_alloc (tree, gc, 32);
259 line_locators_locs = VEC_alloc (int, heap, 32);
260 line_locators_lines = VEC_alloc (int, heap, 32);
261 file_locators_locs = VEC_alloc (int, heap, 32);
262 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
263
264 for (insn = get_insns (); insn; insn = next)
265 {
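      /* Nonzero if INSN is a real instruction that should be assigned
	 a locator of its own.  */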
266 int active = 0;
267
268 next = NEXT_INSN (insn);
269
270 if (NOTE_P (insn))
271 {
272 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
273 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
274 if (NOTE_LINE_NUMBER (insn) > 0)
275 {
276 expanded_location xloc;
277 NOTE_EXPANDED_LOCATION (xloc, insn);
278 line_number = xloc.line;
279 file_name = xloc.file;
280 }
281 }
282 else
283 active = (active_insn_p (insn)
284 && GET_CODE (PATTERN (insn)) != ADDR_VEC
285 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
286
287 check_block_change (insn, &block);
288
289 if (active
290 || !next
291 || (!prologue_locator && file_name))
292 {
293 if (last_block != block)
294 {
295 loc++;
296 VEC_safe_push (int, heap, block_locators_locs, loc);
297 VEC_safe_push (tree, gc, block_locators_blocks, block);
298 last_block = block;
299 }
300 if (last_line_number != line_number)
301 {
302 loc++;
303 VEC_safe_push (int, heap, line_locators_locs, loc);
304 VEC_safe_push (int, heap, line_locators_lines, line_number);
305 last_line_number = line_number;
306 }
307 if (last_file_name != file_name)
308 {
309 loc++;
310 VEC_safe_push (int, heap, file_locators_locs, loc);
311 VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
312 last_file_name = file_name;
313 }
314 if (!prologue_locator && file_name)
315 prologue_locator = loc;
316 if (!next)
317 epilogue_locator = loc;
318 if (active)
319 INSN_LOCATOR (insn) = loc;
320 }
321 }
322
323 /* Tag the blocks with a depth number so that change_scope can find
324 the common parent easily. */
325 set_block_levels (DECL_INITIAL (cfun->decl), 0);
326
327 free_block_changes ();
328 return 0;
329 }
330
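/* The pass that replaces the NOTE-based representation of lexical blocks
   and line numbers with INSN_LOCATORs; see insn_locators_initialize above.  */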
331 struct tree_opt_pass pass_insn_locators_initialize =
332 {
333 "locators", /* name */
334 NULL, /* gate */
335 insn_locators_initialize, /* execute */
336 NULL, /* sub */
337 NULL, /* next */
338 0, /* static_pass_number */
339 0, /* tv_id */
340 0, /* properties_required */
341 0, /* properties_provided */
342 0, /* properties_destroyed */
343 0, /* todo_flags_start */
344 TODO_dump_func, /* todo_flags_finish */
345 0 /* letter */
346 };
347
348
349 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
350 found in the block tree. */
351
352 static void
353 set_block_levels (tree block, int level)
354 {
355 while (block)
356 {
357 BLOCK_NUMBER (block) = level;
358 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
359 block = BLOCK_CHAIN (block);
360 }
361 }
362 \f
363 /* Return the scope resulting from the combination of S1 and S2. */
364 static tree
365 choose_inner_scope (tree s1, tree s2)
366 {
367 if (!s1)
368 return s2;
369 if (!s2)
370 return s1;
371 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
372 return s1;
373 return s2;
374 }
375 \f
376 /* Emit lexical block notes needed to change scope from S1 to S2. */
377
378 static void
379 change_scope (rtx orig_insn, tree s1, tree s2)
380 {
381 rtx insn = orig_insn;
382 tree com = NULL_TREE;
383 tree ts1 = s1, ts2 = s2;
384 tree s;
385
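  /* Find the nearest common ancestor of S1 and S2 by repeatedly walking
     up from whichever scope is deeper.  */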
386 while (ts1 != ts2)
387 {
388 gcc_assert (ts1 && ts2);
389 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
390 ts1 = BLOCK_SUPERCONTEXT (ts1);
391 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
392 ts2 = BLOCK_SUPERCONTEXT (ts2);
393 else
394 {
395 ts1 = BLOCK_SUPERCONTEXT (ts1);
396 ts2 = BLOCK_SUPERCONTEXT (ts2);
397 }
398 }
399 com = ts1;
400
401 /* Close scopes. */
402 s = s1;
403 while (s != com)
404 {
405 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
406 NOTE_BLOCK (note) = s;
407 s = BLOCK_SUPERCONTEXT (s);
408 }
409
410 /* Open scopes. */
411 s = s2;
412 while (s != com)
413 {
414 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
415 NOTE_BLOCK (insn) = s;
416 s = BLOCK_SUPERCONTEXT (s);
417 }
418 }
419
420 /* Return the lexical scope block that INSN belongs to. */
421 static tree
422 insn_scope (rtx insn)
423 {
424 int max = VEC_length (int, block_locators_locs);
425 int min = 0;
426 int loc = INSN_LOCATOR (insn);
427
428 /* When block_locators_locs was initialized, the pro- and epilogue
429 insns didn't exist yet and can therefore not be found this way.
430 But we know that they belong to the outermost block of the
431 current function.
432 Without this test, the prologue would be put inside the block of
433 the first valid instruction in the function, and when that first
434 insn is part of an inlined function, the low_pc of that
435 inlined function is messed up. Likewise for the epilogue and
436 the last valid instruction. */
437 if (loc == prologue_locator || loc == epilogue_locator)
438 return DECL_INITIAL (cfun->decl);
439
440 if (!max || !loc)
441 return NULL;
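  /* Binary search for the last entry in block_locators_locs that does not
     exceed LOC.  */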
442 while (1)
443 {
444 int pos = (min + max) / 2;
445 int tmp = VEC_index (int, block_locators_locs, pos);
446
447 if (tmp <= loc && min != pos)
448 min = pos;
449 else if (tmp > loc && max != pos)
450 max = pos;
451 else
452 {
453 min = pos;
454 break;
455 }
456 }
457 return VEC_index (tree, block_locators_blocks, min);
458 }
459
460 /* Return line number of the statement specified by the locator. */
461 int
462 locator_line (int loc)
463 {
464 int max = VEC_length (int, line_locators_locs);
465 int min = 0;
466
467 if (!max || !loc)
468 return 0;
469 while (1)
470 {
471 int pos = (min + max) / 2;
472 int tmp = VEC_index (int, line_locators_locs, pos);
473
474 if (tmp <= loc && min != pos)
475 min = pos;
476 else if (tmp > loc && max != pos)
477 max = pos;
478 else
479 {
480 min = pos;
481 break;
482 }
483 }
484 return VEC_index (int, line_locators_lines, min);
485 }
486
487 /* Return line number of the statement that produced this insn. */
488 int
489 insn_line (rtx insn)
490 {
491 return locator_line (INSN_LOCATOR (insn));
492 }
493
494 /* Return source file of the statement specified by LOC. */
495 const char *
496 locator_file (int loc)
497 {
498 int max = VEC_length (int, file_locators_locs);
499 int min = 0;
500
501 if (!max || !loc)
502 return NULL;
503 while (1)
504 {
505 int pos = (min + max) / 2;
506 int tmp = VEC_index (int, file_locators_locs, pos);
507
508 if (tmp <= loc && min != pos)
509 min = pos;
510 else if (tmp > loc && max != pos)
511 max = pos;
512 else
513 {
514 min = pos;
515 break;
516 }
517 }
518 return VARRAY_CHAR_PTR (file_locators_files, min);
519 }
520
521 /* Return source file of the statement that produced this insn. */
522 const char *
523 insn_file (rtx insn)
524 {
525 return locator_file (INSN_LOCATOR (insn));
526 }
527
528 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
529 on the scope tree and the newly reordered instructions. */
530
531 void
532 reemit_insn_block_notes (void)
533 {
534 tree cur_block = DECL_INITIAL (cfun->decl);
535 rtx insn, note;
536
537 insn = get_insns ();
538 if (!active_insn_p (insn))
539 insn = next_active_insn (insn);
540 for (; insn; insn = next_active_insn (insn))
541 {
542 tree this_block;
543
544 /* Avoid putting scope notes between a jump table and its label. */
545 if (JUMP_P (insn)
546 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
547 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
548 continue;
549
550 this_block = insn_scope (insn);
551 /* For sequences, compute the scope resulting from merging the scopes
552 of all the instructions nested inside. */
553 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
554 {
555 int i;
556 rtx body = PATTERN (insn);
557
558 this_block = NULL;
559 for (i = 0; i < XVECLEN (body, 0); i++)
560 this_block = choose_inner_scope (this_block,
561 insn_scope (XVECEXP (body, 0, i)));
562 }
563 if (! this_block)
564 continue;
565
566 if (this_block != cur_block)
567 {
568 change_scope (insn, cur_block, this_block);
569 cur_block = this_block;
570 }
571 }
572
573 /* change_scope emits before the insn, not after. */
574 note = emit_note (NOTE_INSN_DELETED);
575 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
576 delete_insn (note);
577
578 reorder_blocks ();
579 }
580 \f
581 /* Given a reorder chain, rearrange the code to match. */
582
583 static void
584 fixup_reorder_chain (void)
585 {
586 basic_block bb, prev_bb;
587 int index;
588 rtx insn = NULL;
589
590 if (cfg_layout_function_header)
591 {
592 set_first_insn (cfg_layout_function_header);
593 insn = cfg_layout_function_header;
594 while (NEXT_INSN (insn))
595 insn = NEXT_INSN (insn);
596 }
597
598 /* First do the bulk reordering -- rechain the blocks without regard to
599 the needed changes to jumps and labels. */
600
601 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
602 bb != 0;
603 bb = bb->aux, index++)
604 {
605 if (bb->il.rtl->header)
606 {
607 if (insn)
608 NEXT_INSN (insn) = bb->il.rtl->header;
609 else
610 set_first_insn (bb->il.rtl->header);
611 PREV_INSN (bb->il.rtl->header) = insn;
612 insn = bb->il.rtl->header;
613 while (NEXT_INSN (insn))
614 insn = NEXT_INSN (insn);
615 }
616 if (insn)
617 NEXT_INSN (insn) = BB_HEAD (bb);
618 else
619 set_first_insn (BB_HEAD (bb));
620 PREV_INSN (BB_HEAD (bb)) = insn;
621 insn = BB_END (bb);
622 if (bb->il.rtl->footer)
623 {
624 NEXT_INSN (insn) = bb->il.rtl->footer;
625 PREV_INSN (bb->il.rtl->footer) = insn;
626 while (NEXT_INSN (insn))
627 insn = NEXT_INSN (insn);
628 }
629 }
630
631 gcc_assert (index == n_basic_blocks);
632
633 NEXT_INSN (insn) = cfg_layout_function_footer;
634 if (cfg_layout_function_footer)
635 PREV_INSN (cfg_layout_function_footer) = insn;
636
637 while (NEXT_INSN (insn))
638 insn = NEXT_INSN (insn);
639
640 set_last_insn (insn);
641 #ifdef ENABLE_CHECKING
642 verify_insn_chain ();
643 #endif
644 delete_dead_jumptables ();
645
646 /* Now add jumps and labels as needed to match the blocks' new
647 outgoing edges. */
648
649 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
650 {
651 edge e_fall, e_taken, e;
652 rtx bb_end_insn;
653 basic_block nb;
654 edge_iterator ei;
655
656 if (EDGE_COUNT (bb->succs) == 0)
657 continue;
658
659 /* Find the old fallthru edge, and another non-EH edge for
660 a taken jump. */
661 e_taken = e_fall = NULL;
662
663 FOR_EACH_EDGE (e, ei, bb->succs)
664 if (e->flags & EDGE_FALLTHRU)
665 e_fall = e;
666 else if (! (e->flags & EDGE_EH))
667 e_taken = e;
668
669 bb_end_insn = BB_END (bb);
670 if (JUMP_P (bb_end_insn))
671 {
672 if (any_condjump_p (bb_end_insn))
673 {
674 /* If the old fallthru is still next, nothing to do. */
675 if (bb->aux == e_fall->dest
676 || e_fall->dest == EXIT_BLOCK_PTR)
677 continue;
678
679 /* The degenerate case of a conditional jump jumping to the next
680 instruction can happen for jumps with side effects. We need
681 to construct a forwarder block, and this will be done just
682 fine by force_nonfallthru below. */
683 if (!e_taken)
684 ;
685
686 /* There is another special case: if *neither* block is next,
687 such as happens at the very end of a function, then we'll
688 need to add a new unconditional jump. Choose the taken
689 edge based on known or assumed probability. */
690 else if (bb->aux != e_taken->dest)
691 {
692 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
693
694 if (note
695 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
696 && invert_jump (bb_end_insn,
697 (e_fall->dest == EXIT_BLOCK_PTR
698 ? NULL_RTX
699 : label_for_bb (e_fall->dest)), 0))
700 {
701 e_fall->flags &= ~EDGE_FALLTHRU;
702 #ifdef ENABLE_CHECKING
703 gcc_assert (could_fall_through
704 (e_taken->src, e_taken->dest));
705 #endif
706 e_taken->flags |= EDGE_FALLTHRU;
707 update_br_prob_note (bb);
708 e = e_fall, e_fall = e_taken, e_taken = e;
709 }
710 }
711
712 /* If the "jumping" edge is a crossing edge, and the fall-through
713 edge is non-crossing, leave things as they are. */
714 else if ((e_taken->flags & EDGE_CROSSING)
715 && !(e_fall->flags & EDGE_CROSSING))
716 continue;
717
718 /* Otherwise we can try to invert the jump. This will
719 basically never fail; however, keep up the pretense. */
720 else if (invert_jump (bb_end_insn,
721 (e_fall->dest == EXIT_BLOCK_PTR
722 ? NULL_RTX
723 : label_for_bb (e_fall->dest)), 0))
724 {
725 e_fall->flags &= ~EDGE_FALLTHRU;
726 #ifdef ENABLE_CHECKING
727 gcc_assert (could_fall_through
728 (e_taken->src, e_taken->dest));
729 #endif
730 e_taken->flags |= EDGE_FALLTHRU;
731 update_br_prob_note (bb);
732 continue;
733 }
734 }
735 else
736 {
737 /* Otherwise we have some return, switch or computed
738 jump. In the 99% case, there should not have been a
739 fallthru edge. */
740 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
741 continue;
742 }
743 }
744 else
745 {
746 /* No fallthru implies a noreturn function with EH edges, or
747 something similarly bizarre. In any case, we don't need to
748 do anything. */
749 if (! e_fall)
750 continue;
751
752 /* If the fallthru block is still next, nothing to do. */
753 if (bb->aux == e_fall->dest)
754 continue;
755
756 /* A fallthru to exit block. */
757 if (e_fall->dest == EXIT_BLOCK_PTR)
758 continue;
759 }
760
761 /* We got here if we need to add a new jump insn. */
762 nb = force_nonfallthru (e_fall);
763 if (nb)
764 {
765 nb->il.rtl->visited = 1;
766 nb->aux = bb->aux;
767 bb->aux = nb;
768 /* Don't process this new block. */
769 bb = nb;
770
771 /* Make sure the new bb is tagged for the correct section (same as the
772 fall-through source, since you cannot fall through across
773 section boundaries). */
774 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
775 if (flag_reorder_blocks_and_partition
776 && targetm.have_named_sections
777 && JUMP_P (BB_END (bb))
778 && !any_condjump_p (BB_END (bb))
779 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
780 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
781 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
782 }
783 }
784
785 /* Put basic_block_info in the new order. */
786
787 if (dump_file)
788 {
789 fprintf (dump_file, "Reordered sequence:\n");
790 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
791 bb;
792 bb = bb->aux, index++)
793 {
794 fprintf (dump_file, " %i ", index);
795 if (get_bb_original (bb))
796 fprintf (dump_file, "duplicate of %i ",
797 get_bb_original (bb)->index);
798 else if (forwarder_block_p (bb)
799 && !LABEL_P (BB_HEAD (bb)))
800 fprintf (dump_file, "compensation ");
801 else
802 fprintf (dump_file, "bb %i ", bb->index);
803 fprintf (dump_file, " [%i]\n", bb->frequency);
804 }
805 }
806
807 prev_bb = ENTRY_BLOCK_PTR;
808 bb = ENTRY_BLOCK_PTR->next_bb;
809 index = NUM_FIXED_BLOCKS;
810
811 for (; bb; prev_bb = bb, bb = bb->aux, index ++)
812 {
813 bb->index = index;
814 SET_BASIC_BLOCK (index, bb);
815
816 bb->prev_bb = prev_bb;
817 prev_bb->next_bb = bb;
818 }
819 prev_bb->next_bb = EXIT_BLOCK_PTR;
820 EXIT_BLOCK_PTR->prev_bb = prev_bb;
821
822 /* Annoying special case - jump around dead jumptables left in the code. */
823 FOR_EACH_BB (bb)
824 {
825 edge e;
826 edge_iterator ei;
827
828 FOR_EACH_EDGE (e, ei, bb->succs)
829 if (e->flags & EDGE_FALLTHRU)
830 break;
831
832 if (e && !can_fallthru (e->src, e->dest))
833 force_nonfallthru (e);
834 }
835 }
836 \f
837 /* Perform sanity checks on the insn chain.
838 1. Check that next/prev pointers are consistent in both the forward and
839 reverse direction.
840 2. Count insns in chain, going both directions, and check if equal.
841 3. Check that get_last_insn () returns the actual end of chain. */
842
843 void
844 verify_insn_chain (void)
845 {
846 rtx x, prevx, nextx;
847 int insn_cnt1, insn_cnt2;
848
849 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
850 x != 0;
851 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
852 gcc_assert (PREV_INSN (x) == prevx);
853
854 gcc_assert (prevx == get_last_insn ());
855
856 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
857 x != 0;
858 nextx = x, insn_cnt2++, x = PREV_INSN (x))
859 gcc_assert (NEXT_INSN (x) == nextx);
860
861 gcc_assert (insn_cnt1 == insn_cnt2);
862 }
863 \f
864 /* If we have assembler epilogues, the block falling through to exit must
865 be the last one in the reordered chain when we reach final. Ensure
866 that this condition is met. */
867 static void
868 fixup_fallthru_exit_predecessor (void)
869 {
870 edge e;
871 edge_iterator ei;
872 basic_block bb = NULL;
873
874 /* This transformation is not valid before reload, because we might
875 separate a call from the instruction that copies the return
876 value. */
877 gcc_assert (reload_completed);
878
879 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
880 if (e->flags & EDGE_FALLTHRU)
881 bb = e->src;
882
883 if (bb && bb->aux)
884 {
885 basic_block c = ENTRY_BLOCK_PTR->next_bb;
886
887 /* If the very first block is the one with the fall-through exit
888 edge, we have to split that block. */
889 if (c == bb)
890 {
891 bb = split_block (bb, NULL)->dest;
892 bb->aux = c->aux;
893 c->aux = bb;
894 bb->il.rtl->footer = c->il.rtl->footer;
895 c->il.rtl->footer = NULL;
896 }
897
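      /* Unlink BB from its current position in the chain and re-link it
	 at the very end, so that it falls through to the exit block.  */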
898 while (c->aux != bb)
899 c = c->aux;
900
901 c->aux = bb->aux;
902 while (c->aux)
903 c = c->aux;
904
905 c->aux = bb;
906 bb->aux = NULL;
907 }
908 }
909 \f
910 /* Return true in case it is possible to duplicate the basic block BB. */
911
912 /* We do not want to declare the function in a header file, since it should
913 only be used through the cfghooks interface, and we do not want to move
914 it to cfgrtl.c since that would also require moving quite a lot of related
915 code. */
916 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
917
918 bool
919 cfg_layout_can_duplicate_bb_p (basic_block bb)
920 {
921 /* Do not attempt to duplicate tablejumps, as we need to unshare
922 the dispatch table. This is difficult to do, as the instructions
923 computing jump destination may be hoisted outside the basic block. */
924 if (tablejump_p (BB_END (bb), NULL, NULL))
925 return false;
926
927 /* Do not duplicate blocks containing insns that can't be copied. */
928 if (targetm.cannot_copy_insn_p)
929 {
930 rtx insn = BB_HEAD (bb);
931 while (1)
932 {
933 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
934 return false;
935 if (insn == BB_END (bb))
936 break;
937 insn = NEXT_INSN (insn);
938 }
939 }
940
941 return true;
942 }
943
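/* Create a copy of the insns from FROM to TO inclusive at the end of the
   insn chain and return the first insn of the copy.  Code labels, dispatch
   tables and notes that must stay unique are not duplicated.  */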
944 rtx
945 duplicate_insn_chain (rtx from, rtx to)
946 {
947 rtx insn, last;
948
949 /* Avoid updating the boundaries of the previous basic block. The
950 note will get removed from the insn stream in fixup. */
951 last = emit_note (NOTE_INSN_DELETED);
952
953 /* Create the copy at the end of the insn chain. The chain will
954 be reordered later. */
955 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
956 {
957 switch (GET_CODE (insn))
958 {
959 case INSN:
960 case CALL_INSN:
961 case JUMP_INSN:
962 /* Avoid copying dispatch tables. We never duplicate
963 tablejumps, so this can only happen if the table got
964 moved far away from the original jump. */
965 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
966 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
967 break;
968 emit_copy_of_insn_after (insn, get_last_insn ());
969 break;
970
971 case CODE_LABEL:
972 break;
973
974 case BARRIER:
975 emit_barrier ();
976 break;
977
978 case NOTE:
979 switch (NOTE_LINE_NUMBER (insn))
980 {
981 /* In case the prologue is empty and the function contains a label
982 in its first BB, we may want to copy the block. */
983 case NOTE_INSN_PROLOGUE_END:
984
985 case NOTE_INSN_DELETED:
986 case NOTE_INSN_DELETED_LABEL:
987 /* No problem to strip these. */
988 case NOTE_INSN_EPILOGUE_BEG:
989 case NOTE_INSN_FUNCTION_END:
990 /* Debug code expects these notes to exist just once.
991 Keep them in the master copy.
992 ??? It probably makes more sense to duplicate them for each
993 epilogue copy. */
994 case NOTE_INSN_FUNCTION_BEG:
995 /* There is always just a single entry to the function. */
996 case NOTE_INSN_BASIC_BLOCK:
997 break;
998
999 case NOTE_INSN_REPEATED_LINE_NUMBER:
1000 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1001 emit_note_copy (insn);
1002 break;
1003
1004 default:
1005 /* All other notes should have already been
1006 eliminated. */
1007 gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1008
1009 /* It is possible that no_line_number is set and the note
1010 won't be emitted. */
1011 emit_note_copy (insn);
1012 }
1013 break;
1014 default:
1015 gcc_unreachable ();
1016 }
1017 }
1018 insn = NEXT_INSN (last);
1019 delete_insn (last);
1020 return insn;
1021 }
1022 /* Create a duplicate of the basic block BB. */
1023
1024 /* We do not want to declare the function in a header file, since it should
1025 only be used through the cfghooks interface, and we do not want to move
1026 it to cfgrtl.c since that would also require moving quite a lot of related
1027 code. */
1028 extern basic_block cfg_layout_duplicate_bb (basic_block);
1029
1030 basic_block
1031 cfg_layout_duplicate_bb (basic_block bb)
1032 {
1033 rtx insn;
1034 basic_block new_bb;
1035
1036 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1037 new_bb = create_basic_block (insn,
1038 insn ? get_last_insn () : NULL,
1039 EXIT_BLOCK_PTR->prev_bb);
1040
1041 BB_COPY_PARTITION (new_bb, bb);
1042 if (bb->il.rtl->header)
1043 {
1044 insn = bb->il.rtl->header;
1045 while (NEXT_INSN (insn))
1046 insn = NEXT_INSN (insn);
1047 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1048 if (insn)
1049 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1050 }
1051
1052 if (bb->il.rtl->footer)
1053 {
1054 insn = bb->il.rtl->footer;
1055 while (NEXT_INSN (insn))
1056 insn = NEXT_INSN (insn);
1057 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1058 if (insn)
1059 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1060 }
1061
1062 if (bb->il.rtl->global_live_at_start)
1063 {
1064 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1065 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1066 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
1067 bb->il.rtl->global_live_at_start);
1068 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
1069 bb->il.rtl->global_live_at_end);
1070 }
1071
1072 return new_bb;
1073 }
1074 \f
1075 /* Main entry point to this module - initialize the data structures for
1076 CFG layout changes.
1077
1078 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1079 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1080 to date. */
1081
1082 void
1083 cfg_layout_initialize (unsigned int flags)
1084 {
1085 initialize_original_copy_tables ();
1086
1087 cfg_layout_rtl_register_cfg_hooks ();
1088
1089 record_effective_endpoints ();
1090
1091 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1092 }
1093
1094 /* Splits superblocks. */
1095 void
1096 break_superblocks (void)
1097 {
1098 sbitmap superblocks;
1099 bool need = false;
1100 basic_block bb;
1101
1102 superblocks = sbitmap_alloc (last_basic_block);
1103 sbitmap_zero (superblocks);
1104
1105 FOR_EACH_BB (bb)
1106 if (bb->flags & BB_SUPERBLOCK)
1107 {
1108 bb->flags &= ~BB_SUPERBLOCK;
1109 SET_BIT (superblocks, bb->index);
1110 need = true;
1111 }
1112
1113 if (need)
1114 {
1115 rebuild_jump_labels (get_insns ());
1116 find_many_sub_basic_blocks (superblocks);
1117 }
1118
1119 free (superblocks);
1120 }
1121
1122 /* Finalize the changes: reorder the insn list according to the sequence
1123 specified by the aux pointers, insert compensation code, and rebuild the scope forest. */
1124
1125 void
1126 cfg_layout_finalize (void)
1127 {
1128 basic_block bb;
1129
1130 #ifdef ENABLE_CHECKING
1131 verify_flow_info ();
1132 #endif
1133 rtl_register_cfg_hooks ();
1134 if (reload_completed
1135 #ifdef HAVE_epilogue
1136 && !HAVE_epilogue
1137 #endif
1138 )
1139 fixup_fallthru_exit_predecessor ();
1140 fixup_reorder_chain ();
1141
1142 #ifdef ENABLE_CHECKING
1143 verify_insn_chain ();
1144 #endif
1145 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1146 {
1147 bb->il.rtl->header = bb->il.rtl->footer = NULL;
1148 bb->aux = NULL;
1149 bb->il.rtl->visited = 0;
1150 }
1151
1152 break_superblocks ();
1153
1154 #ifdef ENABLE_CHECKING
1155 verify_flow_info ();
1156 #endif
1157
1158 free_original_copy_tables ();
1159 }
1160
1161 /* Checks whether all N blocks in BBS array can be copied. */
1162 bool
1163 can_copy_bbs_p (basic_block *bbs, unsigned n)
1164 {
1165 unsigned i;
1166 edge e;
1167 int ret = true;
1168
1169 for (i = 0; i < n; i++)
1170 bbs[i]->flags |= BB_DUPLICATED;
1171
1172 for (i = 0; i < n; i++)
1173 {
1174 /* If we would need to redirect an abnormal edge during duplication, fail. */
1175 edge_iterator ei;
1176 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1177 if ((e->flags & EDGE_ABNORMAL)
1178 && (e->dest->flags & BB_DUPLICATED))
1179 {
1180 ret = false;
1181 goto end;
1182 }
1183
1184 if (!can_duplicate_block_p (bbs[i]))
1185 {
1186 ret = false;
1187 break;
1188 }
1189 }
1190
1191 end:
1192 for (i = 0; i < n; i++)
1193 bbs[i]->flags &= ~BB_DUPLICATED;
1194
1195 return ret;
1196 }
1197
1198 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1199 are placed into array NEW_BBS in the same order. Edges from basic blocks
1200 in BBS are also duplicated, and the copies of those that lead into BBS are
1201 redirected to the appropriate newly created blocks. The
1202 function assigns the bbs to loops (the copy of basic block bb is assigned to
1203 the bb->loop_father->copy loop, so this must be set up correctly in advance)
1204 and updates dominators locally (the dominance information must be up to
1205 date and is updated for the newly created blocks).
1206
1207 BASE is the superloop to which the basic blocks belong; if its header or latch
1208 is copied, we do not set the new blocks as header or latch.
1209
1210 The created copies of the NUM_EDGES edges in array EDGES are stored in array NEW_EDGES,
1211 also in the same order.
1212
1213 Newly created basic blocks are put after the basic block AFTER in the
1214 instruction stream, and the order of the blocks in BBS array is preserved. */
1215
1216 void
1217 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1218 edge *edges, unsigned num_edges, edge *new_edges,
1219 struct loop *base, basic_block after)
1220 {
1221 unsigned i, j;
1222 basic_block bb, new_bb, dom_bb;
1223 edge e;
1224
1225 /* Duplicate bbs, update dominators, assign bbs to loops. */
1226 for (i = 0; i < n; i++)
1227 {
1228 /* Duplicate. */
1229 bb = bbs[i];
1230 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1231 after = new_bb;
1232 bb->flags |= BB_DUPLICATED;
1233 /* Add to loop. */
1234 add_bb_to_loop (new_bb, bb->loop_father->copy);
1235 /* Possibly set header. */
1236 if (bb->loop_father->header == bb && bb->loop_father != base)
1237 new_bb->loop_father->header = new_bb;
1238 /* Or latch. */
1239 if (bb->loop_father->latch == bb && bb->loop_father != base)
1240 new_bb->loop_father->latch = new_bb;
1241 }
1242
1243 /* Set dominators. */
1244 for (i = 0; i < n; i++)
1245 {
1246 bb = bbs[i];
1247 new_bb = new_bbs[i];
1248
1249 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1250 if (dom_bb->flags & BB_DUPLICATED)
1251 {
1252 dom_bb = get_bb_copy (dom_bb);
1253 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1254 }
1255 }
1256
1257 /* Redirect edges. */
1258 for (j = 0; j < num_edges; j++)
1259 new_edges[j] = NULL;
1260 for (i = 0; i < n; i++)
1261 {
1262 edge_iterator ei;
1263 new_bb = new_bbs[i];
1264 bb = bbs[i];
1265
1266 FOR_EACH_EDGE (e, ei, new_bb->succs)
1267 {
1268 for (j = 0; j < num_edges; j++)
1269 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1270 new_edges[j] = e;
1271
1272 if (!(e->dest->flags & BB_DUPLICATED))
1273 continue;
1274 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1275 }
1276 }
1277
1278 /* Clear information about duplicates. */
1279 for (i = 0; i < n; i++)
1280 bbs[i]->flags &= ~BB_DUPLICATED;
1281 }
1282
1283 #include "gt-cfglayout.h"