gcc/cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40
41 /* Holds the interesting leading and trailing notes for the function. */
42 rtx cfg_layout_function_footer, cfg_layout_function_header;
43
44 static rtx skip_insns_after_block (basic_block);
45 static void record_effective_endpoints (void);
46 static rtx label_for_bb (basic_block);
47 static void fixup_reorder_chain (void);
48
49 static void set_block_levels (tree, int);
50 static void change_scope (rtx, tree, tree);
51
52 void verify_insn_chain (void);
53 static void fixup_fallthru_exit_predecessor (void);
54 static tree insn_scope (rtx);
55 \f
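/* Detach the insns from FIRST to LAST, inclusive, from the insn stream:
   splice their neighbors together, update the first/last insn pointers as
   needed, and return FIRST.  The unlinked sub-chain keeps its internal
   next/prev links.  */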
56 rtx
57 unlink_insn_chain (rtx first, rtx last)
58 {
59 rtx prevfirst = PREV_INSN (first);
60 rtx nextlast = NEXT_INSN (last);
61
62 PREV_INSN (first) = NULL;
63 NEXT_INSN (last) = NULL;
64 if (prevfirst)
65 NEXT_INSN (prevfirst) = nextlast;
66 if (nextlast)
67 PREV_INSN (nextlast) = prevfirst;
68 else
69 set_last_insn (prevfirst);
70 if (!prevfirst)
71 set_first_insn (nextlast);
72 return first;
73 }
74 \f
75 /* Skip over inter-block insns occurring after BB which are typically
76 associated with BB (e.g., barriers). If there are any such insns,
77 we return the last one. Otherwise, we return the end of BB. */
78
79 static rtx
80 skip_insns_after_block (basic_block bb)
81 {
82 rtx insn, last_insn, next_head, prev;
83
84 next_head = NULL_RTX;
85 if (bb->next_bb != EXIT_BLOCK_PTR)
86 next_head = BB_HEAD (bb->next_bb);
87
88 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
89 {
90 if (insn == next_head)
91 break;
92
93 switch (GET_CODE (insn))
94 {
95 case BARRIER:
96 last_insn = insn;
97 continue;
98
99 case NOTE:
100 switch (NOTE_LINE_NUMBER (insn))
101 {
102 case NOTE_INSN_BLOCK_END:
103 last_insn = insn;
104 continue;
105 case NOTE_INSN_DELETED:
106 case NOTE_INSN_DELETED_LABEL:
107 continue;
108
109 default:
110 continue;
111 break;
112 }
113 break;
114
115 case CODE_LABEL:
116 if (NEXT_INSN (insn)
117 && JUMP_P (NEXT_INSN (insn))
118 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
119 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
120 {
121 insn = NEXT_INSN (insn);
122 last_insn = insn;
123 continue;
124 }
125 break;
126
127 default:
128 break;
129 }
130
131 break;
132 }
133
134 /* It is possible to hit a contradictory sequence. For instance:
135
136 jump_insn
137 NOTE_INSN_BLOCK_BEG
138 barrier
139
140 where the barrier belongs to jump_insn, but the note does not. This can be
141 created by removing the basic block originally following
142 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
143
144 for (insn = last_insn; insn != BB_END (bb); insn = prev)
145 {
146 prev = PREV_INSN (insn);
147 if (NOTE_P (insn))
148 switch (NOTE_LINE_NUMBER (insn))
149 {
150 case NOTE_INSN_BLOCK_END:
151 case NOTE_INSN_DELETED:
152 case NOTE_INSN_DELETED_LABEL:
153 continue;
154 default:
155 reorder_insns (insn, insn, last_insn);
156 }
157 }
158
159 return last_insn;
160 }
161
162 /* Locate or create a label for a given basic block. */
163
164 static rtx
165 label_for_bb (basic_block bb)
166 {
167 rtx label = BB_HEAD (bb);
168
169 if (!LABEL_P (label))
170 {
171 if (dump_file)
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
173
174 label = block_label (bb);
175 }
176
177 return label;
178 }
179
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
182
183 static void
184 record_effective_endpoints (void)
185 {
186 rtx next_insn;
187 basic_block bb;
188 rtx insn;
189
190 for (insn = get_insns ();
191 insn
192 && NOTE_P (insn)
193 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
194 insn = NEXT_INSN (insn))
195 continue;
196 /* No basic blocks at all? */
197 gcc_assert (insn);
198
199 if (PREV_INSN (insn))
200 cfg_layout_function_header =
201 unlink_insn_chain (get_insns (), PREV_INSN (insn));
202 else
203 cfg_layout_function_header = NULL_RTX;
204
205 next_insn = get_insns ();
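/* For each block, stash the insns between the previous block's end and this
   block's head into bb->il.rtl->header, and the trailing insns found by
   skip_insns_after_block into bb->il.rtl->footer.  */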
206 FOR_EACH_BB (bb)
207 {
208 rtx end;
209
210 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
211 bb->il.rtl->header = unlink_insn_chain (next_insn,
212 PREV_INSN (BB_HEAD (bb)));
213 end = skip_insns_after_block (bb);
214 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
215 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
216 next_insn = NEXT_INSN (BB_END (bb));
217 }
218
219 cfg_layout_function_footer = next_insn;
220 if (cfg_layout_function_footer)
221 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
222 }
223 \f
224 DEF_VEC_I(int);
225 DEF_VEC_ALLOC_I(int,heap);
226
227 /* Data structures representing the mapping of INSN_LOCATORs into scope blocks,
228 line numbers and files. In order to be GGC friendly we need to use separate
229 varrays. This also slightly improves the memory locality in the binary search.
230 The _locs arrays contain the locators at which the given property changes.
231 block_locators_blocks contains the scope block that is used for all insn
232 locators greater than the corresponding block_locators_locs value and smaller
233 than the following one. Similarly for the other properties. */
234 static VEC(int,heap) *block_locators_locs;
235 static GTY(()) VEC(tree,gc) *block_locators_blocks;
236 static VEC(int,heap) *line_locators_locs;
237 static VEC(int,heap) *line_locators_lines;
238 static VEC(int,heap) *file_locators_locs;
239 static GTY(()) varray_type file_locators_files;
240 int prologue_locator;
241 int epilogue_locator;
242
243 /* During the RTL expansion the lexical blocks and line numbers are
244 represented via INSN_NOTEs. Replace them by a representation using
245 INSN_LOCATORs. */
246
247 unsigned int
248 insn_locators_initialize (void)
249 {
250 tree block = NULL;
251 tree last_block = NULL;
252 rtx insn, next;
253 int loc = 0;
254 int line_number = 0, last_line_number = 0;
255 const char *file_name = NULL, *last_file_name = NULL;
256
257 prologue_locator = epilogue_locator = 0;
258
259 block_locators_locs = VEC_alloc (int, heap, 32);
260 block_locators_blocks = VEC_alloc (tree, gc, 32);
261 line_locators_locs = VEC_alloc (int, heap, 32);
262 line_locators_lines = VEC_alloc (int, heap, 32);
263 file_locators_locs = VEC_alloc (int, heap, 32);
264 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
265
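/* Walk the insn chain; whenever the scope block, line number or file name
   changes at a point of interest, allocate a new locator value and record
   the changed property in the corresponding vector.  */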
266 for (insn = get_insns (); insn; insn = next)
267 {
268 int active = 0;
269
270 next = NEXT_INSN (insn);
271
272 if (NOTE_P (insn))
273 {
274 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
275 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
276 if (NOTE_LINE_NUMBER (insn) > 0)
277 {
278 expanded_location xloc;
279 NOTE_EXPANDED_LOCATION (xloc, insn);
280 line_number = xloc.line;
281 file_name = xloc.file;
282 }
283 }
284 else
285 active = (active_insn_p (insn)
286 && GET_CODE (PATTERN (insn)) != ADDR_VEC
287 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
288
289 check_block_change (insn, &block);
290
291 if (active
292 || !next
293 || (!prologue_locator && file_name))
294 {
295 if (last_block != block)
296 {
297 loc++;
298 VEC_safe_push (int, heap, block_locators_locs, loc);
299 VEC_safe_push (tree, gc, block_locators_blocks, block);
300 last_block = block;
301 }
302 if (last_line_number != line_number)
303 {
304 loc++;
305 VEC_safe_push (int, heap, line_locators_locs, loc);
306 VEC_safe_push (int, heap, line_locators_lines, line_number);
307 last_line_number = line_number;
308 }
309 if (last_file_name != file_name)
310 {
311 loc++;
312 VEC_safe_push (int, heap, file_locators_locs, loc);
313 VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
314 last_file_name = file_name;
315 }
316 if (!prologue_locator && file_name)
317 prologue_locator = loc;
318 if (!next)
319 epilogue_locator = loc;
320 if (active)
321 INSN_LOCATOR (insn) = loc;
322 }
323 }
324
325 /* Tag the blocks with a depth number so that change_scope can find
326 the common parent easily. */
327 set_block_levels (DECL_INITIAL (cfun->decl), 0);
328
329 free_block_changes ();
330 return 0;
331 }
332
333 struct tree_opt_pass pass_insn_locators_initialize =
334 {
335 "locators", /* name */
336 NULL, /* gate */
337 insn_locators_initialize, /* execute */
338 NULL, /* sub */
339 NULL, /* next */
340 0, /* static_pass_number */
341 0, /* tv_id */
342 0, /* properties_required */
343 0, /* properties_provided */
344 0, /* properties_destroyed */
345 0, /* todo_flags_start */
346 TODO_dump_func, /* todo_flags_finish */
347 0 /* letter */
348 };
349
350
351 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
352 found in the block tree. */
353
354 static void
355 set_block_levels (tree block, int level)
356 {
357 while (block)
358 {
359 BLOCK_NUMBER (block) = level;
360 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
361 block = BLOCK_CHAIN (block);
362 }
363 }
364 \f
365 /* Return the scope resulting from the combination of S1 and S2. */
366 static tree
367 choose_inner_scope (tree s1, tree s2)
368 {
369 if (!s1)
370 return s2;
371 if (!s2)
372 return s1;
373 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
374 return s1;
375 return s2;
376 }
377 \f
378 /* Emit lexical block notes needed to change scope from S1 to S2. */
379
380 static void
381 change_scope (rtx orig_insn, tree s1, tree s2)
382 {
383 rtx insn = orig_insn;
384 tree com = NULL_TREE;
385 tree ts1 = s1, ts2 = s2;
386 tree s;
387
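/* Walk up the block tree from both scopes until they meet; BLOCK_NUMBER
   holds the depth assigned by set_block_levels, so the deeper scope is
   walked up first.  */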
388 while (ts1 != ts2)
389 {
390 gcc_assert (ts1 && ts2);
391 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
392 ts1 = BLOCK_SUPERCONTEXT (ts1);
393 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
394 ts2 = BLOCK_SUPERCONTEXT (ts2);
395 else
396 {
397 ts1 = BLOCK_SUPERCONTEXT (ts1);
398 ts2 = BLOCK_SUPERCONTEXT (ts2);
399 }
400 }
401 com = ts1;
402
403 /* Close scopes. */
404 s = s1;
405 while (s != com)
406 {
407 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
408 NOTE_BLOCK (note) = s;
409 s = BLOCK_SUPERCONTEXT (s);
410 }
411
412 /* Open scopes. */
413 s = s2;
414 while (s != com)
415 {
416 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
417 NOTE_BLOCK (insn) = s;
418 s = BLOCK_SUPERCONTEXT (s);
419 }
420 }
421
422 /* Return the lexical scope block that INSN belongs to. */
423 static tree
424 insn_scope (rtx insn)
425 {
426 int max = VEC_length (int, block_locators_locs);
427 int min = 0;
428 int loc = INSN_LOCATOR (insn);
429
430 /* When block_locators_locs was initialized, the pro- and epilogue
431 insns didn't exist yet and can therefore not be found this way.
432 But we know that they belong to the outermost block of the
433 current function.
434 Without this test, the prologue would be put inside the block of
435 the first valid instruction in the function, and when that first
436 insn is part of an inlined function then the low_pc of that
437 inlined function is messed up. Likewise for the epilogue and
438 the last valid instruction. */
439 if (loc == prologue_locator || loc == epilogue_locator)
440 return DECL_INITIAL (cfun->decl);
441
442 if (!max || !loc)
443 return NULL;
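/* Binary search for the last entry of block_locators_locs that is not
   greater than LOC; the corresponding element of block_locators_blocks is
   the scope in effect at that locator.  */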
444 while (1)
445 {
446 int pos = (min + max) / 2;
447 int tmp = VEC_index (int, block_locators_locs, pos);
448
449 if (tmp <= loc && min != pos)
450 min = pos;
451 else if (tmp > loc && max != pos)
452 max = pos;
453 else
454 {
455 min = pos;
456 break;
457 }
458 }
459 return VEC_index (tree, block_locators_blocks, min);
460 }
461
462 /* Return line number of the statement specified by the locator. */
463 int
464 locator_line (int loc)
465 {
466 int max = VEC_length (int, line_locators_locs);
467 int min = 0;
468
469 if (!max || !loc)
470 return 0;
471 while (1)
472 {
473 int pos = (min + max) / 2;
474 int tmp = VEC_index (int, line_locators_locs, pos);
475
476 if (tmp <= loc && min != pos)
477 min = pos;
478 else if (tmp > loc && max != pos)
479 max = pos;
480 else
481 {
482 min = pos;
483 break;
484 }
485 }
486 return VEC_index (int, line_locators_lines, min);
487 }
488
489 /* Return line number of the statement that produced this insn. */
490 int
491 insn_line (rtx insn)
492 {
493 return locator_line (INSN_LOCATOR (insn));
494 }
495
496 /* Return source file of the statement specified by LOC. */
497 const char *
498 locator_file (int loc)
499 {
500 int max = VEC_length (int, file_locators_locs);
501 int min = 0;
502
503 if (!max || !loc)
504 return NULL;
505 while (1)
506 {
507 int pos = (min + max) / 2;
508 int tmp = VEC_index (int, file_locators_locs, pos);
509
510 if (tmp <= loc && min != pos)
511 min = pos;
512 else if (tmp > loc && max != pos)
513 max = pos;
514 else
515 {
516 min = pos;
517 break;
518 }
519 }
520 return VARRAY_CHAR_PTR (file_locators_files, min);
521 }
522
523 /* Return source file of the statement that produced this insn. */
524 const char *
525 insn_file (rtx insn)
526 {
527 return locator_file (INSN_LOCATOR (insn));
528 }
529
530 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
531 on the scope tree and the newly reordered instructions. */
532
533 void
534 reemit_insn_block_notes (void)
535 {
536 tree cur_block = DECL_INITIAL (cfun->decl);
537 rtx insn, note;
538
539 insn = get_insns ();
540 if (!active_insn_p (insn))
541 insn = next_active_insn (insn);
542 for (; insn; insn = next_active_insn (insn))
543 {
544 tree this_block;
545
546 /* Avoid putting scope notes between a jump table and its label. */
547 if (JUMP_P (insn)
548 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
549 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
550 continue;
551
552 this_block = insn_scope (insn);
553 /* For a SEQUENCE, compute the scope resulting from merging the scopes
554 of all instructions nested inside. */
555 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
556 {
557 int i;
558 rtx body = PATTERN (insn);
559
560 this_block = NULL;
561 for (i = 0; i < XVECLEN (body, 0); i++)
562 this_block = choose_inner_scope (this_block,
563 insn_scope (XVECEXP (body, 0, i)));
564 }
565 if (! this_block)
566 continue;
567
568 if (this_block != cur_block)
569 {
570 change_scope (insn, cur_block, this_block);
571 cur_block = this_block;
572 }
573 }
574
575 /* change_scope emits before the insn, not after. */
576 note = emit_note (NOTE_INSN_DELETED);
577 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
578 delete_insn (note);
579
580 reorder_blocks ();
581 }
582 \f
583 /* Given a reorder chain, rearrange the code to match. */
584
585 static void
586 fixup_reorder_chain (void)
587 {
588 basic_block bb, prev_bb;
589 int index;
590 rtx insn = NULL;
591
592 if (cfg_layout_function_header)
593 {
594 set_first_insn (cfg_layout_function_header);
595 insn = cfg_layout_function_header;
596 while (NEXT_INSN (insn))
597 insn = NEXT_INSN (insn);
598 }
599
600 /* First do the bulk reordering -- rechain the blocks without regard to
601 the needed changes to jumps and labels. */
602
603 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
604 bb != 0;
605 bb = bb->aux, index++)
606 {
607 if (bb->il.rtl->header)
608 {
609 if (insn)
610 NEXT_INSN (insn) = bb->il.rtl->header;
611 else
612 set_first_insn (bb->il.rtl->header);
613 PREV_INSN (bb->il.rtl->header) = insn;
614 insn = bb->il.rtl->header;
615 while (NEXT_INSN (insn))
616 insn = NEXT_INSN (insn);
617 }
618 if (insn)
619 NEXT_INSN (insn) = BB_HEAD (bb);
620 else
621 set_first_insn (BB_HEAD (bb));
622 PREV_INSN (BB_HEAD (bb)) = insn;
623 insn = BB_END (bb);
624 if (bb->il.rtl->footer)
625 {
626 NEXT_INSN (insn) = bb->il.rtl->footer;
627 PREV_INSN (bb->il.rtl->footer) = insn;
628 while (NEXT_INSN (insn))
629 insn = NEXT_INSN (insn);
630 }
631 }
632
633 gcc_assert (index == n_basic_blocks);
634
635 NEXT_INSN (insn) = cfg_layout_function_footer;
636 if (cfg_layout_function_footer)
637 PREV_INSN (cfg_layout_function_footer) = insn;
638
639 while (NEXT_INSN (insn))
640 insn = NEXT_INSN (insn);
641
642 set_last_insn (insn);
643 #ifdef ENABLE_CHECKING
644 verify_insn_chain ();
645 #endif
646 delete_dead_jumptables ();
647
648 /* Now add jumps and labels as needed to match the blocks' new
649 outgoing edges. */
650
651 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
652 {
653 edge e_fall, e_taken, e;
654 rtx bb_end_insn;
655 basic_block nb;
656 edge_iterator ei;
657
658 if (EDGE_COUNT (bb->succs) == 0)
659 continue;
660
661 /* Find the old fallthru edge, and another non-EH edge for
662 a taken jump. */
663 e_taken = e_fall = NULL;
664
665 FOR_EACH_EDGE (e, ei, bb->succs)
666 if (e->flags & EDGE_FALLTHRU)
667 e_fall = e;
668 else if (! (e->flags & EDGE_EH))
669 e_taken = e;
670
671 bb_end_insn = BB_END (bb);
672 if (JUMP_P (bb_end_insn))
673 {
674 if (any_condjump_p (bb_end_insn))
675 {
676 /* If the old fallthru is still next, nothing to do. */
677 if (bb->aux == e_fall->dest
678 || e_fall->dest == EXIT_BLOCK_PTR)
679 continue;
680
681 /* The degenerate case of a conditional jump jumping to the next
682 instruction can happen for jumps with side effects. We need
683 to construct a forwarder block, and this will be done just
684 fine by force_nonfallthru below. */
685 if (!e_taken)
686 ;
687
688 /* There is another special case: if *neither* block is next,
689 such as happens at the very end of a function, then we'll
690 need to add a new unconditional jump. Choose the taken
691 edge based on known or assumed probability. */
692 else if (bb->aux != e_taken->dest)
693 {
694 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
695
696 if (note
697 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
698 && invert_jump (bb_end_insn,
699 (e_fall->dest == EXIT_BLOCK_PTR
700 ? NULL_RTX
701 : label_for_bb (e_fall->dest)), 0))
702 {
703 e_fall->flags &= ~EDGE_FALLTHRU;
704 #ifdef ENABLE_CHECKING
705 gcc_assert (could_fall_through
706 (e_taken->src, e_taken->dest));
707 #endif
708 e_taken->flags |= EDGE_FALLTHRU;
709 update_br_prob_note (bb);
710 e = e_fall, e_fall = e_taken, e_taken = e;
711 }
712 }
713
714 /* If the "jumping" edge is a crossing edge, and the fall
715 through edge is non-crossing, leave things as they are. */
716 else if ((e_taken->flags & EDGE_CROSSING)
717 && !(e_fall->flags & EDGE_CROSSING))
718 continue;
719
720 /* Otherwise we can try to invert the jump. This will
721 basically never fail; however, keep up the pretense. */
722 else if (invert_jump (bb_end_insn,
723 (e_fall->dest == EXIT_BLOCK_PTR
724 ? NULL_RTX
725 : label_for_bb (e_fall->dest)), 0))
726 {
727 e_fall->flags &= ~EDGE_FALLTHRU;
728 #ifdef ENABLE_CHECKING
729 gcc_assert (could_fall_through
730 (e_taken->src, e_taken->dest));
731 #endif
732 e_taken->flags |= EDGE_FALLTHRU;
733 update_br_prob_note (bb);
734 continue;
735 }
736 }
737 else
738 {
739 /* Otherwise we have some return, switch or computed
740 jump. In the 99% case, there should not have been a
741 fallthru edge. */
742 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
743 continue;
744 }
745 }
746 else
747 {
748 /* No fallthru implies a noreturn function with EH edges, or
749 something similarly bizarre. In any case, we don't need to
750 do anything. */
751 if (! e_fall)
752 continue;
753
754 /* If the fallthru block is still next, nothing to do. */
755 if (bb->aux == e_fall->dest)
756 continue;
757
758 /* A fallthru to exit block. */
759 if (e_fall->dest == EXIT_BLOCK_PTR)
760 continue;
761 }
762
763 /* We got here if we need to add a new jump insn. */
764 nb = force_nonfallthru (e_fall);
765 if (nb)
766 {
767 nb->il.rtl->visited = 1;
768 nb->aux = bb->aux;
769 bb->aux = nb;
770 /* Don't process this new block. */
771 bb = nb;
772
773 /* Make sure new bb is tagged for correct section (same as
774 fall-thru source, since you cannot fall through across
775 section boundaries). */
776 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
777 if (flag_reorder_blocks_and_partition
778 && targetm.have_named_sections
779 && JUMP_P (BB_END (bb))
780 && !any_condjump_p (BB_END (bb))
781 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
782 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
783 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
784 }
785 }
786
787 /* Put basic_block_info in the new order. */
788
789 if (dump_file)
790 {
791 fprintf (dump_file, "Reordered sequence:\n");
792 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
793 bb;
794 bb = bb->aux, index++)
795 {
796 fprintf (dump_file, " %i ", index);
797 if (get_bb_original (bb))
798 fprintf (dump_file, "duplicate of %i ",
799 get_bb_original (bb)->index);
800 else if (forwarder_block_p (bb)
801 && !LABEL_P (BB_HEAD (bb)))
802 fprintf (dump_file, "compensation ");
803 else
804 fprintf (dump_file, "bb %i ", bb->index);
805 fprintf (dump_file, " [%i]\n", bb->frequency);
806 }
807 }
808
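/* Relink the basic block chain and renumber the blocks to match the new
   order given by the aux pointers.  */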
809 prev_bb = ENTRY_BLOCK_PTR;
810 bb = ENTRY_BLOCK_PTR->next_bb;
811 index = NUM_FIXED_BLOCKS;
812
813 for (; bb; prev_bb = bb, bb = bb->aux, index ++)
814 {
815 bb->index = index;
816 SET_BASIC_BLOCK (index, bb);
817
818 bb->prev_bb = prev_bb;
819 prev_bb->next_bb = bb;
820 }
821 prev_bb->next_bb = EXIT_BLOCK_PTR;
822 EXIT_BLOCK_PTR->prev_bb = prev_bb;
823
824 /* Annoying special case - jump around dead jumptables left in the code. */
825 FOR_EACH_BB (bb)
826 {
827 edge e;
828 edge_iterator ei;
829
830 FOR_EACH_EDGE (e, ei, bb->succs)
831 if (e->flags & EDGE_FALLTHRU)
832 break;
833
834 if (e && !can_fallthru (e->src, e->dest))
835 force_nonfallthru (e);
836 }
837 }
838 \f
839 /* Perform sanity checks on the insn chain.
840 1. Check that next/prev pointers are consistent in both the forward and
841 reverse direction.
842 2. Count insns in chain, going both directions, and check if equal.
843 3. Check that get_last_insn () returns the actual end of chain. */
844
845 void
846 verify_insn_chain (void)
847 {
848 rtx x, prevx, nextx;
849 int insn_cnt1, insn_cnt2;
850
851 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
852 x != 0;
853 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
854 gcc_assert (PREV_INSN (x) == prevx);
855
856 gcc_assert (prevx == get_last_insn ());
857
858 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
859 x != 0;
860 nextx = x, insn_cnt2++, x = PREV_INSN (x))
861 gcc_assert (NEXT_INSN (x) == nextx);
862
863 gcc_assert (insn_cnt1 == insn_cnt2);
864 }
865 \f
866 /* If we have assembler epilogues, the block falling through to exit must
867 be the last one in the reordered chain when we reach final. Ensure
868 that this condition is met. */
869 static void
870 fixup_fallthru_exit_predecessor (void)
871 {
872 edge e;
873 edge_iterator ei;
874 basic_block bb = NULL;
875
876 /* This transformation is not valid before reload, because we might
877 separate a call from the instruction that copies the return
878 value. */
879 gcc_assert (reload_completed);
880
881 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
882 if (e->flags & EDGE_FALLTHRU)
883 bb = e->src;
884
885 if (bb && bb->aux)
886 {
887 basic_block c = ENTRY_BLOCK_PTR->next_bb;
888
889 /* If the very first block is the one with the fall-through exit
890 edge, we have to split that block. */
891 if (c == bb)
892 {
893 bb = split_block (bb, NULL)->dest;
894 bb->aux = c->aux;
895 c->aux = bb;
896 bb->il.rtl->footer = c->il.rtl->footer;
897 c->il.rtl->footer = NULL;
898 }
899
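/* Unlink BB from its current position in the aux chain and relink it at
   the very end, so that it falls through to the exit block.  */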
900 while (c->aux != bb)
901 c = c->aux;
902
903 c->aux = bb->aux;
904 while (c->aux)
905 c = c->aux;
906
907 c->aux = bb;
908 bb->aux = NULL;
909 }
910 }
911 \f
912 /* Return true if it is possible to duplicate the basic block BB. */
913
914 /* We do not want to declare the function in a header file, since it should
915 only be used through the cfghooks interface, and we do not want to move
916 it to cfgrtl.c since it would also require moving quite a lot of related
917 code. */
918 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
919
920 bool
921 cfg_layout_can_duplicate_bb_p (basic_block bb)
922 {
923 /* Do not attempt to duplicate tablejumps, as we need to unshare
924 the dispatch table. This is difficult to do, as the instructions
925 computing jump destination may be hoisted outside the basic block. */
926 if (tablejump_p (BB_END (bb), NULL, NULL))
927 return false;
928
929 /* Do not duplicate blocks containing insns that can't be copied. */
930 if (targetm.cannot_copy_insn_p)
931 {
932 rtx insn = BB_HEAD (bb);
933 while (1)
934 {
935 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
936 return false;
937 if (insn == BB_END (bb))
938 break;
939 insn = NEXT_INSN (insn);
940 }
941 }
942
943 return true;
944 }
945
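/* Create a copy of the insns from FROM to TO, inclusive, at the end of the
   insn chain and return the first insn of the copy.  Dispatch tables, labels
   and most notes are deliberately not duplicated.  */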
946 rtx
947 duplicate_insn_chain (rtx from, rtx to)
948 {
949 rtx insn, last;
950
951 /* Avoid updating the boundaries of the previous basic block. The
952 note will get removed from the insn stream in fixup. */
953 last = emit_note (NOTE_INSN_DELETED);
954
955 /* Create copy at the end of INSN chain. The chain will
956 be reordered later. */
957 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
958 {
959 switch (GET_CODE (insn))
960 {
961 case INSN:
962 case CALL_INSN:
963 case JUMP_INSN:
964 /* Avoid copying of dispatch tables. We never duplicate
965 tablejumps, so this can only happen when the table got
966 moved far from the original jump. */
967 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
968 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
969 break;
970 emit_copy_of_insn_after (insn, get_last_insn ());
971 break;
972
973 case CODE_LABEL:
974 break;
975
976 case BARRIER:
977 emit_barrier ();
978 break;
979
980 case NOTE:
981 switch (NOTE_LINE_NUMBER (insn))
982 {
983 /* In case the prologue is empty and the function contains a label
984 in its first BB, we may want to copy the block. */
985 case NOTE_INSN_PROLOGUE_END:
986
987 case NOTE_INSN_DELETED:
988 case NOTE_INSN_DELETED_LABEL:
989 /* No problem to strip these. */
990 case NOTE_INSN_EPILOGUE_BEG:
991 case NOTE_INSN_FUNCTION_END:
992 /* Debug code expects these notes to exist just once.
993 Keep them in the master copy.
994 ??? It probably makes more sense to duplicate them for each
995 epilogue copy. */
996 case NOTE_INSN_FUNCTION_BEG:
997 /* There is always just a single entry to the function. */
998 case NOTE_INSN_BASIC_BLOCK:
999 break;
1000
1001 case NOTE_INSN_REPEATED_LINE_NUMBER:
1002 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1003 emit_note_copy (insn);
1004 break;
1005
1006 default:
1007 /* All other notes should have already been eliminated.
1008 */
1009 gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1010
1011 /* It is possible that no_line_number is set and the note
1012 won't be emitted. */
1013 emit_note_copy (insn);
1014 }
1015 break;
1016 default:
1017 gcc_unreachable ();
1018 }
1019 }
1020 insn = NEXT_INSN (last);
1021 delete_insn (last);
1022 return insn;
1023 }
1024 /* Create a duplicate of the basic block BB. */
1025
1026 /* We do not want to declare the function in a header file, since it should
1027 only be used through the cfghooks interface, and we do not want to move
1028 it to cfgrtl.c since it would also require moving quite a lot of related
1029 code. */
1030 extern basic_block cfg_layout_duplicate_bb (basic_block);
1031
1032 basic_block
1033 cfg_layout_duplicate_bb (basic_block bb)
1034 {
1035 rtx insn;
1036 basic_block new_bb;
1037
1038 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1039 new_bb = create_basic_block (insn,
1040 insn ? get_last_insn () : NULL,
1041 EXIT_BLOCK_PTR->prev_bb);
1042
1043 BB_COPY_PARTITION (new_bb, bb);
1044 if (bb->il.rtl->header)
1045 {
1046 insn = bb->il.rtl->header;
1047 while (NEXT_INSN (insn))
1048 insn = NEXT_INSN (insn);
1049 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1050 if (insn)
1051 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1052 }
1053
1054 if (bb->il.rtl->footer)
1055 {
1056 insn = bb->il.rtl->footer;
1057 while (NEXT_INSN (insn))
1058 insn = NEXT_INSN (insn);
1059 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1060 if (insn)
1061 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1062 }
1063
1064 if (bb->il.rtl->global_live_at_start)
1065 {
1066 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1067 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1068 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
1069 bb->il.rtl->global_live_at_start);
1070 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
1071 bb->il.rtl->global_live_at_end);
1072 }
1073
1074 return new_bb;
1075 }
1076 \f
1077 /* Main entry point to this module - initialize the data structures for
1078 CFG layout changes.
1079
1080 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1081 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1082 to date. */
1083
1084 void
1085 cfg_layout_initialize (unsigned int flags)
1086 {
1087 initialize_original_copy_tables ();
1088
1089 cfg_layout_rtl_register_cfg_hooks ();
1090
1091 record_effective_endpoints ();
1092
1093 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1094 }
1095
1096 /* Split superblocks. */
1097 void
1098 break_superblocks (void)
1099 {
1100 sbitmap superblocks;
1101 bool need = false;
1102 basic_block bb;
1103
1104 superblocks = sbitmap_alloc (last_basic_block);
1105 sbitmap_zero (superblocks);
1106
1107 FOR_EACH_BB (bb)
1108 if (bb->flags & BB_SUPERBLOCK)
1109 {
1110 bb->flags &= ~BB_SUPERBLOCK;
1111 SET_BIT (superblocks, bb->index);
1112 need = true;
1113 }
1114
1115 if (need)
1116 {
1117 rebuild_jump_labels (get_insns ());
1118 find_many_sub_basic_blocks (superblocks);
1119 }
1120
1121 free (superblocks);
1122 }
1123
1124 /* Finalize the changes: reorder the insn list according to the sequence specified
1125 by the aux pointers, insert compensation code, and rebuild the scope forest. */
1126
1127 void
1128 cfg_layout_finalize (void)
1129 {
1130 basic_block bb;
1131
1132 #ifdef ENABLE_CHECKING
1133 verify_flow_info ();
1134 #endif
1135 rtl_register_cfg_hooks ();
1136 if (reload_completed
1137 #ifdef HAVE_epilogue
1138 && !HAVE_epilogue
1139 #endif
1140 )
1141 fixup_fallthru_exit_predecessor ();
1142 fixup_reorder_chain ();
1143
1144 #ifdef ENABLE_CHECKING
1145 verify_insn_chain ();
1146 #endif
1147 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1148 {
1149 bb->il.rtl->header = bb->il.rtl->footer = NULL;
1150 bb->aux = NULL;
1151 bb->il.rtl->visited = 0;
1152 }
1153
1154 break_superblocks ();
1155
1156 #ifdef ENABLE_CHECKING
1157 verify_flow_info ();
1158 #endif
1159
1160 free_original_copy_tables ();
1161 }
1162
1163 /* Checks whether all N blocks in BBS array can be copied. */
1164 bool
1165 can_copy_bbs_p (basic_block *bbs, unsigned n)
1166 {
1167 unsigned i;
1168 edge e;
1169 int ret = true;
1170
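/* Temporarily mark the region so that edges whose destination lies inside
   BBS can be recognized below.  */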
1171 for (i = 0; i < n; i++)
1172 bbs[i]->flags |= BB_DUPLICATED;
1173
1174 for (i = 0; i < n; i++)
1175 {
1176 /* If we would have to redirect an abnormal edge during duplication, fail. */
1177 edge_iterator ei;
1178 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1179 if ((e->flags & EDGE_ABNORMAL)
1180 && (e->dest->flags & BB_DUPLICATED))
1181 {
1182 ret = false;
1183 goto end;
1184 }
1185
1186 if (!can_duplicate_block_p (bbs[i]))
1187 {
1188 ret = false;
1189 break;
1190 }
1191 }
1192
1193 end:
1194 for (i = 0; i < n; i++)
1195 bbs[i]->flags &= ~BB_DUPLICATED;
1196
1197 return ret;
1198 }
1199
1200 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1201 are placed into array NEW_BBS in the same order. Edges from basic blocks
1202 in BBS are also duplicated, and the copies of those edges that lead
1203 into BBS are redirected to the appropriate newly created blocks. The
1204 function assigns bbs into loops (copy of basic block bb is assigned to
1205 bb->loop_father->copy loop, so this must be set up correctly in advance)
1206 and updates dominators locally (LOOPS structure that contains the information
1207 about dominators is passed to enable this).
1208
1209 BASE is the superloop to which these basic blocks belong; if its header or latch
1210 is copied, we do not set the new blocks as header or latch.
1211
1212 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1213 also in the same order.
1214
1215 Newly created basic blocks are put after the basic block AFTER in the
1216 instruction stream, and the order of the blocks in BBS array is preserved. */
1217
1218 void
1219 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1220 edge *edges, unsigned num_edges, edge *new_edges,
1221 struct loop *base, basic_block after)
1222 {
1223 unsigned i, j;
1224 basic_block bb, new_bb, dom_bb;
1225 edge e;
1226
1227 /* Duplicate bbs, update dominators, assign bbs to loops. */
1228 for (i = 0; i < n; i++)
1229 {
1230 /* Duplicate. */
1231 bb = bbs[i];
1232 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1233 after = new_bb;
1234 bb->flags |= BB_DUPLICATED;
1235 /* Add to loop. */
1236 add_bb_to_loop (new_bb, bb->loop_father->copy);
1237 /* Possibly set header. */
1238 if (bb->loop_father->header == bb && bb->loop_father != base)
1239 new_bb->loop_father->header = new_bb;
1240 /* Or latch. */
1241 if (bb->loop_father->latch == bb && bb->loop_father != base)
1242 new_bb->loop_father->latch = new_bb;
1243 }
1244
1245 /* Set dominators. */
1246 for (i = 0; i < n; i++)
1247 {
1248 bb = bbs[i];
1249 new_bb = new_bbs[i];
1250
1251 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1252 if (dom_bb->flags & BB_DUPLICATED)
1253 {
1254 dom_bb = get_bb_copy (dom_bb);
1255 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1256 }
1257 }
1258
1259 /* Redirect edges. */
1260 for (j = 0; j < num_edges; j++)
1261 new_edges[j] = NULL;
1262 for (i = 0; i < n; i++)
1263 {
1264 edge_iterator ei;
1265 new_bb = new_bbs[i];
1266 bb = bbs[i];
1267
1268 FOR_EACH_EDGE (e, ei, new_bb->succs)
1269 {
1270 for (j = 0; j < num_edges; j++)
1271 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1272 new_edges[j] = e;
1273
1274 if (!(e->dest->flags & BB_DUPLICATED))
1275 continue;
1276 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1277 }
1278 }
1279
1280 /* Clear information about duplicates. */
1281 for (i = 0; i < n; i++)
1282 bbs[i]->flags &= ~BB_DUPLICATED;
1283 }
1284
1285 #include "gt-cfglayout.h"