cfglayout.c (fixup_reorder_chain): Postpone deleting dead jump tables...
[gcc.git] / gcc / cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "vecprim.h"
41
/* Holds the interesting leading (header) and trailing (footer) notes
   for the function, detached from the insn stream while in cfglayout
   mode (see record_effective_endpoints / fixup_reorder_chain).  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

/* Forward declarations for the static helpers defined below.  */
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
57 \f
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
60 {
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
63
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
75 }
76 \f
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
80
81 static rtx
82 skip_insns_after_block (basic_block bb)
83 {
84 rtx insn, last_insn, next_head, prev;
85
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
89
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
91 {
92 if (insn == next_head)
93 break;
94
95 switch (GET_CODE (insn))
96 {
97 case BARRIER:
98 last_insn = insn;
99 continue;
100
101 case NOTE:
102 switch (NOTE_LINE_NUMBER (insn))
103 {
104 case NOTE_INSN_BLOCK_END:
105 last_insn = insn;
106 continue;
107 case NOTE_INSN_DELETED:
108 case NOTE_INSN_DELETED_LABEL:
109 continue;
110
111 default:
112 continue;
113 break;
114 }
115 break;
116
117 case CODE_LABEL:
118 if (NEXT_INSN (insn)
119 && JUMP_P (NEXT_INSN (insn))
120 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
121 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
122 {
123 insn = NEXT_INSN (insn);
124 last_insn = insn;
125 continue;
126 }
127 break;
128
129 default:
130 break;
131 }
132
133 break;
134 }
135
136 /* It is possible to hit contradictory sequence. For instance:
137
138 jump_insn
139 NOTE_INSN_BLOCK_BEG
140 barrier
141
142 Where barrier belongs to jump_insn, but the note does not. This can be
143 created by removing the basic block originally following
144 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
145
146 for (insn = last_insn; insn != BB_END (bb); insn = prev)
147 {
148 prev = PREV_INSN (insn);
149 if (NOTE_P (insn))
150 switch (NOTE_LINE_NUMBER (insn))
151 {
152 case NOTE_INSN_BLOCK_END:
153 case NOTE_INSN_DELETED:
154 case NOTE_INSN_DELETED_LABEL:
155 continue;
156 default:
157 reorder_insns (insn, insn, last_insn);
158 }
159 }
160
161 return last_insn;
162 }
163
164 /* Locate or create a label for a given basic block. */
165
166 static rtx
167 label_for_bb (basic_block bb)
168 {
169 rtx label = BB_HEAD (bb);
170
171 if (!LABEL_P (label))
172 {
173 if (dump_file)
174 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
175
176 label = block_label (bb);
177 }
178
179 return label;
180 }
181
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Unlinks the
   header/footer insns of each block (and of the whole function) from
   the main insn stream, stashing them in bb->il.rtl->header/footer
   and cfg_layout_function_header/footer respectively.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first NOTE_INSN_BASIC_BLOCK; everything before it is the
     function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's recorded end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns associated with BB (barriers etc.) become its
	 footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
225 \f
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *line_locators_locs;
static VEC(int,heap) *line_locators_lines;
static VEC(int,heap) *file_locators_locs;
static GTY(()) varray_type file_locators_files;
/* Locators assigned to the function prologue and epilogue; 0 until set
   by insn_locators_initialize.  */
int prologue_locator;
int epilogue_locator;
241
/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs: walk the insn stream once, deleting line-number notes
   and recording block/line/file transitions into the locator tables,
   then assign each active insn its locator.  Returns 0 (pass entry
   point; no extra TODO flags).  */

unsigned int
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  line_locators_locs = VEC_alloc (int, heap, 32);
  line_locators_lines = VEC_alloc (int, heap, 32);
  file_locators_locs = VEC_alloc (int, heap, 32);
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  /* NEXT is fetched up front because line-number notes are deleted
     from the stream as they are consumed.  */
  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
	{
	  /* Block begin/end notes must already have been removed.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
	  if (NOTE_LINE_NUMBER (insn) > 0)
	    {
	      expanded_location xloc;
	      NOTE_EXPANDED_LOCATION (xloc, insn);
	      line_number = xloc.line;
	      file_name = xloc.file;
	      delete_insn (insn);
	    }
	}
      else
	/* Jump tables never receive locators.  */
	active = (active_insn_p (insn)
		  && GET_CODE (PATTERN (insn)) != ADDR_VEC
		  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      /* Record transitions at active insns, at the end of the stream
	 (for the epilogue locator) and at the first known location
	 (for the prologue locator).  */
      if (active
	  || !next
	  || (!prologue_locator && file_name))
	{
	  if (last_block != block)
	    {
	      loc++;
	      VEC_safe_push (int, heap, block_locators_locs, loc);
	      VEC_safe_push (tree, gc, block_locators_blocks, block);
	      last_block = block;
	    }
	  if (last_line_number != line_number)
	    {
	      loc++;
	      VEC_safe_push (int, heap, line_locators_locs, loc);
	      VEC_safe_push (int, heap, line_locators_lines, line_number);
	      last_line_number = line_number;
	    }
	  /* NOTE(review): file names are compared by pointer, which
	     assumes identical names share storage - confirm against
	     the line-note emission code.  */
	  if (last_file_name != file_name)
	    {
	      loc++;
	      VEC_safe_push (int, heap, file_locators_locs, loc);
	      VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
	      last_file_name = file_name;
	    }
	  if (!prologue_locator && file_name)
	    prologue_locator = loc;
	  if (!next)
	    epilogue_locator = loc;
	  if (active)
	    INSN_LOCATOR (insn) = loc;
	}
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
  return 0;
}
332
/* Pass descriptor: replace NOTE-based location info by INSN_LOCATORs
   (see insn_locators_initialize above).  */
struct tree_opt_pass pass_insn_locators_initialize =
{
  "locators",                           /* name */
  NULL,                                 /* gate */
  insn_locators_initialize,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
349
/* Pass entry point: switch the IL into cfglayout mode with no extra
   cleanup flags.  Always returns 0 (no additional TODOs).  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
356
/* Pass entry point: leave cfglayout mode.  The current block order is
   recorded in the aux chain (the block preceding the exit block keeps
   aux == NULL) so cfg_layout_finalize preserves the present sequence.
   Always returns 0.  */
static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
370
/* Pass descriptor: enter cfglayout mode (see into_cfg_layout_mode).  */
struct tree_opt_pass pass_into_cfg_layout_mode =
{
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
387
/* Pass descriptor: leave cfglayout mode (see outof_cfg_layout_mode).  */
struct tree_opt_pass pass_outof_cfg_layout_mode =
{
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
404
405 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
406 found in the block tree. */
407
408 static void
409 set_block_levels (tree block, int level)
410 {
411 while (block)
412 {
413 BLOCK_NUMBER (block) = level;
414 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
415 block = BLOCK_CHAIN (block);
416 }
417 }
418 \f
419 /* Return sope resulting from combination of S1 and S2. */
420 static tree
421 choose_inner_scope (tree s1, tree s2)
422 {
423 if (!s1)
424 return s2;
425 if (!s2)
426 return s1;
427 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
428 return s1;
429 return s2;
430 }
431 \f
/* Emit lexical block notes needed to change scope from S1 to S2,
   inserting them immediately before ORIG_INSN.  First the common
   ancestor of S1 and S2 is found by walking up the deeper side
   (using the depth numbers assigned by set_block_levels), then
   BLOCK_END notes close the scopes from S1 up to the ancestor and
   BLOCK_BEG notes open the scopes down to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scopes up to their common ancestor.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Each note is emitted before the previous one, so the
     stream ends up with the outermost BLOCK_BEG first, giving correct
     nesting.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
475
/* Return lexical scope block INSN belongs to, or NULL_TREE when the
   insn has no locator or no scopes were recorded.  */
static tree
insn_scope (rtx insn)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search for the last recorded entry whose locator is <= LOC;
     MIN converges on that index.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
515
/* Return line number of the statement specified by the locator LOC,
   or 0 when LOC is zero or no line transitions were recorded.  */
int
locator_line (int loc)
{
  int max = VEC_length (int, line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  /* Binary search for the last entry with locator <= LOC (same scheme
     as insn_scope).  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, line_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (int, line_locators_lines, min);
}
542
/* Return line number of the statement that produced this insn
   (0 when the insn carries no locator).  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
549
/* Return source file of the statement specified by LOC, or NULL when
   LOC is zero or no file transitions were recorded.  */
const char *
locator_file (int loc)
{
  int max = VEC_length (int, file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  /* Binary search for the last entry with locator <= LOC (same scheme
     as insn_scope).  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, file_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}
576
/* Return source file of the statement that produced this insn
   (NULL when the insn carries no locator).  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
583
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  Walks the
   active insns in order, emitting close/open notes (via change_scope)
   wherever the insn's scope differs from the current one, then closes
   any still-open scopes at the end and calls reorder_blocks.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      /* Insns with no recorded scope stay in the current one.  */
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Use a throwaway
     note as the insertion anchor for the final BLOCK_END notes.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
636 \f
/* Given a reorder chain (the bb->aux links established by the caller),
   rearrange the code to match.  Four phases: (1) relink the raw insn
   stream in aux-chain order, reattaching each block's stashed
   header/footer insns; (2) add or invert jumps so every block's
   outgoing edges match its new physical successor; (3) renumber
   basic_block_info into the new order; (4) force explicit jumps around
   dead jump tables that cannot be fallen through.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* INSN tracks the tail of the stream built so far.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
       bb != 0;
       bb = bb->aux, index++)
    {
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* The aux chain must have visited every block exactly once.  */
  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  /* Invert only when the branch is predicted not taken,
		     so the likely path becomes the fallthru.  */
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-throu across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
	      (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	}
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = NUM_FIXED_BLOCKS;

  for (; bb; prev_bb = bb, bb = bb->aux, index ++)
    {
      bb->index = index;
      SET_BASIC_BLOCK (index, bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* NOTE(review): relies on FOR_EACH_EDGE leaving E == NULL when
	 the loop runs to completion (no fallthru edge) - confirm
	 against the edge_iterator implementation.  */
      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }
}
891 \f
892 /* Perform sanity checks on the insn chain.
893 1. Check that next/prev pointers are consistent in both the forward and
894 reverse direction.
895 2. Count insns in chain, going both directions, and check if equal.
896 3. Check that get_last_insn () returns the actual end of chain. */
897
898 void
899 verify_insn_chain (void)
900 {
901 rtx x, prevx, nextx;
902 int insn_cnt1, insn_cnt2;
903
904 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
905 x != 0;
906 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
907 gcc_assert (PREV_INSN (x) == prevx);
908
909 gcc_assert (prevx == get_last_insn ());
910
911 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
912 x != 0;
913 nextx = x, insn_cnt2++, x = PREV_INSN (x))
914 gcc_assert (NEXT_INSN (x) == nextx);
915
916 gcc_assert (insn_cnt1 == insn_cnt2);
917 }
918 \f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  Works on the bb->aux reorder chain:
   the fallthru-to-exit block is unlinked from its current position and
   moved to the end of the chain.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* Find the block with the fallthru edge into the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  /* bb->aux != NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = c->aux;

      c->aux = bb->aux;
      while (c->aux)
	c = c->aux;

      /* ... and append it at the end.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
964 \f
965 /* Return true in case it is possible to duplicate the basic block BB. */
966
967 /* We do not want to declare the function in a header file, since it should
968 only be used through the cfghooks interface, and we do not want to move
969 it to cfgrtl.c since it would require also moving quite a lot of related
970 code. */
971 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
972
973 bool
974 cfg_layout_can_duplicate_bb_p (basic_block bb)
975 {
976 /* Do not attempt to duplicate tablejumps, as we need to unshare
977 the dispatch table. This is difficult to do, as the instructions
978 computing jump destination may be hoisted outside the basic block. */
979 if (tablejump_p (BB_END (bb), NULL, NULL))
980 return false;
981
982 /* Do not duplicate blocks containing insns that can't be copied. */
983 if (targetm.cannot_copy_insn_p)
984 {
985 rtx insn = BB_HEAD (bb);
986 while (1)
987 {
988 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
989 return false;
990 if (insn == BB_END (bb))
991 break;
992 insn = NEXT_INSN (insn);
993 }
994 }
995
996 return true;
997 }
998
/* Duplicate the insns FROM..TO (inclusive) at the end of the current
   insn chain and return the first insn of the copy.  Code labels and
   dispatch tables are not copied; most notes are stripped.  The caller
   is expected to reorder the chain afterwards (see
   cfg_layout_duplicate_bb).  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  /* Labels are never duplicated; the copy stays label-less.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	      /* Debug code expect these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.
	       */
	      gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

	      /* It is possible that no_line_number is set and the note
		 won't be emitted.  */
	      emit_note_copy (insn);
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the anchor note; its successor is the first copied insn.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB.  The new block is placed
   before the exit block; BB's header/footer insn chains and (when
   available) its global life information are copied as well.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      /* Duplicate the stashed header chain and detach the copy.  */
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
	new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      /* Likewise for the footer chain.  */
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
	new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
		    bb->il.rtl->global_live_at_start);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
		    bb->il.rtl->global_live_at_end);
    }

  return new_bb;
}
1127 \f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  initialize_original_copy_tables ();

  /* Install the cfglayout-mode CFG hooks.  */
  cfg_layout_rtl_register_cfg_hooks ();

  /* Detach per-block header/footer insns from the insn stream.  */
  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
1146
1147 /* Splits superblocks. */
1148 void
1149 break_superblocks (void)
1150 {
1151 sbitmap superblocks;
1152 bool need = false;
1153 basic_block bb;
1154
1155 superblocks = sbitmap_alloc (last_basic_block);
1156 sbitmap_zero (superblocks);
1157
1158 FOR_EACH_BB (bb)
1159 if (bb->flags & BB_SUPERBLOCK)
1160 {
1161 bb->flags &= ~BB_SUPERBLOCK;
1162 SET_BIT (superblocks, bb->index);
1163 need = true;
1164 }
1165
1166 if (need)
1167 {
1168 rebuild_jump_labels (get_insns ());
1169 find_many_sub_basic_blocks (superblocks);
1170 }
1171
1172 free (superblocks);
1173 }
1174
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  Also
   clears the per-block layout state (header/footer/aux/visited) and
   restores the normal RTL CFG hooks.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  /* Only force the fallthru-to-exit block last when the target has no
     epilogue expander (i.e. uses an assembler epilogue).  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  /* Reset layout bookkeeping on every block, including entry/exit.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->il.rtl->header = bb->il.rtl->footer = NULL;
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  free_original_copy_tables ();
}
1214
1215 /* Checks whether all N blocks in BBS array can be copied. */
1216 bool
1217 can_copy_bbs_p (basic_block *bbs, unsigned n)
1218 {
1219 unsigned i;
1220 edge e;
1221 int ret = true;
1222
1223 for (i = 0; i < n; i++)
1224 bbs[i]->flags |= BB_DUPLICATED;
1225
1226 for (i = 0; i < n; i++)
1227 {
1228 /* In case we should redirect abnormal edge during duplication, fail. */
1229 edge_iterator ei;
1230 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1231 if ((e->flags & EDGE_ABNORMAL)
1232 && (e->dest->flags & BB_DUPLICATED))
1233 {
1234 ret = false;
1235 goto end;
1236 }
1237
1238 if (!can_duplicate_block_p (bbs[i]))
1239 {
1240 ret = false;
1241 break;
1242 }
1243 }
1244
1245 end:
1246 for (i = 0; i < n; i++)
1247 bbs[i]->flags &= ~BB_DUPLICATED;
1248
1249 return ret;
1250 }
1251
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to appropriate newly created block.  The
   function assigns bbs into loops (copy of basic block bb is assigned to
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (LOOPS structure that contains the information
   about dominators is passed to enable this).

   BASE is the superloop to that basic block belongs; if its header or latch
   is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in BBS array is preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  Each copy is
     inserted after the previous one so the BBS order is preserved in the
     insn stream; BB_DUPLICATED marks originals for the later passes.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  Only dominators that lie inside the copied region
     are remapped; if a block's immediate dominator is outside the region
     its copy keeps whatever duplicate_block established.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  For every copied edge whose original appears in
     EDGES, record the copy in NEW_EDGES at the same index; edges whose
     destination lies inside the region are redirected to the copy of
     that destination, edges leaving the region are left alone.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
1334
1335 #include "gt-cfglayout.h"