gcc/cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "vecprim.h"
41
42 /* Holds the interesting trailing notes for the function. */
43 rtx cfg_layout_function_footer, cfg_layout_function_header;
44
45 static rtx skip_insns_after_block (basic_block);
46 static void record_effective_endpoints (void);
47 static rtx label_for_bb (basic_block);
48 static void fixup_reorder_chain (void);
49
50 static void set_block_levels (tree, int);
51 static void change_scope (rtx, tree, tree);
52
53 void verify_insn_chain (void);
54 static void fixup_fallthru_exit_predecessor (void);
55 static tree insn_scope (rtx);
56 \f
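/* Unlink the insns from FIRST to LAST, inclusive, out of the current insn
   chain, reconnecting their former neighbors and updating the first/last
   insn pointers as needed; return FIRST. */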
57 rtx
58 unlink_insn_chain (rtx first, rtx last)
59 {
60 rtx prevfirst = PREV_INSN (first);
61 rtx nextlast = NEXT_INSN (last);
62
63 PREV_INSN (first) = NULL;
64 NEXT_INSN (last) = NULL;
65 if (prevfirst)
66 NEXT_INSN (prevfirst) = nextlast;
67 if (nextlast)
68 PREV_INSN (nextlast) = prevfirst;
69 else
70 set_last_insn (prevfirst);
71 if (!prevfirst)
72 set_first_insn (nextlast);
73 return first;
74 }
75 \f
76 /* Skip over inter-block insns occurring after BB which are typically
77 associated with BB (e.g., barriers). If there are any such insns,
78 we return the last one. Otherwise, we return the end of BB. */
79
80 static rtx
81 skip_insns_after_block (basic_block bb)
82 {
83 rtx insn, last_insn, next_head, prev;
84
85 next_head = NULL_RTX;
86 if (bb->next_bb != EXIT_BLOCK_PTR)
87 next_head = BB_HEAD (bb->next_bb);
88
89 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
90 {
91 if (insn == next_head)
92 break;
93
94 switch (GET_CODE (insn))
95 {
96 case BARRIER:
97 last_insn = insn;
98 continue;
99
100 case NOTE:
101 switch (NOTE_LINE_NUMBER (insn))
102 {
103 case NOTE_INSN_BLOCK_END:
104 last_insn = insn;
105 continue;
106 case NOTE_INSN_DELETED:
107 case NOTE_INSN_DELETED_LABEL:
108 continue;
109
110 default:
111 continue;
112 break;
113 }
114 break;
115
116 case CODE_LABEL:
117 if (NEXT_INSN (insn)
118 && JUMP_P (NEXT_INSN (insn))
119 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
120 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
121 {
122 insn = NEXT_INSN (insn);
123 last_insn = insn;
124 continue;
125 }
126 break;
127
128 default:
129 break;
130 }
131
132 break;
133 }
134
135 /* It is possible to hit a contradictory sequence. For instance:
136
137 jump_insn
138 NOTE_INSN_BLOCK_BEG
139 barrier
140
141 where the barrier belongs to the jump_insn, but the note does not. This can be
142 created by removing the basic block originally following
143 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
144
145 for (insn = last_insn; insn != BB_END (bb); insn = prev)
146 {
147 prev = PREV_INSN (insn);
148 if (NOTE_P (insn))
149 switch (NOTE_LINE_NUMBER (insn))
150 {
151 case NOTE_INSN_BLOCK_END:
152 case NOTE_INSN_DELETED:
153 case NOTE_INSN_DELETED_LABEL:
154 continue;
155 default:
156 reorder_insns (insn, insn, last_insn);
157 }
158 }
159
160 return last_insn;
161 }
162
163 /* Locate or create a label for a given basic block. */
164
165 static rtx
166 label_for_bb (basic_block bb)
167 {
168 rtx label = BB_HEAD (bb);
169
170 if (!LABEL_P (label))
171 {
172 if (dump_file)
173 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
174
175 label = block_label (bb);
176 }
177
178 return label;
179 }
180
181 /* Locate the effective beginning and end of the insn chain for each
182 block, as defined by skip_insns_after_block above. */
183
184 static void
185 record_effective_endpoints (void)
186 {
187 rtx next_insn;
188 basic_block bb;
189 rtx insn;
190
191 for (insn = get_insns ();
192 insn
193 && NOTE_P (insn)
194 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
195 insn = NEXT_INSN (insn))
196 continue;
197 /* No basic blocks at all? */
198 gcc_assert (insn);
199
200 if (PREV_INSN (insn))
201 cfg_layout_function_header =
202 unlink_insn_chain (get_insns (), PREV_INSN (insn));
203 else
204 cfg_layout_function_header = NULL_RTX;
205
206 next_insn = get_insns ();
207 FOR_EACH_BB (bb)
208 {
209 rtx end;
210
211 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
212 bb->il.rtl->header = unlink_insn_chain (next_insn,
213 PREV_INSN (BB_HEAD (bb)));
214 end = skip_insns_after_block (bb);
215 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
216 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
217 next_insn = NEXT_INSN (BB_END (bb));
218 }
219
220 cfg_layout_function_footer = next_insn;
221 if (cfg_layout_function_footer)
222 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
223 }
224 \f
225 /* Data structures representing the mapping of INSN_LOCATOR into scope blocks,
226 line numbers and files. In order to be GGC friendly we need to use separate
227 varrays. This also slightly improves the memory locality in the binary search.
228 The _locs arrays contain the locators at which the given property changes.
229 block_locators_blocks contains the scope block that is used for all insn
230 locators greater than the corresponding block_locators_locs value and smaller
231 than the following one. Similarly for the other properties. */
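/* For instance, with block_locators_locs = {1, 5, 9} and
   block_locators_blocks = {B1, B2, B3}, an insn locator of 6 lies between
   5 and 9 and therefore resolves to scope B2; insn_scope below performs
   exactly this lookup via binary search. */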
232 static VEC(int,heap) *block_locators_locs;
233 static GTY(()) VEC(tree,gc) *block_locators_blocks;
234 static VEC(int,heap) *line_locators_locs;
235 static VEC(int,heap) *line_locators_lines;
236 static VEC(int,heap) *file_locators_locs;
237 static GTY(()) varray_type file_locators_files;
238 int prologue_locator;
239 int epilogue_locator;
240
241 /* During the RTL expansion the lexical blocks and line numbers are
242 represented via INSN_NOTEs. Replace them with a representation using
243 INSN_LOCATORs. */
244
245 unsigned int
246 insn_locators_initialize (void)
247 {
248 tree block = NULL;
249 tree last_block = NULL;
250 rtx insn, next;
251 int loc = 0;
252 int line_number = 0, last_line_number = 0;
253 const char *file_name = NULL, *last_file_name = NULL;
254
255 prologue_locator = epilogue_locator = 0;
256
257 block_locators_locs = VEC_alloc (int, heap, 32);
258 block_locators_blocks = VEC_alloc (tree, gc, 32);
259 line_locators_locs = VEC_alloc (int, heap, 32);
260 line_locators_lines = VEC_alloc (int, heap, 32);
261 file_locators_locs = VEC_alloc (int, heap, 32);
262 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
263
264 for (insn = get_insns (); insn; insn = next)
265 {
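      /* Locators are attached only to "active" insns: real instructions
         as identified by active_insn_p, excluding dispatch tables
         (ADDR_VEC and ADDR_DIFF_VEC). */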
266 int active = 0;
267
268 next = NEXT_INSN (insn);
269
270 if (NOTE_P (insn))
271 {
272 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
273 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
274 if (NOTE_LINE_NUMBER (insn) > 0)
275 {
276 expanded_location xloc;
277 NOTE_EXPANDED_LOCATION (xloc, insn);
278 line_number = xloc.line;
279 file_name = xloc.file;
280 delete_insn (insn);
281 }
282 }
283 else
284 active = (active_insn_p (insn)
285 && GET_CODE (PATTERN (insn)) != ADDR_VEC
286 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
287
288 check_block_change (insn, &block);
289
290 if (active
291 || !next
292 || (!prologue_locator && file_name))
293 {
294 if (last_block != block)
295 {
296 loc++;
297 VEC_safe_push (int, heap, block_locators_locs, loc);
298 VEC_safe_push (tree, gc, block_locators_blocks, block);
299 last_block = block;
300 }
301 if (last_line_number != line_number)
302 {
303 loc++;
304 VEC_safe_push (int, heap, line_locators_locs, loc);
305 VEC_safe_push (int, heap, line_locators_lines, line_number);
306 last_line_number = line_number;
307 }
308 if (last_file_name != file_name)
309 {
310 loc++;
311 VEC_safe_push (int, heap, file_locators_locs, loc);
312 VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
313 last_file_name = file_name;
314 }
315 if (!prologue_locator && file_name)
316 prologue_locator = loc;
317 if (!next)
318 epilogue_locator = loc;
319 if (active)
320 INSN_LOCATOR (insn) = loc;
321 }
322 }
323
324 /* Tag the blocks with a depth number so that change_scope can find
325 the common parent easily. */
326 set_block_levels (DECL_INITIAL (cfun->decl), 0);
327
328 free_block_changes ();
329 return 0;
330 }
331
332 struct tree_opt_pass pass_insn_locators_initialize =
333 {
334 "locators", /* name */
335 NULL, /* gate */
336 insn_locators_initialize, /* execute */
337 NULL, /* sub */
338 NULL, /* next */
339 0, /* static_pass_number */
340 0, /* tv_id */
341 0, /* properties_required */
342 0, /* properties_provided */
343 0, /* properties_destroyed */
344 0, /* todo_flags_start */
345 TODO_dump_func, /* todo_flags_finish */
346 0 /* letter */
347 };
348
349
350 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
351 found in the block tree. */
352
353 static void
354 set_block_levels (tree block, int level)
355 {
356 while (block)
357 {
358 BLOCK_NUMBER (block) = level;
359 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
360 block = BLOCK_CHAIN (block);
361 }
362 }
363 \f
364 /* Return the scope resulting from the combination of S1 and S2. */
365 static tree
366 choose_inner_scope (tree s1, tree s2)
367 {
368 if (!s1)
369 return s2;
370 if (!s2)
371 return s1;
372 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
373 return s1;
374 return s2;
375 }
376 \f
377 /* Emit lexical block notes needed to change scope from S1 to S2. */
378
379 static void
380 change_scope (rtx orig_insn, tree s1, tree s2)
381 {
382 rtx insn = orig_insn;
383 tree com = NULL_TREE;
384 tree ts1 = s1, ts2 = s2;
385 tree s;
386
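  /* Walk TS1 and TS2 up the block tree, using the depth numbers assigned by
     set_block_levels, until their common ancestor COM is found. */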
387 while (ts1 != ts2)
388 {
389 gcc_assert (ts1 && ts2);
390 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
391 ts1 = BLOCK_SUPERCONTEXT (ts1);
392 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
393 ts2 = BLOCK_SUPERCONTEXT (ts2);
394 else
395 {
396 ts1 = BLOCK_SUPERCONTEXT (ts1);
397 ts2 = BLOCK_SUPERCONTEXT (ts2);
398 }
399 }
400 com = ts1;
401
402 /* Close scopes. */
403 s = s1;
404 while (s != com)
405 {
406 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
407 NOTE_BLOCK (note) = s;
408 s = BLOCK_SUPERCONTEXT (s);
409 }
410
411 /* Open scopes. */
412 s = s2;
413 while (s != com)
414 {
415 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
416 NOTE_BLOCK (insn) = s;
417 s = BLOCK_SUPERCONTEXT (s);
418 }
419 }
420
421 /* Return the lexical scope block that INSN belongs to. */
422 static tree
423 insn_scope (rtx insn)
424 {
425 int max = VEC_length (int, block_locators_locs);
426 int min = 0;
427 int loc = INSN_LOCATOR (insn);
428
429 /* When block_locators_locs was initialized, the pro- and epilogue
430 insns didn't exist yet and can therefore not be found this way.
431 But we know that they belong to the outermost block of the
432 current function.
433 Without this test, the prologue would be put inside the block of
434 the first valid instruction in the function, and when that first
435 insn is part of an inlined function then the low_pc of that
436 inlined function is messed up. Likewise for the epilogue and
437 the last valid instruction. */
438 if (loc == prologue_locator || loc == epilogue_locator)
439 return DECL_INITIAL (cfun->decl);
440
441 if (!max || !loc)
442 return NULL;
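  /* Binary search for the greatest locator value that is not larger than
     LOC; the block recorded at that position is the insn's scope. */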
443 while (1)
444 {
445 int pos = (min + max) / 2;
446 int tmp = VEC_index (int, block_locators_locs, pos);
447
448 if (tmp <= loc && min != pos)
449 min = pos;
450 else if (tmp > loc && max != pos)
451 max = pos;
452 else
453 {
454 min = pos;
455 break;
456 }
457 }
458 return VEC_index (tree, block_locators_blocks, min);
459 }
460
461 /* Return line number of the statement specified by the locator. */
462 int
463 locator_line (int loc)
464 {
465 int max = VEC_length (int, line_locators_locs);
466 int min = 0;
467
468 if (!max || !loc)
469 return 0;
470 while (1)
471 {
472 int pos = (min + max) / 2;
473 int tmp = VEC_index (int, line_locators_locs, pos);
474
475 if (tmp <= loc && min != pos)
476 min = pos;
477 else if (tmp > loc && max != pos)
478 max = pos;
479 else
480 {
481 min = pos;
482 break;
483 }
484 }
485 return VEC_index (int, line_locators_lines, min);
486 }
487
488 /* Return line number of the statement that produced this insn. */
489 int
490 insn_line (rtx insn)
491 {
492 return locator_line (INSN_LOCATOR (insn));
493 }
494
495 /* Return source file of the statement specified by LOC. */
496 const char *
497 locator_file (int loc)
498 {
499 int max = VEC_length (int, file_locators_locs);
500 int min = 0;
501
502 if (!max || !loc)
503 return NULL;
504 while (1)
505 {
506 int pos = (min + max) / 2;
507 int tmp = VEC_index (int, file_locators_locs, pos);
508
509 if (tmp <= loc && min != pos)
510 min = pos;
511 else if (tmp > loc && max != pos)
512 max = pos;
513 else
514 {
515 min = pos;
516 break;
517 }
518 }
519 return VARRAY_CHAR_PTR (file_locators_files, min);
520 }
521
522 /* Return source file of the statement that produced this insn. */
523 const char *
524 insn_file (rtx insn)
525 {
526 return locator_file (INSN_LOCATOR (insn));
527 }
528
529 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
530 on the scope tree and the newly reordered instructions. */
531
532 void
533 reemit_insn_block_notes (void)
534 {
535 tree cur_block = DECL_INITIAL (cfun->decl);
536 rtx insn, note;
537
538 insn = get_insns ();
539 if (!active_insn_p (insn))
540 insn = next_active_insn (insn);
541 for (; insn; insn = next_active_insn (insn))
542 {
543 tree this_block;
544
545 /* Avoid putting scope notes between jump table and its label. */
546 if (JUMP_P (insn)
547 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
548 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
549 continue;
550
551 this_block = insn_scope (insn);
552 /* For sequences, compute the scope resulting from merging all scopes
553 of the instructions nested inside. */
554 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
555 {
556 int i;
557 rtx body = PATTERN (insn);
558
559 this_block = NULL;
560 for (i = 0; i < XVECLEN (body, 0); i++)
561 this_block = choose_inner_scope (this_block,
562 insn_scope (XVECEXP (body, 0, i)));
563 }
564 if (! this_block)
565 continue;
566
567 if (this_block != cur_block)
568 {
569 change_scope (insn, cur_block, this_block);
570 cur_block = this_block;
571 }
572 }
573
574 /* change_scope emits before the insn, not after. */
575 note = emit_note (NOTE_INSN_DELETED);
576 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
577 delete_insn (note);
578
579 reorder_blocks ();
580 }
581 \f
582 /* Given a reorder chain, rearrange the code to match. */
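/* The desired order is recorded in the bb->aux fields: each basic block's aux
   pointer names the block that should follow it in the insn stream, and a
   NULL aux terminates the chain. */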
583
584 static void
585 fixup_reorder_chain (void)
586 {
587 basic_block bb, prev_bb;
588 int index;
589 rtx insn = NULL;
590
591 if (cfg_layout_function_header)
592 {
593 set_first_insn (cfg_layout_function_header);
594 insn = cfg_layout_function_header;
595 while (NEXT_INSN (insn))
596 insn = NEXT_INSN (insn);
597 }
598
599 /* First do the bulk reordering -- rechain the blocks without regard to
600 the needed changes to jumps and labels. */
601
602 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
603 bb != 0;
604 bb = bb->aux, index++)
605 {
606 if (bb->il.rtl->header)
607 {
608 if (insn)
609 NEXT_INSN (insn) = bb->il.rtl->header;
610 else
611 set_first_insn (bb->il.rtl->header);
612 PREV_INSN (bb->il.rtl->header) = insn;
613 insn = bb->il.rtl->header;
614 while (NEXT_INSN (insn))
615 insn = NEXT_INSN (insn);
616 }
617 if (insn)
618 NEXT_INSN (insn) = BB_HEAD (bb);
619 else
620 set_first_insn (BB_HEAD (bb));
621 PREV_INSN (BB_HEAD (bb)) = insn;
622 insn = BB_END (bb);
623 if (bb->il.rtl->footer)
624 {
625 NEXT_INSN (insn) = bb->il.rtl->footer;
626 PREV_INSN (bb->il.rtl->footer) = insn;
627 while (NEXT_INSN (insn))
628 insn = NEXT_INSN (insn);
629 }
630 }
631
632 gcc_assert (index == n_basic_blocks);
633
634 NEXT_INSN (insn) = cfg_layout_function_footer;
635 if (cfg_layout_function_footer)
636 PREV_INSN (cfg_layout_function_footer) = insn;
637
638 while (NEXT_INSN (insn))
639 insn = NEXT_INSN (insn);
640
641 set_last_insn (insn);
642 #ifdef ENABLE_CHECKING
643 verify_insn_chain ();
644 #endif
645 delete_dead_jumptables ();
646
647 /* Now add jumps and labels as needed to match the blocks' new
648 outgoing edges. */
649
650 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
651 {
652 edge e_fall, e_taken, e;
653 rtx bb_end_insn;
654 basic_block nb;
655 edge_iterator ei;
656
657 if (EDGE_COUNT (bb->succs) == 0)
658 continue;
659
660 /* Find the old fallthru edge, and another non-EH edge for
661 a taken jump. */
662 e_taken = e_fall = NULL;
663
664 FOR_EACH_EDGE (e, ei, bb->succs)
665 if (e->flags & EDGE_FALLTHRU)
666 e_fall = e;
667 else if (! (e->flags & EDGE_EH))
668 e_taken = e;
669
670 bb_end_insn = BB_END (bb);
671 if (JUMP_P (bb_end_insn))
672 {
673 if (any_condjump_p (bb_end_insn))
674 {
675 /* If the old fallthru is still next, nothing to do. */
676 if (bb->aux == e_fall->dest
677 || e_fall->dest == EXIT_BLOCK_PTR)
678 continue;
679
680 /* The degenerate case of a conditional jump jumping to the next
681 instruction can happen for jumps with side effects. We need
682 to construct a forwarder block, and this will be done just
683 fine by force_nonfallthru below. */
684 if (!e_taken)
685 ;
686
687 /* There is another special case: if *neither* block is next,
688 such as happens at the very end of a function, then we'll
689 need to add a new unconditional jump. Choose the taken
690 edge based on known or assumed probability. */
691 else if (bb->aux != e_taken->dest)
692 {
693 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
694
695 if (note
696 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
697 && invert_jump (bb_end_insn,
698 (e_fall->dest == EXIT_BLOCK_PTR
699 ? NULL_RTX
700 : label_for_bb (e_fall->dest)), 0))
701 {
702 e_fall->flags &= ~EDGE_FALLTHRU;
703 #ifdef ENABLE_CHECKING
704 gcc_assert (could_fall_through
705 (e_taken->src, e_taken->dest));
706 #endif
707 e_taken->flags |= EDGE_FALLTHRU;
708 update_br_prob_note (bb);
709 e = e_fall, e_fall = e_taken, e_taken = e;
710 }
711 }
712
713 /* If the "jumping" edge is a crossing edge, and the fall
714 through edge is non-crossing, leave things as they are. */
715 else if ((e_taken->flags & EDGE_CROSSING)
716 && !(e_fall->flags & EDGE_CROSSING))
717 continue;
718
719 /* Otherwise we can try to invert the jump. This will
720 basically never fail; however, keep up the pretense. */
721 else if (invert_jump (bb_end_insn,
722 (e_fall->dest == EXIT_BLOCK_PTR
723 ? NULL_RTX
724 : label_for_bb (e_fall->dest)), 0))
725 {
726 e_fall->flags &= ~EDGE_FALLTHRU;
727 #ifdef ENABLE_CHECKING
728 gcc_assert (could_fall_through
729 (e_taken->src, e_taken->dest));
730 #endif
731 e_taken->flags |= EDGE_FALLTHRU;
732 update_br_prob_note (bb);
733 continue;
734 }
735 }
736 else
737 {
738 /* Otherwise we have some return, switch or computed
739 jump. In the 99% case, there should not have been a
740 fallthru edge. */
741 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
742 continue;
743 }
744 }
745 else
746 {
747 /* No fallthru implies a noreturn function with EH edges, or
748 something similarly bizarre. In any case, we don't need to
749 do anything. */
750 if (! e_fall)
751 continue;
752
753 /* If the fallthru block is still next, nothing to do. */
754 if (bb->aux == e_fall->dest)
755 continue;
756
757 /* A fallthru to exit block. */
758 if (e_fall->dest == EXIT_BLOCK_PTR)
759 continue;
760 }
761
762 /* We got here if we need to add a new jump insn. */
763 nb = force_nonfallthru (e_fall);
764 if (nb)
765 {
766 nb->il.rtl->visited = 1;
767 nb->aux = bb->aux;
768 bb->aux = nb;
769 /* Don't process this new block. */
770 bb = nb;
771
772 /* Make sure the new bb is tagged for the correct section (same as the
773 fall-through source, since you cannot fall through across
774 section boundaries). */
775 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
776 if (flag_reorder_blocks_and_partition
777 && targetm.have_named_sections
778 && JUMP_P (BB_END (bb))
779 && !any_condjump_p (BB_END (bb))
780 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
781 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
782 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
783 }
784 }
785
786 /* Put basic_block_info in the new order. */
787
788 if (dump_file)
789 {
790 fprintf (dump_file, "Reordered sequence:\n");
791 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
792 bb;
793 bb = bb->aux, index++)
794 {
795 fprintf (dump_file, " %i ", index);
796 if (get_bb_original (bb))
797 fprintf (dump_file, "duplicate of %i ",
798 get_bb_original (bb)->index);
799 else if (forwarder_block_p (bb)
800 && !LABEL_P (BB_HEAD (bb)))
801 fprintf (dump_file, "compensation ");
802 else
803 fprintf (dump_file, "bb %i ", bb->index);
804 fprintf (dump_file, " [%i]\n", bb->frequency);
805 }
806 }
807
808 prev_bb = ENTRY_BLOCK_PTR;
809 bb = ENTRY_BLOCK_PTR->next_bb;
810 index = NUM_FIXED_BLOCKS;
811
812 for (; bb; prev_bb = bb, bb = bb->aux, index ++)
813 {
814 bb->index = index;
815 SET_BASIC_BLOCK (index, bb);
816
817 bb->prev_bb = prev_bb;
818 prev_bb->next_bb = bb;
819 }
820 prev_bb->next_bb = EXIT_BLOCK_PTR;
821 EXIT_BLOCK_PTR->prev_bb = prev_bb;
822
823 /* Annoying special case - jump around dead jumptables left in the code. */
824 FOR_EACH_BB (bb)
825 {
826 edge e;
827 edge_iterator ei;
828
829 FOR_EACH_EDGE (e, ei, bb->succs)
830 if (e->flags & EDGE_FALLTHRU)
831 break;
832
833 if (e && !can_fallthru (e->src, e->dest))
834 force_nonfallthru (e);
835 }
836 }
837 \f
838 /* Perform sanity checks on the insn chain.
839 1. Check that next/prev pointers are consistent in both the forward and
840 reverse direction.
841 2. Count insns in chain, going both directions, and check if equal.
842 3. Check that get_last_insn () returns the actual end of chain. */
843
844 void
845 verify_insn_chain (void)
846 {
847 rtx x, prevx, nextx;
848 int insn_cnt1, insn_cnt2;
849
850 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
851 x != 0;
852 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
853 gcc_assert (PREV_INSN (x) == prevx);
854
855 gcc_assert (prevx == get_last_insn ());
856
857 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
858 x != 0;
859 nextx = x, insn_cnt2++, x = PREV_INSN (x))
860 gcc_assert (NEXT_INSN (x) == nextx);
861
862 gcc_assert (insn_cnt1 == insn_cnt2);
863 }
864 \f
865 /* If we have assembler epilogues, the block falling through to exit must
866 be the last one in the reordered chain when we reach final. Ensure
867 that this condition is met. */
868 static void
869 fixup_fallthru_exit_predecessor (void)
870 {
871 edge e;
872 edge_iterator ei;
873 basic_block bb = NULL;
874
875 /* This transformation is not valid before reload, because we might
876 separate a call from the instruction that copies the return
877 value. */
878 gcc_assert (reload_completed);
879
880 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
881 if (e->flags & EDGE_FALLTHRU)
882 bb = e->src;
883
884 if (bb && bb->aux)
885 {
886 basic_block c = ENTRY_BLOCK_PTR->next_bb;
887
888 /* If the very first block is the one with the fall-through exit
889 edge, we have to split that block. */
890 if (c == bb)
891 {
892 bb = split_block (bb, NULL)->dest;
893 bb->aux = c->aux;
894 c->aux = bb;
895 bb->il.rtl->footer = c->il.rtl->footer;
896 c->il.rtl->footer = NULL;
897 }
898
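      /* Unlink BB from the aux chain and re-append it at the end, so that it
         is emitted last and can fall through to the exit block. */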
899 while (c->aux != bb)
900 c = c->aux;
901
902 c->aux = bb->aux;
903 while (c->aux)
904 c = c->aux;
905
906 c->aux = bb;
907 bb->aux = NULL;
908 }
909 }
910 \f
911 /* Return true in case it is possible to duplicate the basic block BB. */
912
913 /* We do not want to declare the function in a header file, since it should
914 only be used through the cfghooks interface, and we do not want to move
915 it to cfgrtl.c, since it would also require moving quite a lot of related
916 code. */
917 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
918
919 bool
920 cfg_layout_can_duplicate_bb_p (basic_block bb)
921 {
922 /* Do not attempt to duplicate tablejumps, as we need to unshare
923 the dispatch table. This is difficult to do, as the instructions
924 computing jump destination may be hoisted outside the basic block. */
925 if (tablejump_p (BB_END (bb), NULL, NULL))
926 return false;
927
928 /* Do not duplicate blocks containing insns that can't be copied. */
929 if (targetm.cannot_copy_insn_p)
930 {
931 rtx insn = BB_HEAD (bb);
932 while (1)
933 {
934 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
935 return false;
936 if (insn == BB_END (bb))
937 break;
938 insn = NEXT_INSN (insn);
939 }
940 }
941
942 return true;
943 }
944
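/* Make a copy of the insn chain between FROM and TO, inclusive, placing the
   copy at the end of the current insn chain, and return the first insn of the
   copy. Dispatch tables, code labels and most notes are intentionally not
   duplicated. */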
945 rtx
946 duplicate_insn_chain (rtx from, rtx to)
947 {
948 rtx insn, last;
949
950 /* Avoid updating the boundaries of the previous basic block. The
951 note will get removed from the insn stream in fixup. */
952 last = emit_note (NOTE_INSN_DELETED);
953
954 /* Create the copy at the end of the insn chain. The chain will
955 be reordered later. */
956 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
957 {
958 switch (GET_CODE (insn))
959 {
960 case INSN:
961 case CALL_INSN:
962 case JUMP_INSN:
963 /* Avoid copying dispatch tables. We never duplicate
964 tablejumps, so this can only happen if the table got
965 moved far from the original jump. */
966 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
967 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
968 break;
969 emit_copy_of_insn_after (insn, get_last_insn ());
970 break;
971
972 case CODE_LABEL:
973 break;
974
975 case BARRIER:
976 emit_barrier ();
977 break;
978
979 case NOTE:
980 switch (NOTE_LINE_NUMBER (insn))
981 {
982 /* In case the prologue is empty and the function contains a label
983 in the first BB, we may want to copy the block. */
984 case NOTE_INSN_PROLOGUE_END:
985
986 case NOTE_INSN_DELETED:
987 case NOTE_INSN_DELETED_LABEL:
988 /* No problem to strip these. */
989 case NOTE_INSN_EPILOGUE_BEG:
990 /* Debug code expects these notes to exist just once.
991 Keep them in the master copy.
992 ??? It probably makes more sense to duplicate them for each
993 epilogue copy. */
994 case NOTE_INSN_FUNCTION_BEG:
995 /* There is always just a single entry to a function. */
996 case NOTE_INSN_BASIC_BLOCK:
997 break;
998
999 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1000 emit_note_copy (insn);
1001 break;
1002
1003 default:
1004 /* All other notes should have already been eliminated.
1005 */
1006 gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1007
1008 /* It is possible that no_line_number is set and the note
1009 won't be emitted. */
1010 emit_note_copy (insn);
1011 }
1012 break;
1013 default:
1014 gcc_unreachable ();
1015 }
1016 }
1017 insn = NEXT_INSN (last);
1018 delete_insn (last);
1019 return insn;
1020 }
1021 /* Create a duplicate of the basic block BB. */
1022
1023 /* We do not want to declare the function in a header file, since it should
1024 only be used through the cfghooks interface, and we do not want to move
1025 it to cfgrtl.c, since it would also require moving quite a lot of related
1026 code. */
1027 extern basic_block cfg_layout_duplicate_bb (basic_block);
1028
1029 basic_block
1030 cfg_layout_duplicate_bb (basic_block bb)
1031 {
1032 rtx insn;
1033 basic_block new_bb;
1034
1035 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1036 new_bb = create_basic_block (insn,
1037 insn ? get_last_insn () : NULL,
1038 EXIT_BLOCK_PTR->prev_bb);
1039
1040 BB_COPY_PARTITION (new_bb, bb);
1041 if (bb->il.rtl->header)
1042 {
1043 insn = bb->il.rtl->header;
1044 while (NEXT_INSN (insn))
1045 insn = NEXT_INSN (insn);
1046 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1047 if (insn)
1048 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1049 }
1050
1051 if (bb->il.rtl->footer)
1052 {
1053 insn = bb->il.rtl->footer;
1054 while (NEXT_INSN (insn))
1055 insn = NEXT_INSN (insn);
1056 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1057 if (insn)
1058 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1059 }
1060
1061 if (bb->il.rtl->global_live_at_start)
1062 {
1063 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1064 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1065 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
1066 bb->il.rtl->global_live_at_start);
1067 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
1068 bb->il.rtl->global_live_at_end);
1069 }
1070
1071 return new_bb;
1072 }
1073 \f
1074 /* Main entry point to this module - initialize the data structures for
1075 CFG layout changes. It keeps LOOPS up-to-date if not null.
1076
1077 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1078 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1079 to date. */
1080
1081 void
1082 cfg_layout_initialize (unsigned int flags)
1083 {
1084 initialize_original_copy_tables ();
1085
1086 cfg_layout_rtl_register_cfg_hooks ();
1087
1088 record_effective_endpoints ();
1089
1090 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1091 }
1092
1093 /* Splits superblocks. */
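/* A block flagged BB_SUPERBLOCK may actually contain several basic blocks;
   clear the flag and let find_many_sub_basic_blocks rediscover them. */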
1094 void
1095 break_superblocks (void)
1096 {
1097 sbitmap superblocks;
1098 bool need = false;
1099 basic_block bb;
1100
1101 superblocks = sbitmap_alloc (last_basic_block);
1102 sbitmap_zero (superblocks);
1103
1104 FOR_EACH_BB (bb)
1105 if (bb->flags & BB_SUPERBLOCK)
1106 {
1107 bb->flags &= ~BB_SUPERBLOCK;
1108 SET_BIT (superblocks, bb->index);
1109 need = true;
1110 }
1111
1112 if (need)
1113 {
1114 rebuild_jump_labels (get_insns ());
1115 find_many_sub_basic_blocks (superblocks);
1116 }
1117
1118 free (superblocks);
1119 }
1120
1121 /* Finalize the changes: reorder the insn list according to the sequence
1122 specified by the aux pointers, insert compensation code, and rebuild the scope forest. */
1123
1124 void
1125 cfg_layout_finalize (void)
1126 {
1127 basic_block bb;
1128
1129 #ifdef ENABLE_CHECKING
1130 verify_flow_info ();
1131 #endif
1132 rtl_register_cfg_hooks ();
1133 if (reload_completed
1134 #ifdef HAVE_epilogue
1135 && !HAVE_epilogue
1136 #endif
1137 )
1138 fixup_fallthru_exit_predecessor ();
1139 fixup_reorder_chain ();
1140
1141 #ifdef ENABLE_CHECKING
1142 verify_insn_chain ();
1143 #endif
1144 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1145 {
1146 bb->il.rtl->header = bb->il.rtl->footer = NULL;
1147 bb->aux = NULL;
1148 bb->il.rtl->visited = 0;
1149 }
1150
1151 break_superblocks ();
1152
1153 #ifdef ENABLE_CHECKING
1154 verify_flow_info ();
1155 #endif
1156
1157 free_original_copy_tables ();
1158 }
1159
1160 /* Check whether all N blocks in the BBS array can be copied. */
1161 bool
1162 can_copy_bbs_p (basic_block *bbs, unsigned n)
1163 {
1164 unsigned i;
1165 edge e;
1166 int ret = true;
1167
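  /* Temporarily mark the candidate blocks with BB_DUPLICATED so that edges
     whose destination lies inside the region can be recognized below. */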
1168 for (i = 0; i < n; i++)
1169 bbs[i]->flags |= BB_DUPLICATED;
1170
1171 for (i = 0; i < n; i++)
1172 {
1173 /* If we would need to redirect an abnormal edge during duplication, fail. */
1174 edge_iterator ei;
1175 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1176 if ((e->flags & EDGE_ABNORMAL)
1177 && (e->dest->flags & BB_DUPLICATED))
1178 {
1179 ret = false;
1180 goto end;
1181 }
1182
1183 if (!can_duplicate_block_p (bbs[i]))
1184 {
1185 ret = false;
1186 break;
1187 }
1188 }
1189
1190 end:
1191 for (i = 0; i < n; i++)
1192 bbs[i]->flags &= ~BB_DUPLICATED;
1193
1194 return ret;
1195 }
1196
1197 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1198 are placed into array NEW_BBS in the same order. Edges from basic blocks
1199 in BBS are also duplicated, and copies of those that lead into BBS are
1200 redirected to the appropriate newly created block. The
1201 function assigns bbs to loops (the copy of basic block bb is assigned to
1202 the bb->loop_father->copy loop, so this must be set up correctly in advance)
1203 and updates dominators locally (dominator information must therefore be
1204 available and up to date).
1205
1206 BASE is the superloop to which the basic blocks belong; if its header or latch
1207 is copied, we do not set the new blocks as header or latch.
1208
1209 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1210 also in the same order.
1211
1212 Newly created basic blocks are put after the basic block AFTER in the
1213 instruction stream, and the order of the blocks in BBS array is preserved. */
1214
1215 void
1216 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1217 edge *edges, unsigned num_edges, edge *new_edges,
1218 struct loop *base, basic_block after)
1219 {
1220 unsigned i, j;
1221 basic_block bb, new_bb, dom_bb;
1222 edge e;
1223
1224 /* Duplicate bbs, update dominators, assign bbs to loops. */
1225 for (i = 0; i < n; i++)
1226 {
1227 /* Duplicate. */
1228 bb = bbs[i];
1229 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1230 after = new_bb;
1231 bb->flags |= BB_DUPLICATED;
1232 /* Possibly set loop header. */
1233 if (bb->loop_father->header == bb && bb->loop_father != base)
1234 new_bb->loop_father->header = new_bb;
1235 /* Or latch. */
1236 if (bb->loop_father->latch == bb && bb->loop_father != base)
1237 new_bb->loop_father->latch = new_bb;
1238 }
1239
1240 /* Set dominators. */
1241 for (i = 0; i < n; i++)
1242 {
1243 bb = bbs[i];
1244 new_bb = new_bbs[i];
1245
1246 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1247 if (dom_bb->flags & BB_DUPLICATED)
1248 {
1249 dom_bb = get_bb_copy (dom_bb);
1250 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1251 }
1252 }
1253
1254 /* Redirect edges. */
1255 for (j = 0; j < num_edges; j++)
1256 new_edges[j] = NULL;
1257 for (i = 0; i < n; i++)
1258 {
1259 edge_iterator ei;
1260 new_bb = new_bbs[i];
1261 bb = bbs[i];
1262
1263 FOR_EACH_EDGE (e, ei, new_bb->succs)
1264 {
1265 for (j = 0; j < num_edges; j++)
1266 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1267 new_edges[j] = e;
1268
1269 if (!(e->dest->flags & BB_DUPLICATED))
1270 continue;
1271 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1272 }
1273 }
1274
1275 /* Clear information about duplicates. */
1276 for (i = 0; i < n; i++)
1277 bbs[i]->flags &= ~BB_DUPLICATED;
1278 }
1279
1280 #include "gt-cfglayout.h"