re PR middle-end/29609 (Even with -O0 -g gcc optimizes a goto away and I cannot...
[gcc.git] / gcc / cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
42
/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer;
/* Likewise, holds the insns that precede the first basic block.  */
rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
57 \f
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
60 {
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
63
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
75 }
76 \f
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
80
81 static rtx
82 skip_insns_after_block (basic_block bb)
83 {
84 rtx insn, last_insn, next_head, prev;
85
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
89
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
91 {
92 if (insn == next_head)
93 break;
94
95 switch (GET_CODE (insn))
96 {
97 case BARRIER:
98 last_insn = insn;
99 continue;
100
101 case NOTE:
102 switch (NOTE_KIND (insn))
103 {
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
110 }
111 break;
112
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_P (NEXT_INSN (insn))
116 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
117 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
118 {
119 insn = NEXT_INSN (insn);
120 last_insn = insn;
121 continue;
122 }
123 break;
124
125 default:
126 break;
127 }
128
129 break;
130 }
131
132 /* It is possible to hit contradictory sequence. For instance:
133
134 jump_insn
135 NOTE_INSN_BLOCK_BEG
136 barrier
137
138 Where barrier belongs to jump_insn, but the note does not. This can be
139 created by removing the basic block originally following
140 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
141
142 for (insn = last_insn; insn != BB_END (bb); insn = prev)
143 {
144 prev = PREV_INSN (insn);
145 if (NOTE_P (insn))
146 switch (NOTE_KIND (insn))
147 {
148 case NOTE_INSN_BLOCK_END:
149 gcc_unreachable ();
150 break;
151 case NOTE_INSN_DELETED:
152 case NOTE_INSN_DELETED_LABEL:
153 continue;
154 default:
155 reorder_insns (insn, insn, last_insn);
156 }
157 }
158
159 return last_insn;
160 }
161
162 /* Locate or create a label for a given basic block. */
163
164 static rtx
165 label_for_bb (basic_block bb)
166 {
167 rtx label = BB_HEAD (bb);
168
169 if (!LABEL_P (label))
170 {
171 if (dump_file)
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
173
174 label = block_label (bb);
175 }
176
177 return label;
178 }
179
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Inter-block insns
   are detached from the global chain and stashed in the per-block
   header/footer fields (and in cfg_layout_function_header /
   cfg_layout_function_footer for insns before the first and after the
   last block), so that blocks can later be re-linked in any order.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first NOTE_INSN_BASIC_BLOCK (or first non-note insn).  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything preceding the first block becomes the function header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers and the like become the block's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
223 \f
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks contains the scope block that is used for all insn
   locators greater than the corresponding block_locators_locs value and
   smaller than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
location_t curr_location, last_location;
static tree curr_block, last_block;
/* -1 while the locator machinery is inactive; otherwise the most
   recently issued locator value.  */
static int curr_rtl_loc = -1;
246
247 /* Allocate insn locator datastructure. */
248 void
249 insn_locators_alloc (void)
250 {
251 prologue_locator = epilogue_locator = 0;
252
253 block_locators_locs = VEC_alloc (int, heap, 32);
254 block_locators_blocks = VEC_alloc (tree, gc, 32);
255 locations_locators_locs = VEC_alloc (int, heap, 32);
256 locations_locators_vals = VEC_alloc (location_t, heap, 32);
257
258 last_location = -1;
259 curr_location = -1;
260 curr_block = NULL;
261 last_block = NULL;
262 curr_rtl_loc = 0;
263 }
264
265 /* At the end of emit stage, clear current location. */
266 void
267 insn_locators_finalize (void)
268 {
269 if (curr_rtl_loc >= 0)
270 epilogue_locator = curr_insn_locator ();
271 curr_rtl_loc = -1;
272 }
273
/* Free the insn locator datastructures allocated by
   insn_locators_alloc.  (The original header comment said "Allocate"
   -- a copy/paste slip.)  Note the freed vector pointers are left
   dangling; the locator API must not be used again until
   insn_locators_alloc is called.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}
285
286
/* Set current location to LOCATION.  */
void
set_curr_insn_source_location (location_t location)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  /* NOTE(review): when LOCATION equals the last *recorded* location we
     skip the update, which can leave curr_location holding a different,
     not-yet-recorded value.  Presumably this lazy early-out is
     intentional -- confirm before changing.  */
  if (location == last_location)
    return;
  curr_location = location;
}
299
300 /* Set current scope block. */
301 void
302 set_curr_insn_block (tree b)
303 {
304 /* IV opts calls into RTL expansion to compute costs of operations. At this
305 time locators are not initialized. */
306 if (curr_rtl_loc == -1)
307 return;
308 if (b)
309 curr_block = b;
310 }
311
/* Return current insn locator.  A new locator value is minted whenever
   the scope block or the source location changed since the last
   recorded entry; otherwise the previous value is reused.  Returns 0
   when the locator machinery is inactive.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  /* Record a new scope-block boundary if the block changed.  */
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  /* Likewise record a boundary if the source location changed.  */
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
334
/* Pass callback: switch the RTL IL into cfglayout mode.  Returns 0
   (no additional TODO flags).  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
341
/* Pass callback: leave cfglayout mode, keeping the current block
   order.  Returns 0 (no additional TODO flags).  */
static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  /* Chain the blocks in their current order via the aux field; the
     block preceding the exit block keeps a NULL aux.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
355
/* Pass descriptor for entering cfglayout mode.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
374
/* Pass descriptor for leaving cfglayout mode.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
393 \f
394 /* Return scope resulting from combination of S1 and S2. */
395 static tree
396 choose_inner_scope (tree s1, tree s2)
397 {
398 if (!s1)
399 return s2;
400 if (!s2)
401 return s1;
402 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
403 return s1;
404 return s2;
405 }
406 \f
/* Emit lexical block notes needed to change scope from S1 to S2,
   placing them before ORIG_INSN.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scopes up toward the root until they meet; the block
     with the larger BLOCK_NUMBER is the more deeply nested one and is
     popped first.  COM ends up as the nearest common ancestor.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes from S1 up to (not including) the common ancestor.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes down to S2.  Each BLOCK_BEG note is emitted before the
     previous one, so the notes end up in outermost-first order.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
450
/* Return the lexical scope block INSN belongs to, or NULL when the
   insn has no locator or no scopes were recorded.  */
static tree
insn_scope (const_rtx insn)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search for the greatest recorded locator <= LOC; the
     locators were pushed in increasing order.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
490
/* Return the source location of the statement specified by locator
   LOC.  (The original comment said "line number"; the function returns
   a location_t.)  Binary search for the greatest recorded locator
   <= LOC, mirroring insn_scope above.  */
static location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
515
516 /* Return source line of the statement that produced this insn. */
517 int
518 locator_line (int loc)
519 {
520 expanded_location xloc;
521 if (!loc)
522 return 0;
523 else
524 xloc = expand_location (locator_location (loc));
525 return xloc.line;
526 }
527
/* Return the source line number of the statement that produced INSN.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
534
535 /* Return source file of the statement specified by LOC. */
536 const char *
537 locator_file (int loc)
538 {
539 expanded_location xloc;
540 if (!loc)
541 return 0;
542 else
543 xloc = expand_location (locator_location (loc));
544 return xloc.file;
545 }
546
/* Return the source file name of the statement that produced INSN.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
553
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      /* Insns without a scope keep the currently open block.  */
      if (! this_block)
	continue;

      /* Emit the BLOCK_END/BLOCK_BEG notes needed to move from the
	 currently open scope to this insn's scope.  */
      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Close any scopes
     still open at the end of the function via a throwaway note.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
606 \f
607
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb to follow the aux
     chain.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
676 \f
677
/* Given a reorder chain (blocks linked through bb->aux), rearrange the
   insn stream to match, then patch jumps and fallthru edges so the new
   layout is semantically correct.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* Start the new stream with the saved function header, if any;
     INSN tracks the tail of the stream being built.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in the block's header, the block itself, and its footer.  */
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* Terminate the stream with the saved function footer.  */
  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      /* The roles of the two edges have swapped.  */
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  /* Insert the forwarder block after BB in the layout chain.  */
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-throu across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL (needed so -O0 debugging sees the goto -- PR middle-end/29609).  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      basic_block nb;

	      /* Try to attach the locus to the jump insn at the edge's
		 source...  */
	      if (simplejump_p (BB_END (e->src)))
		{
		  if (INSN_LOCATOR (BB_END (e->src)) == (int) e->goto_locus)
		    continue;
		  if (INSN_LOCATOR (BB_END (e->src)) == 0)
		    {
		      INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		      continue;
		    }
		}
	      /* ... or check whether the destination already starts
		 with an insn carrying that locus.  */
	      if (e->dest != EXIT_BLOCK_PTR)
		{
		  insn = BB_HEAD (e->dest);
		  if (!INSN_P (insn))
		    insn = next_insn (insn);
		  if (insn && INSN_P (insn)
		      && INSN_LOCATOR (insn) == (int) e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and put a nop with the locus
		 in the new block.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
	    }
      }
}
931 \f
932 /* Perform sanity checks on the insn chain.
933 1. Check that next/prev pointers are consistent in both the forward and
934 reverse direction.
935 2. Count insns in chain, going both directions, and check if equal.
936 3. Check that get_last_insn () returns the actual end of chain. */
937
938 void
939 verify_insn_chain (void)
940 {
941 rtx x, prevx, nextx;
942 int insn_cnt1, insn_cnt2;
943
944 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
945 x != 0;
946 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
947 gcc_assert (PREV_INSN (x) == prevx);
948
949 gcc_assert (prevx == get_last_insn ());
950
951 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
952 x != 0;
953 nextx = x, insn_cnt2++, x = PREV_INSN (x))
954 gcc_assert (NEXT_INSN (x) == nextx);
955
956 gcc_assert (insn_cnt1 == insn_cnt2);
957 }
958 \f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* Find the source of the fallthru edge into the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  /* If BB is not already last in the aux chain, move it there.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... find the current tail...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and append BB there.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
1004
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Count fallthru predecessors of the exit block; remember the first
     and stop as soon as a second one is seen.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      /* The old chain tail is the block with a NULL aux link.  */
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
1054 \f
1055 /* Return true in case it is possible to duplicate the basic block BB. */
1056
1057 /* We do not want to declare the function in a header file, since it should
1058 only be used through the cfghooks interface, and we do not want to move
1059 it to cfgrtl.c since it would require also moving quite a lot of related
1060 code. */
1061 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1062
1063 bool
1064 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1065 {
1066 /* Do not attempt to duplicate tablejumps, as we need to unshare
1067 the dispatch table. This is difficult to do, as the instructions
1068 computing jump destination may be hoisted outside the basic block. */
1069 if (tablejump_p (BB_END (bb), NULL, NULL))
1070 return false;
1071
1072 /* Do not duplicate blocks containing insns that can't be copied. */
1073 if (targetm.cannot_copy_insn_p)
1074 {
1075 rtx insn = BB_HEAD (bb);
1076 while (1)
1077 {
1078 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1079 return false;
1080 if (insn == BB_END (bb))
1081 break;
1082 insn = NEXT_INSN (insn);
1083 }
1084 }
1085
1086 return true;
1087 }
1088
/* Duplicate the insns in the range FROM..TO inclusive, appending the
   copies at the end of the insn chain, and return the first copied
   insn.  Dispatch tables and code labels are deliberately not
   copied.  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  /* Labels are not duplicated; callers re-create them.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	      /* Debug code expect these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.
	       */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the marker note; the copies (if any) begin right after it.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB and return it.  The copy is
   placed just before the exit block, and BB's header/footer insn chains
   are duplicated as well.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  /* Copy the body and wrap the copies into a new block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      /* Walk to the end of the header chain, duplicate it, and detach
	 the copy into the new block's header.  */
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
	new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      /* Likewise for the footer chain.  */
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
	new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
1203
1204 \f
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg ().  */
1209
1210 void
1211 cfg_layout_initialize (unsigned int flags)
1212 {
1213 rtx x;
1214 basic_block bb;
1215
1216 initialize_original_copy_tables ();
1217
1218 cfg_layout_rtl_register_cfg_hooks ();
1219
1220 record_effective_endpoints ();
1221
1222 /* Make sure that the targets of non local gotos are marked. */
1223 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1224 {
1225 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1226 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1227 }
1228
1229 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1230 }
1231
1232 /* Splits superblocks. */
1233 void
1234 break_superblocks (void)
1235 {
1236 sbitmap superblocks;
1237 bool need = false;
1238 basic_block bb;
1239
1240 superblocks = sbitmap_alloc (last_basic_block);
1241 sbitmap_zero (superblocks);
1242
1243 FOR_EACH_BB (bb)
1244 if (bb->flags & BB_SUPERBLOCK)
1245 {
1246 bb->flags &= ~BB_SUPERBLOCK;
1247 SET_BIT (superblocks, bb->index);
1248 need = true;
1249 }
1250
1251 if (need)
1252 {
1253 rebuild_jump_labels (get_insns ());
1254 find_many_sub_basic_blocks (superblocks);
1255 }
1256
1257 free (superblocks);
1258 }
1259
1260 /* Finalize the changes: reorder insn list according to the sequence specified
1261 by aux pointers, enter compensation code, rebuild scope forest. */
1262
void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  /* Switch back from the cfglayout-mode hooks before materializing the
     block order into the insn chain.  */
  rtl_register_cfg_hooks ();
  /* NOTE(review): the condition suggests the fallthru predecessor of the
     exit block only needs fixing up after reload when the target has no
     epilogue pattern — confirm against prologue/epilogue generation.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn list according to the aux-pointer chain and insert
     any compensation jumps.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1287
1288 /* Checks whether all N blocks in BBS array can be copied. */
1289 bool
1290 can_copy_bbs_p (basic_block *bbs, unsigned n)
1291 {
1292 unsigned i;
1293 edge e;
1294 int ret = true;
1295
1296 for (i = 0; i < n; i++)
1297 bbs[i]->flags |= BB_DUPLICATED;
1298
1299 for (i = 0; i < n; i++)
1300 {
1301 /* In case we should redirect abnormal edge during duplication, fail. */
1302 edge_iterator ei;
1303 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1304 if ((e->flags & EDGE_ABNORMAL)
1305 && (e->dest->flags & BB_DUPLICATED))
1306 {
1307 ret = false;
1308 goto end;
1309 }
1310
1311 if (!can_duplicate_block_p (bbs[i]))
1312 {
1313 ret = false;
1314 break;
1315 }
1316 }
1317
1318 end:
1319 for (i = 0; i < n; i++)
1320 bbs[i]->flags &= ~BB_DUPLICATED;
1321
1322 return ret;
1323 }
1324
1325 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1326 are placed into array NEW_BBS in the same order. Edges from basic blocks
1327 in BBS are also duplicated and copies of those of them
1328 that lead into BBS are redirected to appropriate newly created block. The
1329 function assigns bbs into loops (copy of basic block bb is assigned to
1330 bb->loop_father->copy loop, so this must be set up correctly in advance)
1331 and updates dominators locally (LOOPS structure that contains the information
1332 about dominators is passed to enable this).
1333
BASE is the superloop to which the basic blocks belong; if its header or
latch is copied, we do not set the new blocks as header or latch.
1336
1337 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1338 also in the same order.
1339
1340 Newly created basic blocks are put after the basic block AFTER in the
1341 instruction stream, and the order of the blocks in BBS array is preserved. */
1342
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      /* Keep the copies in BBS order in the insn stream.  */
      after = new_bb;
      /* Mark the originals so intra-region edges and dominators can be
	 recognized in the later passes below.  */
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      /* Only remap dominators that were themselves copied; a dominator
	 outside the region is handled by the caller.  */
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  /* Record the copy of each requested edge as we encounter it.  */
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  /* Edges leaving the region keep their original destination;
	     edges into the region are retargeted to the copies.  */
	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
1407
1408 #include "gt-cfglayout.h"