/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This file contains low level functions to manipulate and analyze the
   CFG that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges

   Functions not intended for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
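
/* As a rough usage sketch of the edge-insertion API above (hypothetical
   pass code; E and REG are assumed to already exist), instructions are
   queued on an edge and the CFG is fixed up afterwards:

       rtx seq;
       start_sequence ();
       emit_move_insn (reg, const0_rtx);
       seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);
       ...
       commit_edge_insertions ();

   Splitting of the edge, if any is needed, happens only at commit time.  */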
\f
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
#include "target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"

static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
static void commit_one_edge_insertion (edge, int);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
\f
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (rtx note)
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (rtx label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn)
      && JUMP_LABEL (insn)
      && LABEL_P (JUMP_LABEL (insn)))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else
    {
      while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
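
/* A minimal caller-side sketch (hypothetical; dead_p stands in for
   whatever test a pass applies).  The returned "next" insn is what makes
   deletion inside a forward walk safe:

       for (insn = get_insns (); insn; )
	 insn = dead_p (insn) ? delete_insn (insn) : NEXT_INSN (insn);
*/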

/* Like delete_insn but also purge dead edges from BB.  */
rtx
delete_insn_and_edges (rtx insn)
{
  rtx x;
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  x = delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
  return x;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
delete_insn_chain (rtx start, rtx finish)
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  while (1)
    {
      next = NEXT_INSN (start);
      if (NOTE_P (start) && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}

/* Like delete_insn_chain but also purge dead edges from BB.  */
void
delete_insn_chain_and_edges (rtx first, rtx last)
{
  bool purge = false;

  if (INSN_P (last)
      && BLOCK_FOR_INSN (last)
      && BB_END (BLOCK_FOR_INSN (last)) == last)
    purge = true;
  delete_insn_chain (first, last);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (last));
}
\f
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction:
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block the new one should
   be placed after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD
   and END and place it into the BB chain after block AFTER.  END can be
   NULL to create a new empty basic block before HEAD.  Both END and HEAD
   can be NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = headp, end = endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
    {
      size_t old_size = VEC_length (basic_block, basic_block_info);
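      /* The new size is about 25% larger than currently required, so
	 repeated block creation grows the vector geometrically instead
	 of reallocating on every call.  */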
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      basic_block *p;
      VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
      p = VEC_address (basic_block, basic_block_info);
      memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}
\f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);
  if (LABEL_P (insn))
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = BB_END (b);
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn (end);
    }

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end);
  if (b->il.rtl->global_live_at_start)
    {
      FREE_REG_SET (b->il.rtl->global_live_at_start);
      FREE_REG_SET (b->il.rtl->global_live_at_end);
      b->il.rtl->global_live_at_start = NULL;
      b->il.rtl->global_live_at_end = NULL;
    }
}
\f
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = BB_END (bb);
      rtx insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

struct tree_opt_pass pass_free_cfg =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  free_bb_for_insn,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Return the RTX after which to emit code at the entry of the function.  */
rtx
entry_of_function (void)
{
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN for every insn within BB.  */

void
update_bb_for_insn (basic_block bb)
{
  rtx insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      if (!BARRIER_P (insn))
	set_block_for_insn (insn, bb);
      if (insn == BB_END (bb))
	break;
    }
}
\f
/* Create a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	insn = PREV_INSN (insn);
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do
     not bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  if (bb->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
		    bb->il.rtl->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
		    bb->il.rtl->global_live_at_end);
      propagate_block (new_bb, new_bb->il.rtl->global_live_at_start,
		       NULL, NULL, 0);
      COPY_REG_SET (bb->il.rtl->global_live_at_end,
		    new_bb->il.rtl->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_bb;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* This might have been an EH label that no longer has incoming
	 EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (b_head);

      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  BB_END (a) = a_end;
  a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
}

/* Return true when blocks A and B can be merged.  */
static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
\f
/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return BB_HEAD (block);
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
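      /* The subscript "EDGE_SUCC (src, 0) == e" below evaluates to 0 or 1,
	 so it selects whichever of the two successor edges is not E.  */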
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx insn = src->il.rtl->footer;

	  delete_insn_chain (kill_from, BB_END (src));

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    src->il.rtl->footer = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
    }

  /* If this is already a simple jump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace a possibly complicated jump insn with a simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were, or were created, before the jump
		 table end up inside the basic block.  */
	      rtx new_insn = BB_END (src);
	      rtx tmp;

	      for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
		   tmp = NEXT_INSN (tmp))
		set_block_for_insn (tmp, src);

	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return e;
}

/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx tmp;
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn))
    return NULL;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return NULL;
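      /* In an ADDR_DIFF_VEC the vector of label refs is operand 1;
	 operand 0 holds the base label the differences are computed
	 against.  In a plain ADDR_VEC the vector is operand 0.  */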
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return NULL;

      /* If the insn doesn't go where we think, we're confused.  */
      gcc_assert (JUMP_LABEL (insn) == old_label);

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);
  return e;
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equal to
   TARGET.  It then tries the simplifications and does nothing if none is
   possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.  We still return NULL in case E already pointed
   to TARGET and we didn't manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      src->flags |= BB_DIRTY;
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  src->flags |= BB_DIRTY;
  return ret;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */

static basic_block
force_nonfallthru_and_redirect (edge e, basic_block target)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= b->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as the
	 abnormal edge.
	 We can't redirect the abnormal edge, but we can still split the
	 fallthru one and create a separate abnormal edge to the original
	 destination.  This allows bb-reorder to make such an edge
	 non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs,
					ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  VEC_safe_push (edge, gc, bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      if (target->il.rtl->global_live_at_start)
	{
	  jump_block->il.rtl->global_live_at_start
	    = ALLOC_REG_SET (&reg_obstack);
	  jump_block->il.rtl->global_live_at_end
	    = ALLOC_REG_SET (&reg_obstack);
	  COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
			target->il.rtl->global_live_at_start);
	  COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
			target->il.rtl->global_live_at_start);
	}

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
	  && targetm.have_named_sections
	  && JUMP_P (BB_END (jump_block))
	  && !any_condjump_p (BB_END (jump_block))
	  && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
	REG_NOTES (BB_END (jump_block))
	  = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP, NULL_RTX,
			       REG_NOTES (BB_END (jump_block)));

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
#ifdef HAVE_return
      emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
#else
      gcc_unreachable ();
#endif
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump
   instruction (and possibly create a new basic block) to make the edge
   non-fallthru.  Return the newly created BB, or NULL if none was
   created.  */

basic_block
force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest);
}

/* Redirect edge even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if created, NULL otherwise.
   The conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect the newly created simplejump.  */
  e->src->flags |= BB_DIRTY;
  return force_nonfallthru_and_redirect (e, target);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));

  e->flags |= EDGE_FALLTHRU;
}
\f
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might
     not be adjacent, and the right place is after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->il.rtl->global_live_at_start)
    {
      bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (bb->il.rtl->global_live_at_start,
		    edge_in->dest->il.rtl->global_live_at_start);
      COPY_REG_SET (bb->il.rtl->global_live_at_end,
		    edge_in->dest->il.rtl->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}
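
/* Note that the queued insns live only on E->insns.r at this point; they
   enter the insn stream, and any required edge splitting happens, only
   when one of the commit functions below runs.  */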

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (edge e, int watch_calls)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Special case -- avoid inserting code between a call and the insn
     storing its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU)
      && single_pred_p (e->dest)
      && e->src != ENTRY_BLOCK_PTR
      && CALL_P (BB_END (e->src)))
    {
      rtx next = next_nonnote_insn (BB_END (e->src));

      after = BB_HEAD (e->dest);
      /* The first insn after the call may be a stack pop; skip it.  */
      while (next
	     && keep_with_call_p (next))
	{
	  after = next;
	  next = next_nonnote_insn (next);
	}
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (LABEL_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING))
	    {
	      rtx bb_note, cur_insn;

	      bb_note = NULL_RTX;
	      for (cur_insn = BB_HEAD (bb);
		   cur_insn != NEXT_INSN (BB_END (bb));
		   cur_insn = NEXT_INSN (cur_insn))
		if (NOTE_P (cur_insn)
		    && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
		  {
		    bb_note = cur_insn;
		    break;
		  }

	      if (JUMP_P (BB_END (bb))
		  && !any_condjump_p (BB_END (bb))
		  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
		REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
		  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	    }
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
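  /* The self-referential value below doubles as a sentinel: the commit
     functions assert bb->aux == &bb->aux before clearing it, which
     catches stale aux values left behind by other passes.  */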
  bb->aux = &bb->aux;
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e, false);
	  }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
\f
/* Update the CFG for all queued instructions, taking special care when
   inserting code on edges between a call and the insn storing its return
   value.  */

void
commit_edge_insertions_watch_calls (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e, true);
	  }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
\f
/* Print out RTL-specific basic block information (live information
   at start and end).  */

static void
rtl_dump_bb (basic_block bb, FILE *outf, int indent)
{
  rtx insn;
  rtx last;
  char *s_indent;

  s_indent = alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  fprintf (outf, ";;%s Registers live at start: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fprintf (outf, ";;%s Registers live at end: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_end, outf);
  putc ('\n', outf);
}
\f
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->il.rtl->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->il.rtl->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
\f
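/* Bring the REG_BR_PROB note on the jump ending BB back in sync with the
   probability recorded on the block's branch edge.  */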
void
update_br_prob_note (basic_block bb)
{
  rtx note;
  if (!JUMP_P (BB_END (bb)))
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}
\f
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fallthru edge crosses hot/cold partition boundaries

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  const int max_uid = get_max_uid ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int err = 0;
  basic_block bb;

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.)  */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
	      && profile_status != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    {
	      n_fallthru++, fallthru = e;
	      if ((e->flags & EDGE_CROSSING)
		  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
		      && e->src != ENTRY_BLOCK_PTR
		      && e->dest != EXIT_BLOCK_PTR))
		{
		  error ("fallthru edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	    }

	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
	  && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong amount of branch edges after unconditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_call && !CALL_P (BB_END (bb)))
	{
	  error ("call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
	/* We may have a barrier inside a basic block before dead code
	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK, pointers are correct.  Now check the header of the basic
	 block.  It ought to contain an optional CODE_LABEL followed
	 by a NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  free (bb_info);
  return err;
}
1956
1957 /* Verify the CFG and RTL consistency common for both underlying RTL and
1958 cfglayout RTL.
1959
1960 Currently it does the following checks:
1961 - all checks of rtl_verify_flow_info_1
1962 - check that all insns are in the basic blocks
1963 (except the switch handling code, barriers and notes)
1964 - check that all returns are followed by barriers
1965 - check that each fallthru edge points to the adjacent block.  */
1966 static int
1967 rtl_verify_flow_info (void)
1968 {
1969 basic_block bb;
1970 int err = rtl_verify_flow_info_1 ();
1971 rtx x;
1972 int num_bb_notes;
1973 const rtx rtx_first = get_insns ();
1974 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1975
1976 FOR_EACH_BB_REVERSE (bb)
1977 {
1978 edge e;
1979 edge_iterator ei;
1980
1981 if (bb->predictions)
1982 {
1983 error ("bb prediction set for block %i, but it is not used in RTL land", bb->index);
1984 err = 1;
1985 }
1986
1987 FOR_EACH_EDGE (e, ei, bb->succs)
1988 if (e->flags & EDGE_FALLTHRU)
1989 break;
1990 if (!e)
1991 {
1992 rtx insn;
1993
1994 /* Ensure the existence of a barrier after a BB with no fallthru edges.  */
1995 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
1996 insn = NEXT_INSN (insn))
1997 if (!insn
1998 || (NOTE_P (insn)
1999 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2000 {
2001 error ("missing barrier after block %i", bb->index);
2002 err = 1;
2003 break;
2004 }
2005 }
2006 else if (e->src != ENTRY_BLOCK_PTR
2007 && e->dest != EXIT_BLOCK_PTR)
2008 {
2009 rtx insn;
2010
2011 if (e->src->next_bb != e->dest)
2012 {
2013 error
2014 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2015 e->src->index, e->dest->index);
2016 err = 1;
2017 }
2018 else
2019 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2020 insn = NEXT_INSN (insn))
2021 if (BARRIER_P (insn) || INSN_P (insn))
2022 {
2023 error ("verify_flow_info: Incorrect fallthru %i->%i",
2024 e->src->index, e->dest->index);
2025 fatal_insn ("wrong insn in the fallthru edge", insn);
2026 err = 1;
2027 }
2028 }
2029 }
2030
2031 num_bb_notes = 0;
2032 last_bb_seen = ENTRY_BLOCK_PTR;
2033
2034 for (x = rtx_first; x; x = NEXT_INSN (x))
2035 {
2036 if (NOTE_INSN_BASIC_BLOCK_P (x))
2037 {
2038 bb = NOTE_BASIC_BLOCK (x);
2039
2040 num_bb_notes++;
2041 if (bb != last_bb_seen->next_bb)
2042 internal_error ("basic blocks not laid down consecutively");
2043
2044 curr_bb = last_bb_seen = bb;
2045 }
2046
2047 if (!curr_bb)
2048 {
2049 switch (GET_CODE (x))
2050 {
2051 case BARRIER:
2052 case NOTE:
2053 break;
2054
2055 case CODE_LABEL:
2056 /* An addr_vec is placed outside any basic block. */
2057 if (NEXT_INSN (x)
2058 && JUMP_P (NEXT_INSN (x))
2059 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2060 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2061 x = NEXT_INSN (x);
2062
2063 /* But in any case, non-deletable labels can appear anywhere. */
2064 break;
2065
2066 default:
2067 fatal_insn ("insn outside basic block", x);
2068 }
2069 }
2070
2071 if (JUMP_P (x)
2072 && returnjump_p (x) && ! condjump_p (x)
2073 && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
2074 fatal_insn ("return not followed by barrier", x);
2075 if (curr_bb && x == BB_END (curr_bb))
2076 curr_bb = NULL;
2077 }
2078
2079 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2080 internal_error
2081 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2082 num_bb_notes, n_basic_blocks);
2083
2084 return err;
2085 }
2086 \f
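/* Usage sketch (illustrative only; example_verify_after_transform is a
   hypothetical name): passes reach the verifier above through the generic
   verify_flow_info wrapper, normally only in checking builds.  */
#if 0
static void
example_verify_after_transform (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif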
2087 /* Assume that the preceding pass has possibly eliminated jump instructions
2088 or converted the unconditional jumps.  Eliminate the corresponding edges
2089 from the CFG.  Return true if any edges are eliminated.  */
2090
2091 bool
2092 purge_dead_edges (basic_block bb)
2093 {
2094 edge e;
2095 rtx insn = BB_END (bb), note;
2096 bool purged = false;
2097 bool found;
2098 edge_iterator ei;
2099
2100 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2101 if (NONJUMP_INSN_P (insn)
2102 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2103 {
2104 rtx eqnote;
2105
2106 if (! may_trap_p (PATTERN (insn))
2107 || ((eqnote = find_reg_equal_equiv_note (insn))
2108 && ! may_trap_p (XEXP (eqnote, 0))))
2109 remove_note (insn, note);
2110 }
2111
2112 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2113 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2114 {
2115 /* There are three types of edges we need to handle correctly here: EH
2116 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2117 latter can appear when nonlocal gotos are used. */
2118 if (e->flags & EDGE_EH)
2119 {
2120 if (can_throw_internal (BB_END (bb))
2121 /* If this is a call edge, verify that this is a call insn. */
2122 && (! (e->flags & EDGE_ABNORMAL_CALL)
2123 || CALL_P (BB_END (bb))))
2124 {
2125 ei_next (&ei);
2126 continue;
2127 }
2128 }
2129 else if (e->flags & EDGE_ABNORMAL_CALL)
2130 {
2131 if (CALL_P (BB_END (bb))
2132 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2133 || INTVAL (XEXP (note, 0)) >= 0))
2134 {
2135 ei_next (&ei);
2136 continue;
2137 }
2138 }
2139 else
2140 {
2141 ei_next (&ei);
2142 continue;
2143 }
2144
2145 remove_edge (e);
2146 bb->flags |= BB_DIRTY;
2147 purged = true;
2148 }
2149
2150 if (JUMP_P (insn))
2151 {
2152 rtx note;
2153 edge b, f;
2154 edge_iterator ei;
2155
2156 /* We care only about conditional jumps and simplejumps.  */
2157 if (!any_condjump_p (insn)
2158 && !returnjump_p (insn)
2159 && !simplejump_p (insn))
2160 return purged;
2161
2162 /* Branch probability/prediction notes are defined only for
2163 condjumps.  We've possibly turned a condjump into a simplejump.  */
2164 if (simplejump_p (insn))
2165 {
2166 note = find_reg_note (insn, REG_BR_PROB, NULL);
2167 if (note)
2168 remove_note (insn, note);
2169 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2170 remove_note (insn, note);
2171 }
2172
2173 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2174 {
2175 /* Don't let abnormal flags leak from computed jumps turned
2176 into simplejumps.  */
2177
2178 e->flags &= ~EDGE_ABNORMAL;
2179
2180 /* See if this edge is one we should keep. */
2181 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2182 /* A conditional jump can fall through into the next
2183 block, so we should keep the edge. */
2184 {
2185 ei_next (&ei);
2186 continue;
2187 }
2188 else if (e->dest != EXIT_BLOCK_PTR
2189 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2190 /* If the destination block is the target of the jump,
2191 keep the edge. */
2192 {
2193 ei_next (&ei);
2194 continue;
2195 }
2196 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2197 /* If the destination block is the exit block, and this
2198 instruction is a return, then keep the edge. */
2199 {
2200 ei_next (&ei);
2201 continue;
2202 }
2203 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2204 /* Keep the edges that correspond to exceptions thrown by
2205 this instruction and rematerialize the EDGE_ABNORMAL
2206 flag we just cleared above. */
2207 {
2208 e->flags |= EDGE_ABNORMAL;
2209 ei_next (&ei);
2210 continue;
2211 }
2212
2213 /* We do not need this edge. */
2214 bb->flags |= BB_DIRTY;
2215 purged = true;
2216 remove_edge (e);
2217 }
2218
2219 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2220 return purged;
2221
2222 if (dump_file)
2223 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2224
2225 if (!optimize)
2226 return purged;
2227
2228 /* Redistribute probabilities. */
2229 if (single_succ_p (bb))
2230 {
2231 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2232 single_succ_edge (bb)->count = bb->count;
2233 }
2234 else
2235 {
2236 note = find_reg_note (insn, REG_BR_PROB, NULL);
2237 if (!note)
2238 return purged;
2239
2240 b = BRANCH_EDGE (bb);
2241 f = FALLTHRU_EDGE (bb);
2242 b->probability = INTVAL (XEXP (note, 0));
2243 f->probability = REG_BR_PROB_BASE - b->probability;
2244 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2245 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2246 }
2247
2248 return purged;
2249 }
2250 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2251 {
2252 /* First, there should not be any EH or ABCALL edges resulting
2253 from non-local gotos and the like. If there were, we shouldn't
2254 have created the sibcall in the first place. Second, there
2255 should of course never have been a fallthru edge. */
2256 gcc_assert (single_succ_p (bb));
2257 gcc_assert (single_succ_edge (bb)->flags
2258 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2259
2260 return 0;
2261 }
2262
2263 /* If we don't see a jump insn, we don't know exactly why the block would
2264 have been broken at this point. Look for a simple, non-fallthru edge,
2265 as these are only created by conditional branches. If we find such an
2266 edge we know that there used to be a jump here and can then safely
2267 remove all non-fallthru edges. */
2268 found = false;
2269 FOR_EACH_EDGE (e, ei, bb->succs)
2270 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2271 {
2272 found = true;
2273 break;
2274 }
2275
2276 if (!found)
2277 return purged;
2278
2279 /* Remove all but the fake and fallthru edges. The fake edge may be
2280 the only successor for this block in the case of noreturn
2281 calls. */
2282 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2283 {
2284 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2285 {
2286 bb->flags |= BB_DIRTY;
2287 remove_edge (e);
2288 purged = true;
2289 }
2290 else
2291 ei_next (&ei);
2292 }
2293
2294 gcc_assert (single_succ_p (bb));
2295
2296 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2297 single_succ_edge (bb)->count = bb->count;
2298
2299 if (dump_file)
2300 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2301 bb->index);
2302 return purged;
2303 }
2304
2305 /* Search all basic blocks for potentially dead edges and purge them. Return
2306 true if some edge has been eliminated. */
2307
2308 bool
2309 purge_all_dead_edges (void)
2310 {
2311 bool purged = false;
2312 basic_block bb;
2313
2314 FOR_EACH_BB (bb)
2315 {
2316 bool purged_here = purge_dead_edges (bb);
2317
2318 purged |= purged_here;
2319 }
2320
2321 return purged;
2322 }
2323
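/* Usage sketch (illustrative only; the function name is hypothetical):
   a pass that has just simplified or deleted jump insns would purge the
   now-dead edges from each block it touched, or call purge_all_dead_edges
   to sweep the whole CFG.  */
#if 0
static bool
example_cleanup_after_jump_simplification (basic_block touched_bb)
{
  /* Returns true if any edge of TOUCHED_BB was removed.  */
  return purge_dead_edges (touched_bb);
}
#endif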
2324 /* Same as split_block but update cfg_layout structures. */
2325
2326 static basic_block
2327 cfg_layout_split_block (basic_block bb, void *insnp)
2328 {
2329 rtx insn = insnp;
2330 basic_block new_bb = rtl_split_block (bb, insn);
2331
2332 new_bb->il.rtl->footer = bb->il.rtl->footer;
2333 bb->il.rtl->footer = NULL;
2334
2335 return new_bb;
2336 }
2337
2338
2339 /* Redirect edge E to basic block DEST.  */
2340 static edge
2341 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2342 {
2343 basic_block src = e->src;
2344 edge ret;
2345
2346 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2347 return NULL;
2348
2349 if (e->dest == dest)
2350 return e;
2351
2352 if (e->src != ENTRY_BLOCK_PTR
2353 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2354 {
2355 src->flags |= BB_DIRTY;
2356 return ret;
2357 }
2358
2359 if (e->src == ENTRY_BLOCK_PTR
2360 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2361 {
2362 if (dump_file)
2363 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2364 e->src->index, dest->index);
2365
2366 e->src->flags |= BB_DIRTY;
2367 redirect_edge_succ (e, dest);
2368 return e;
2369 }
2370
2371 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2372 edge in case the basic blocks appear to be in sequence.  Avoid this
2373 transformation.  */
2374
2375 if (e->flags & EDGE_FALLTHRU)
2376 {
2377 /* Redirect any branch edges unified with the fallthru one. */
2378 if (JUMP_P (BB_END (src))
2379 && label_is_jump_target_p (BB_HEAD (e->dest),
2380 BB_END (src)))
2381 {
2382 edge redirected;
2383
2384 if (dump_file)
2385 fprintf (dump_file, "Fallthru edge unified with branch "
2386 "%i->%i redirected to %i\n",
2387 e->src->index, e->dest->index, dest->index);
2388 e->flags &= ~EDGE_FALLTHRU;
2389 redirected = redirect_branch_edge (e, dest);
2390 gcc_assert (redirected);
2391 e->flags |= EDGE_FALLTHRU;
2392 e->src->flags |= BB_DIRTY;
2393 return e;
2394 }
2395 /* In case we are redirecting the fallthru edge to the branch edge
2396 of a conditional jump, remove the jump.  */
2397 if (EDGE_COUNT (src->succs) == 2)
2398 {
2399 /* Find the edge that is different from E. */
2400 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2401
2402 if (s->dest == dest
2403 && any_condjump_p (BB_END (src))
2404 && onlyjump_p (BB_END (src)))
2405 delete_insn (BB_END (src));
2406 }
2407 ret = redirect_edge_succ_nodup (e, dest);
2408 if (dump_file)
2409 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2410 e->src->index, e->dest->index, dest->index);
2411 }
2412 else
2413 ret = redirect_branch_edge (e, dest);
2414
2415 /* We don't want simplejumps in the insn stream during cfglayout. */
2416 gcc_assert (!simplejump_p (BB_END (src)));
2417
2418 src->flags |= BB_DIRTY;
2419 return ret;
2420 }
2421
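/* Usage sketch (illustrative only; example_redirect is a hypothetical
   name): callers go through the generic cfghooks wrappers, which
   dispatch to the function above in cfglayout mode and fall back to
   the force variant below when plain redirection fails.  */
#if 0
static void
example_redirect (edge e, basic_block target)
{
  if (!redirect_edge_and_branch (e, target))
    redirect_edge_and_branch_force (e, target);
}
#endif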
2422 /* Simple wrapper, as we can always redirect fallthru edges.  */
2423 static basic_block
2424 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2425 {
2426 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2427
2428 gcc_assert (redirected);
2429 return NULL;
2430 }
2431
2432 /* Same as delete_basic_block but update cfg_layout structures. */
2433
2434 static void
2435 cfg_layout_delete_block (basic_block bb)
2436 {
2437 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remnants;
2438
2439 if (bb->il.rtl->header)
2440 {
2441 next = BB_HEAD (bb);
2442 if (prev)
2443 NEXT_INSN (prev) = bb->il.rtl->header;
2444 else
2445 set_first_insn (bb->il.rtl->header);
2446 PREV_INSN (bb->il.rtl->header) = prev;
2447 insn = bb->il.rtl->header;
2448 while (NEXT_INSN (insn))
2449 insn = NEXT_INSN (insn);
2450 NEXT_INSN (insn) = next;
2451 PREV_INSN (next) = insn;
2452 }
2453 next = NEXT_INSN (BB_END (bb));
2454 if (bb->il.rtl->footer)
2455 {
2456 insn = bb->il.rtl->footer;
2457 while (insn)
2458 {
2459 if (BARRIER_P (insn))
2460 {
2461 if (PREV_INSN (insn))
2462 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2463 else
2464 bb->il.rtl->footer = NEXT_INSN (insn);
2465 if (NEXT_INSN (insn))
2466 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2467 }
2468 if (LABEL_P (insn))
2469 break;
2470 insn = NEXT_INSN (insn);
2471 }
2472 if (bb->il.rtl->footer)
2473 {
2474 insn = BB_END (bb);
2475 NEXT_INSN (insn) = bb->il.rtl->footer;
2476 PREV_INSN (bb->il.rtl->footer) = insn;
2477 while (NEXT_INSN (insn))
2478 insn = NEXT_INSN (insn);
2479 NEXT_INSN (insn) = next;
2480 if (next)
2481 PREV_INSN (next) = insn;
2482 else
2483 set_last_insn (insn);
2484 }
2485 }
2486 if (bb->next_bb != EXIT_BLOCK_PTR)
2487 to = &bb->next_bb->il.rtl->header;
2488 else
2489 to = &cfg_layout_function_footer;
2490
2491 rtl_delete_block (bb);
2492
2493 if (prev)
2494 prev = NEXT_INSN (prev);
2495 else
2496 prev = get_insns ();
2497 if (next)
2498 next = PREV_INSN (next);
2499 else
2500 next = get_last_insn ();
2501
2502 if (next && NEXT_INSN (next) != prev)
2503 {
2504 remnants = unlink_insn_chain (prev, next);
2505 insn = remnants;
2506 while (NEXT_INSN (insn))
2507 insn = NEXT_INSN (insn);
2508 NEXT_INSN (insn) = *to;
2509 if (*to)
2510 PREV_INSN (*to) = insn;
2511 *to = remnants;
2512 }
2513 }
2514
2515 /* Return true when blocks A and B can be safely merged. */
2516 static bool
2517 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2518 {
2519 /* If we are partitioning hot/cold basic blocks, we don't want to
2520 mess up unconditional or indirect jumps that cross between hot
2521 and cold sections.
2522
2523 Basic block partitioning may result in some jumps that appear to
2524 be optimizable (or blocks that appear to be mergeable), but which really
2525 must be left untouched (they are required to make it safely across
2526 partition boundaries). See the comments at the top of
2527 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2528
2529 if (BB_PARTITION (a) != BB_PARTITION (b))
2530 return false;
2531
2532 /* There must be exactly one edge in between the blocks. */
2533 return (single_succ_p (a)
2534 && single_succ (a) == b
2535 && single_pred_p (b) == 1
2536 && a != b
2537 /* Must be simple edge. */
2538 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2539 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2540 /* If the jump insn has side effects,
2541 we can't kill the edge. */
2542 && (!JUMP_P (BB_END (a))
2543 || (reload_completed
2544 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2545 }
2546
2547 /* Merge block A and B. The blocks must be mergeable. */
2548
2549 static void
2550 cfg_layout_merge_blocks (basic_block a, basic_block b)
2551 {
2552 #ifdef ENABLE_CHECKING
2553 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2554 #endif
2555
2556 /* If there was a CODE_LABEL beginning B, delete it. */
2557 if (LABEL_P (BB_HEAD (b)))
2558 {
2559 /* This might have been an EH label that no longer has incoming
2560 EH edges. Update data structures to match. */
2561 maybe_remove_eh_handler (BB_HEAD (b));
2562
2563 delete_insn (BB_HEAD (b));
2564 }
2565
2566 /* We should have a fallthru edge in A; otherwise we can do a dummy
2567 redirection to get it cleaned up.  */
2568 if (JUMP_P (BB_END (a)))
2569 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2570 gcc_assert (!JUMP_P (BB_END (a)));
2571
2572 /* Possible line number notes should appear in between. */
2573 if (b->il.rtl->header)
2574 {
2575 rtx first = BB_END (a), last;
2576
2577 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
2578 delete_insn_chain (NEXT_INSN (first), last);
2579 b->il.rtl->header = NULL;
2580 }
2581
2582 /* If the basic blocks are not adjacent, move them around.  */
2583 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2584 {
2585 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2586
2587 emit_insn_after_noloc (first, BB_END (a));
2588 /* Skip possible DELETED_LABEL insn. */
2589 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2590 first = NEXT_INSN (first);
2591 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2592 BB_HEAD (b) = NULL;
2593 delete_insn (first);
2594 }
2595 /* Otherwise just re-associate the instructions. */
2596 else
2597 {
2598 rtx insn;
2599
2600 for (insn = BB_HEAD (b);
2601 insn != NEXT_INSN (BB_END (b));
2602 insn = NEXT_INSN (insn))
2603 set_block_for_insn (insn, a);
2604 insn = BB_HEAD (b);
2605 /* Skip possible DELETED_LABEL insn. */
2606 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2607 insn = NEXT_INSN (insn);
2608 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2609 BB_HEAD (b) = NULL;
2610 BB_END (a) = BB_END (b);
2611 delete_insn (insn);
2612 }
2613
2614 /* Possible tablejumps and barriers should appear after the block. */
2615 if (b->il.rtl->footer)
2616 {
2617 if (!a->il.rtl->footer)
2618 a->il.rtl->footer = b->il.rtl->footer;
2619 else
2620 {
2621 rtx last = a->il.rtl->footer;
2622
2623 while (NEXT_INSN (last))
2624 last = NEXT_INSN (last);
2625 NEXT_INSN (last) = b->il.rtl->footer;
2626 PREV_INSN (b->il.rtl->footer) = last;
2627 }
2628 b->il.rtl->footer = NULL;
2629 }
2630 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
2631
2632 if (dump_file)
2633 fprintf (dump_file, "Merged blocks %d and %d.\n",
2634 a->index, b->index);
2635 }
2636
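/* Usage sketch (illustrative only; example_try_merge is a hypothetical
   name): callers are expected to check the predicate before invoking
   the worker, both via the generic cfghooks wrappers.  */
#if 0
static bool
example_try_merge (basic_block a, basic_block b)
{
  if (!can_merge_blocks_p (a, b))
    return false;
  merge_blocks (a, b);
  return true;
}
#endif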
2637 /* Split edge E. */
2638
2639 static basic_block
2640 cfg_layout_split_edge (edge e)
2641 {
2642 basic_block new_bb =
2643 create_basic_block (e->src != ENTRY_BLOCK_PTR
2644 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2645 NULL_RTX, e->src);
2646
2647 /* ??? This info is likely going to be out of date very soon, but we must
2648 create it to avoid getting an ICE later. */
2649 if (e->dest->il.rtl->global_live_at_start)
2650 {
2651 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
2652 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
2653 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
2654 e->dest->il.rtl->global_live_at_start);
2655 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
2656 e->dest->il.rtl->global_live_at_start);
2657 }
2658
2659 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2660 redirect_edge_and_branch_force (e, new_bb);
2661
2662 return new_bb;
2663 }
2664
2665 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2666
2667 static void
2668 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2669 {
2670 }
2671
2672 /* Return 1 if BB ends with a call, possibly followed by some
2673 instructions that must stay with the call, 0 otherwise. */
2674
2675 static bool
2676 rtl_block_ends_with_call_p (basic_block bb)
2677 {
2678 rtx insn = BB_END (bb);
2679
2680 while (!CALL_P (insn)
2681 && insn != BB_HEAD (bb)
2682 && keep_with_call_p (insn))
2683 insn = PREV_INSN (insn);
2684 return (CALL_P (insn));
2685 }
2686
2687 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2688
2689 static bool
2690 rtl_block_ends_with_condjump_p (basic_block bb)
2691 {
2692 return any_condjump_p (BB_END (bb));
2693 }
2694
2695 /* Return true if we need to add a fake edge to the exit block.
2696 Helper function for rtl_flow_call_edges_add.  */
2697
2698 static bool
2699 need_fake_edge_p (rtx insn)
2700 {
2701 if (!INSN_P (insn))
2702 return false;
2703
2704 if ((CALL_P (insn)
2705 && !SIBLING_CALL_P (insn)
2706 && !find_reg_note (insn, REG_NORETURN, NULL)
2707 && !CONST_OR_PURE_CALL_P (insn)))
2708 return true;
2709
2710 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2711 && MEM_VOLATILE_P (PATTERN (insn)))
2712 || (GET_CODE (PATTERN (insn)) == PARALLEL
2713 && asm_noperands (insn) != -1
2714 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2715 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2716 }
2717
2718 /* Add fake edges to the function exit for any non-constant and non-noreturn
2719 calls and for volatile inline assembly, in the bitmap of blocks specified by
2720 BLOCKS, or to the whole CFG if BLOCKS is zero.  Return the number of blocks
2721 that were split.
2722
2723 The goal is to expose cases in which entering a basic block does not imply
2724 that all subsequent instructions must be executed. */
2725
2726 static int
2727 rtl_flow_call_edges_add (sbitmap blocks)
2728 {
2729 int i;
2730 int blocks_split = 0;
2731 int last_bb = last_basic_block;
2732 bool check_last_block = false;
2733
2734 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2735 return 0;
2736
2737 if (! blocks)
2738 check_last_block = true;
2739 else
2740 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2741
2742 /* In the last basic block, before epilogue generation, there will be
2743 a fallthru edge to EXIT. Special care is required if the last insn
2744 of the last basic block is a call because make_edge folds duplicate
2745 edges, which would result in the fallthru edge also being marked
2746 fake, which would result in the fallthru edge being removed by
2747 remove_fake_edges, which would result in an invalid CFG.
2748
2749 Moreover, we can't elide the outgoing fake edge, since the block
2750 profiler needs to take this into account in order to solve the minimal
2751 spanning tree in the case that the call doesn't return.
2752
2753 Handle this by adding a dummy instruction in a new last basic block. */
2754 if (check_last_block)
2755 {
2756 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2757 rtx insn = BB_END (bb);
2758
2759 /* Back up past insns that must be kept in the same block as a call. */
2760 while (insn != BB_HEAD (bb)
2761 && keep_with_call_p (insn))
2762 insn = PREV_INSN (insn);
2763
2764 if (need_fake_edge_p (insn))
2765 {
2766 edge e;
2767
2768 e = find_edge (bb, EXIT_BLOCK_PTR);
2769 if (e)
2770 {
2771 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2772 commit_edge_insertions ();
2773 }
2774 }
2775 }
2776
2777 /* Now add fake edges to the function exit for any non-constant
2778 calls since there is no way that we can determine if they will
2779 return or not... */
2780
2781 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2782 {
2783 basic_block bb = BASIC_BLOCK (i);
2784 rtx insn;
2785 rtx prev_insn;
2786
2787 if (!bb)
2788 continue;
2789
2790 if (blocks && !TEST_BIT (blocks, i))
2791 continue;
2792
2793 for (insn = BB_END (bb); ; insn = prev_insn)
2794 {
2795 prev_insn = PREV_INSN (insn);
2796 if (need_fake_edge_p (insn))
2797 {
2798 edge e;
2799 rtx split_at_insn = insn;
2800
2801 /* Don't split the block between a call and an insn that should
2802 remain in the same block as the call. */
2803 if (CALL_P (insn))
2804 while (split_at_insn != BB_END (bb)
2805 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2806 split_at_insn = NEXT_INSN (split_at_insn);
2807
2808 /* The handling above of the final block before the epilogue
2809 should be enough to verify that there is no edge to the exit
2810 block in the CFG already.  Calling make_edge in such a case would
2811 cause us to mark that edge as fake and remove it later. */
2812
2813 #ifdef ENABLE_CHECKING
2814 if (split_at_insn == BB_END (bb))
2815 {
2816 e = find_edge (bb, EXIT_BLOCK_PTR);
2817 gcc_assert (e == NULL);
2818 }
2819 #endif
2820
2821 /* Note that the following may create a new basic block
2822 and renumber the existing basic blocks. */
2823 if (split_at_insn != BB_END (bb))
2824 {
2825 e = split_block (bb, split_at_insn);
2826 if (e)
2827 blocks_split++;
2828 }
2829
2830 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2831 }
2832
2833 if (insn == BB_HEAD (bb))
2834 break;
2835 }
2836 }
2837
2838 if (blocks_split)
2839 verify_flow_info ();
2840
2841 return blocks_split;
2842 }
2843
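/* Usage sketch (illustrative only; the function name is hypothetical):
   the profiler requests fake exit edges for the whole CFG by passing a
   null bitmap through the generic flow_call_edges_add wrapper.  */
#if 0
static void
example_add_fake_exit_edges (void)
{
  int blocks_split = flow_call_edges_add (NULL);

  if (dump_file && blocks_split)
    fprintf (dump_file, "Split %d blocks for fake exit edges\n",
             blocks_split);
}
#endif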
2844 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
2845 the conditional branch target.  SECOND_HEAD should be the fall-thru;
2846 there is no need to handle it here, since the loop versioning code
2847 handles that.  SECOND_HEAD is still passed because the condition hook
2848 needs it for trees, and this hook must have the same type there.  */
2849 static void
2850 rtl_lv_add_condition_to_bb (basic_block first_head,
2851 basic_block second_head ATTRIBUTE_UNUSED,
2852 basic_block cond_bb, void *comp_rtx)
2853 {
2854 rtx label, seq, jump;
2855 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2856 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2857 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2858 enum machine_mode mode;
2859
2860
2861 label = block_label (first_head);
2862 mode = GET_MODE (op0);
2863 if (mode == VOIDmode)
2864 mode = GET_MODE (op1);
2865
2866 start_sequence ();
2867 op0 = force_operand (op0, NULL_RTX);
2868 op1 = force_operand (op1, NULL_RTX);
2869 do_compare_rtx_and_jump (op0, op1, comp, 0,
2870 mode, NULL_RTX, NULL_RTX, label);
2871 jump = get_last_insn ();
2872 JUMP_LABEL (jump) = label;
2873 LABEL_NUSES (label)++;
2874 seq = get_insns ();
2875 end_sequence ();
2876
2877 /* Add the new condition, in the new head.  */
2878 emit_insn_after (seq, BB_END (cond_bb));
2879 }
2880
2881
2882 /* Given a block B with a conditional branch at its end, store the
2883 branch edge in BRANCH_EDGE and the fall-thru edge in FALLTHRU_EDGE
2884 respectively.  */
2885 static void
2886 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2887 edge *fallthru_edge)
2888 {
2889 edge e = EDGE_SUCC (b, 0);
2890
2891 if (e->flags & EDGE_FALLTHRU)
2892 {
2893 *fallthru_edge = e;
2894 *branch_edge = EDGE_SUCC (b, 1);
2895 }
2896 else
2897 {
2898 *branch_edge = e;
2899 *fallthru_edge = EDGE_SUCC (b, 1);
2900 }
2901 }
2902
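/* Usage sketch (illustrative only; example_inspect_condition is a
   hypothetical name): given a block ending in a conditional jump, its
   two successor edges are recovered through the generic hook wrapper.  */
#if 0
static void
example_inspect_condition (basic_block cond_bb)
{
  edge branch_edge, fallthru_edge;

  extract_cond_bb_edges (cond_bb, &branch_edge, &fallthru_edge);
  /* branch_edge->dest is the jump target; fallthru_edge->dest is
     reached when the branch is not taken.  */
}
#endif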
2903 void
2904 init_rtl_bb_info (basic_block bb)
2905 {
2906 gcc_assert (!bb->il.rtl);
2907 bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
2908 }
2909
2910
2911 /* Add PAT to the end of basic block BB.  */
2912
2913 rtx
2914 insert_insn_end_bb_new (rtx pat, basic_block bb)
2915 {
2916 rtx insn = BB_END (bb);
2917 rtx new_insn;
2918 rtx pat_end = pat;
2919
2920 while (NEXT_INSN (pat_end) != NULL_RTX)
2921 pat_end = NEXT_INSN (pat_end);
2922
2923 /* If the last insn is a jump, insert PAT in front [taking care to
2924 handle cc0, etc. properly].  Similarly, we need to take care of
2925 trapping instructions in the presence of non-call exceptions.  */
2926
2927 if (JUMP_P (insn)
2928 || (NONJUMP_INSN_P (insn)
2929 && (!single_succ_p (bb)
2930 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2931 {
2932 #ifdef HAVE_cc0
2933 rtx note;
2934 #endif
2935 /* If this is a jump table, then we can't insert stuff here. Since
2936 we know the previous real insn must be the tablejump, we insert
2937 the new instruction just before the tablejump. */
2938 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2939 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2940 insn = prev_real_insn (insn);
2941
2942 #ifdef HAVE_cc0
2943 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2944 if cc0 isn't set. */
2945 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2946 if (note)
2947 insn = XEXP (note, 0);
2948 else
2949 {
2950 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2951 if (maybe_cc0_setter
2952 && INSN_P (maybe_cc0_setter)
2953 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2954 insn = maybe_cc0_setter;
2955 }
2956 #endif
2957 /* FIXME: What if something in cc0/jump uses value set in new
2958 insn? */
2959 new_insn = emit_insn_before_noloc (pat, insn);
2960 }
2961
2962 /* Likewise if the last insn is a call, as will happen in the presence
2963 of exception handling. */
2964 else if (CALL_P (insn)
2965 && (!single_succ_p (bb)
2966 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2967 {
2968 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2969 we search backward and place the instructions before the first
2970 parameter is loaded.  Do this for all targets, for consistency and
2971 on the presumption that we'll get better code elsewhere as well.  */
2972
2973 /* Since different machines initialize their parameter registers
2974 in different orders, assume nothing. Collect the set of all
2975 parameter registers. */
2976 insn = find_first_parameter_load (insn, BB_HEAD (bb));
2977
2978 /* If we found all the parameter loads, then we want to insert
2979 before the first parameter load.
2980
2981 If we did not find all the parameter loads, then we might have
2982 stopped on the head of the block, which could be a CODE_LABEL.
2983 If we inserted before the CODE_LABEL, then we would be putting
2984 the insn in the wrong basic block. In that case, put the insn
2985 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
2986 while (LABEL_P (insn)
2987 || NOTE_INSN_BASIC_BLOCK_P (insn))
2988 insn = NEXT_INSN (insn);
2989
2990 new_insn = emit_insn_before_noloc (pat, insn);
2991 }
2992 else
2993 new_insn = emit_insn_after_noloc (pat, insn);
2994
2995 return new_insn;
2996 }
2997
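/* Usage sketch (illustrative only; example_append_copy is a hypothetical
   name): emit a register-to-register copy at the end of BB, letting the
   function above place it safely before any trailing jump or call.  */
#if 0
static rtx
example_append_copy (basic_block bb, rtx dest_reg, rtx src_reg)
{
  rtx pat = gen_move_insn (dest_reg, src_reg);

  return insert_insn_end_bb_new (pat, bb);
}
#endif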
2998 /* Implementation of CFG manipulation for linearized RTL. */
2999 struct cfg_hooks rtl_cfg_hooks = {
3000 "rtl",
3001 rtl_verify_flow_info,
3002 rtl_dump_bb,
3003 rtl_create_basic_block,
3004 rtl_redirect_edge_and_branch,
3005 rtl_redirect_edge_and_branch_force,
3006 rtl_delete_block,
3007 rtl_split_block,
3008 rtl_move_block_after,
3009 rtl_can_merge_blocks, /* can_merge_blocks_p */
3010 rtl_merge_blocks,
3011 rtl_predict_edge,
3012 rtl_predicted_by_p,
3013 NULL, /* can_duplicate_block_p */
3014 NULL, /* duplicate_block */
3015 rtl_split_edge,
3016 rtl_make_forwarder_block,
3017 rtl_tidy_fallthru_edge,
3018 rtl_block_ends_with_call_p,
3019 rtl_block_ends_with_condjump_p,
3020 rtl_flow_call_edges_add,
3021 NULL, /* execute_on_growing_pred */
3022 NULL, /* execute_on_shrinking_pred */
3023 NULL, /* duplicate loop for trees */
3024 NULL, /* lv_add_condition_to_bb */
3025 NULL, /* lv_adjust_loop_header_phi*/
3026 NULL, /* extract_cond_bb_edges */
3027 NULL /* flush_pending_stmts */
3028 };
3029
3030 /* Implementation of CFG manipulation for cfg layout RTL, where
3031 basic blocks connected via fallthru edges do not have to be adjacent.
3032 This representation will hopefully become the default one in a future
3033 version of the compiler.  */
3034
3035 /* We do not want to declare these functions in a header file, since they
3036 should only be used through the cfghooks interface, and we do not want to
3037 move them here since it would require also moving quite a lot of related
3038 code. */
3039 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
3040 extern basic_block cfg_layout_duplicate_bb (basic_block);
3041
3042 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3043 "cfglayout mode",
3044 rtl_verify_flow_info_1,
3045 rtl_dump_bb,
3046 cfg_layout_create_basic_block,
3047 cfg_layout_redirect_edge_and_branch,
3048 cfg_layout_redirect_edge_and_branch_force,
3049 cfg_layout_delete_block,
3050 cfg_layout_split_block,
3051 rtl_move_block_after,
3052 cfg_layout_can_merge_blocks_p,
3053 cfg_layout_merge_blocks,
3054 rtl_predict_edge,
3055 rtl_predicted_by_p,
3056 cfg_layout_can_duplicate_bb_p,
3057 cfg_layout_duplicate_bb,
3058 cfg_layout_split_edge,
3059 rtl_make_forwarder_block,
3060 NULL,
3061 rtl_block_ends_with_call_p,
3062 rtl_block_ends_with_condjump_p,
3063 rtl_flow_call_edges_add,
3064 NULL, /* execute_on_growing_pred */
3065 NULL, /* execute_on_shrinking_pred */
3066 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3067 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3068 NULL, /* lv_adjust_loop_header_phi*/
3069 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3070 NULL /* flush_pending_stmts */
3071 };
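/* Usage sketch (illustrative only; example_run_in_cfglayout_mode is a
   hypothetical name): a pass wanting the cfglayout representation
   brackets its work with the entry points from cfglayout.c, which
   install and remove the hook set above.  */
#if 0
static void
example_run_in_cfglayout_mode (void)
{
  cfg_layout_initialize (0);	/* installs cfg_layout_rtl_cfg_hooks */
  /* ... transform the CFG here ...  */
  cfg_layout_finalize ();	/* back to rtl_cfg_hooks */
}
#endif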