1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
 22 /* This file contains low level functions to manipulate and analyze the CFG
 23 that are aware of the RTL intermediate language.
24
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
30 merge_blocks_nomove
 31 - Infrastructure to quickly determine the basic block for an insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
41 verify_flow_info
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
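/* A minimal usage sketch (assuming E is an edge and TARGET a basic block
   that already exist in the CFG): try the cheap redirection first and
   fall back to the forced variant, which may create a new jump block.

       if (!redirect_edge_and_branch (e, target))
         redirect_edge_and_branch_force (e, target);

   Both routines are defined below.  */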
44 \f
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "tree.h"
50 #include "rtl.h"
51 #include "hard-reg-set.h"
52 #include "basic-block.h"
53 #include "regs.h"
54 #include "flags.h"
55 #include "output.h"
56 #include "function.h"
57 #include "except.h"
58 #include "toplev.h"
59 #include "tm_p.h"
60 #include "obstack.h"
61 #include "insn-config.h"
62
63 /* Stubs in case we don't have a return insn. */
64 #ifndef HAVE_return
65 #define HAVE_return 0
66 #define gen_return() NULL_RTX
67 #endif
68
69 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
70 /* ??? Should probably be using LABEL_NUSES instead. It would take a
71 bit of surgery to be able to use or co-opt the routines in jump. */
72 rtx label_value_list;
73 rtx tail_recursion_label_list;
74
75 static int can_delete_note_p PARAMS ((rtx));
76 static int can_delete_label_p PARAMS ((rtx));
77 static void commit_one_edge_insertion PARAMS ((edge, int));
78 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
79 static rtx last_loop_beg_note PARAMS ((rtx));
80 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
81 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
82 \f
83 /* Return true if NOTE is not one of the ones that must be kept paired,
84 so that we may simply delete it. */
85
86 static int
87 can_delete_note_p (note)
88 rtx note;
89 {
90 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
91 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
92 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
93 }
94
95 /* True if a given label can be deleted. */
96
97 static int
98 can_delete_label_p (label)
99 rtx label;
100 {
101 return (!LABEL_PRESERVE_P (label)
102 /* User declared labels must be preserved. */
103 && LABEL_NAME (label) == 0
104 && !in_expr_list_p (forced_labels, label)
105 && !in_expr_list_p (label_value_list, label));
106 }
107
108 /* Delete INSN by patching it out. Return the next insn. */
109
110 rtx
111 delete_insn (insn)
112 rtx insn;
113 {
114 rtx next = NEXT_INSN (insn);
115 rtx note;
116 bool really_delete = true;
117
118 if (GET_CODE (insn) == CODE_LABEL)
119 {
120 /* Some labels can't be directly removed from the INSN chain, as they
 121 might be referenced via variables, constant pool etc.
122 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
123 if (! can_delete_label_p (insn))
124 {
125 const char *name = LABEL_NAME (insn);
126
127 really_delete = false;
128 PUT_CODE (insn, NOTE);
129 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
130 NOTE_SOURCE_FILE (insn) = name;
131 }
132
133 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
134 }
135
136 if (really_delete)
137 {
138 /* If this insn has already been deleted, something is very wrong. */
139 if (INSN_DELETED_P (insn))
140 abort ();
141 remove_insn (insn);
142 INSN_DELETED_P (insn) = 1;
143 }
144
145 /* If deleting a jump, decrement the use count of the label. Deleting
146 the label itself should happen in the normal course of block merging. */
147 if (GET_CODE (insn) == JUMP_INSN
148 && JUMP_LABEL (insn)
149 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
150 LABEL_NUSES (JUMP_LABEL (insn))--;
151
152 /* Also if deleting an insn that references a label. */
153 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
154 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
155 LABEL_NUSES (XEXP (note, 0))--;
156
157 if (GET_CODE (insn) == JUMP_INSN
158 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
159 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
160 {
161 rtx pat = PATTERN (insn);
162 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
163 int len = XVECLEN (pat, diff_vec_p);
164 int i;
165
166 for (i = 0; i < len; i++)
167 {
168 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
169
170 /* When deleting code in bulk (e.g. removing many unreachable
171 blocks) we can delete a label that's a target of the vector
172 before deleting the vector itself. */
173 if (GET_CODE (label) != NOTE)
174 LABEL_NUSES (label)--;
175 }
176 }
177
178 return next;
179 }
180
181 /* Like delete_insn but also purge dead edges from BB. */
182 rtx
183 delete_insn_and_edges (insn)
184 rtx insn;
185 {
186 rtx x;
187 bool purge = false;
188
189 if (INSN_P (insn)
190 && BLOCK_FOR_INSN (insn)
191 && BLOCK_FOR_INSN (insn)->end == insn)
192 purge = true;
193 x = delete_insn (insn);
194 if (purge)
195 purge_dead_edges (BLOCK_FOR_INSN (insn));
196 return x;
197 }
198
199 /* Unlink a chain of insns between START and FINISH, leaving notes
200 that must be paired. */
201
202 void
203 delete_insn_chain (start, finish)
204 rtx start, finish;
205 {
206 rtx next;
207
208 /* Unchain the insns one by one. It would be quicker to delete all of these
209 with a single unchaining, rather than one at a time, but we need to keep
210 the NOTE's. */
211 while (1)
212 {
213 next = NEXT_INSN (start);
214 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
215 ;
216 else
217 next = delete_insn (start);
218
219 if (start == finish)
220 break;
221 start = next;
222 }
223 }
224
 225 /* Like delete_insn_chain but also purge dead edges from BB. */
226 void
227 delete_insn_chain_and_edges (first, last)
228 rtx first, last;
229 {
230 bool purge = false;
231
232 if (INSN_P (last)
233 && BLOCK_FOR_INSN (last)
234 && BLOCK_FOR_INSN (last)->end == last)
235 purge = true;
236 delete_insn_chain (first, last);
237 if (purge)
238 purge_dead_edges (BLOCK_FOR_INSN (last));
239 }
240 \f
241 /* Create a new basic block consisting of the instructions between HEAD and END
 242 inclusive. This function is designed to allow fast BB construction - it
 243 reuses the note and basic block struct in BB_NOTE, if any, does not grow
 244 the BASIC_BLOCK chain, and should be used directly only by CFG construction
 245 code. END can be NULL to create a new empty basic block before HEAD.
 246 Both END and HEAD can be NULL to create a basic block at the end of the
 247 INSN chain. AFTER is the basic block we should be put after. */
248
249 basic_block
250 create_basic_block_structure (head, end, bb_note, after)
251 rtx head, end, bb_note;
252 basic_block after;
253 {
254 basic_block bb;
255
256 if (bb_note
257 && ! RTX_INTEGRATED_P (bb_note)
258 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 && bb->aux == NULL)
260 {
261 /* If we found an existing note, thread it back onto the chain. */
262
263 rtx after;
264
265 if (GET_CODE (head) == CODE_LABEL)
266 after = head;
267 else
268 {
269 after = PREV_INSN (head);
270 head = bb_note;
271 }
272
273 if (after != bb_note && NEXT_INSN (after) != bb_note)
274 reorder_insns_nobb (bb_note, bb_note, after);
275 }
276 else
277 {
278 /* Otherwise we must create a note and a basic block structure. */
279
280 bb = alloc_block ();
281
282 if (!head && !end)
283 head = end = bb_note
284 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
285 else if (GET_CODE (head) == CODE_LABEL && end)
286 {
287 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
288 if (head == end)
289 end = bb_note;
290 }
291 else
292 {
293 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
294 head = bb_note;
295 if (!end)
296 end = head;
297 }
298
299 NOTE_BASIC_BLOCK (bb_note) = bb;
300 }
301
302 /* Always include the bb note in the block. */
303 if (NEXT_INSN (end) == bb_note)
304 end = bb_note;
305
306 bb->head = head;
307 bb->end = end;
308 bb->index = last_basic_block++;
309 bb->flags = BB_NEW;
310 link_block (bb, after);
311 BASIC_BLOCK (bb->index) = bb;
312 update_bb_for_insn (bb);
313
314 /* Tag the block so that we know it has been used when considering
315 other basic block notes. */
316 bb->aux = bb;
317
318 return bb;
319 }
320
 321 /* Create a new basic block consisting of the instructions between HEAD and
 322 END and place it in the BB chain after block AFTER. END can be NULL to
 323 create a new empty basic block before HEAD. Both END and HEAD can be NULL
 324 to create a basic block at the end of the INSN chain. */
325
326 basic_block
327 create_basic_block (head, end, after)
328 rtx head, end;
329 basic_block after;
330 {
331 basic_block bb;
332
333 /* Place the new block just after the end. */
334 VARRAY_GROW (basic_block_info, last_basic_block+1);
335
336 n_basic_blocks++;
337
338 bb = create_basic_block_structure (head, end, NULL, after);
339 bb->aux = NULL;
340 return bb;
341 }
342 \f
343 /* Delete the insns in a (non-live) block. We physically delete every
344 non-deleted-note insn, and update the flow graph appropriately.
345
346 Return nonzero if we deleted an exception handler. */
347
348 /* ??? Preserving all such notes strikes me as wrong. It would be nice
349 to post-process the stream to remove empty blocks, loops, ranges, etc. */
350
351 int
352 flow_delete_block_noexpunge (b)
353 basic_block b;
354 {
355 int deleted_handler = 0;
356 rtx insn, end, tmp;
357
358 /* If the head of this block is a CODE_LABEL, then it might be the
359 label for an exception handler which can't be reached.
360
361 We need to remove the label from the exception_handler_label list
362 and remove the associated NOTE_INSN_EH_REGION_BEG and
363 NOTE_INSN_EH_REGION_END notes. */
364
365 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
366 hanging before the block. */
367
368 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
369 {
370 if (GET_CODE (insn) != NOTE)
371 break;
372 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
373 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
374 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
375 }
376
377 insn = b->head;
378
379 never_reached_warning (insn, b->end);
380
381 if (GET_CODE (insn) == CODE_LABEL)
382 maybe_remove_eh_handler (insn);
383
384 /* Include any jump table following the basic block. */
385 end = b->end;
386 if (GET_CODE (end) == JUMP_INSN
387 && (tmp = JUMP_LABEL (end)) != NULL_RTX
388 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
389 && GET_CODE (tmp) == JUMP_INSN
390 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
391 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
392 end = tmp;
393
394 /* Include any barrier that may follow the basic block. */
395 tmp = next_nonnote_insn (end);
396 if (tmp && GET_CODE (tmp) == BARRIER)
397 end = tmp;
398
399 /* Selectively delete the entire chain. */
400 b->head = NULL;
401 delete_insn_chain (insn, end);
402
403 /* Remove the edges into and out of this block. Note that there may
404 indeed be edges in, if we are removing an unreachable loop. */
405 while (b->pred != NULL)
406 remove_edge (b->pred);
407 while (b->succ != NULL)
408 remove_edge (b->succ);
409
410 b->pred = NULL;
411 b->succ = NULL;
412
413 return deleted_handler;
414 }
415
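/* Delete basic block B.  Like flow_delete_block_noexpunge above, but also
   remove B from the basic block array.  */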
416 int
417 flow_delete_block (b)
418 basic_block b;
419 {
420 int deleted_handler = flow_delete_block_noexpunge (b);
421
422 /* Remove the basic block from the array. */
423 expunge_block (b);
424
425 return deleted_handler;
426 }
427 \f
428 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
429
430 void
431 compute_bb_for_insn ()
432 {
433 basic_block bb;
434
435 FOR_EACH_BB (bb)
436 {
437 rtx end = bb->end;
438 rtx insn;
439
440 for (insn = bb->head; ; insn = NEXT_INSN (insn))
441 {
442 BLOCK_FOR_INSN (insn) = bb;
443 if (insn == end)
444 break;
445 }
446 }
447 }
448
449 /* Release the basic_block_for_insn array. */
450
451 void
452 free_bb_for_insn ()
453 {
454 rtx insn;
455 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
456 if (GET_CODE (insn) != BARRIER)
457 BLOCK_FOR_INSN (insn) = NULL;
458 }
459
 460 /* Update the block membership (BLOCK_FOR_INSN) of the insns within BB. */
461
462 void
463 update_bb_for_insn (bb)
464 basic_block bb;
465 {
466 rtx insn;
467
468 for (insn = bb->head; ; insn = NEXT_INSN (insn))
469 {
470 set_block_for_insn (insn, bb);
471 if (insn == bb->end)
472 break;
473 }
474 }
475 \f
 476 /* Split a block BB after insn INSN creating a new fallthru edge.
 477 Return the new edge. Note that to keep other parts of the compiler happy,
 478 the new block is placed immediately after the block being split in the
 479 basic block chain. */
480
481 edge
482 split_block (bb, insn)
483 basic_block bb;
484 rtx insn;
485 {
486 basic_block new_bb;
487 edge new_edge;
488 edge e;
489
490 /* There is no point splitting the block after its end. */
491 if (bb->end == insn)
492 return 0;
493
494 /* Create the new basic block. */
495 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
496 new_bb->count = bb->count;
497 new_bb->frequency = bb->frequency;
498 new_bb->loop_depth = bb->loop_depth;
499 bb->end = insn;
500
501 /* Redirect the outgoing edges. */
502 new_bb->succ = bb->succ;
503 bb->succ = NULL;
504 for (e = new_bb->succ; e; e = e->succ_next)
505 e->src = new_bb;
506
507 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
508
509 if (bb->global_live_at_start)
510 {
511 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
512 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
513 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
514
515 /* We now have to calculate which registers are live at the end
516 of the split basic block and at the start of the new basic
517 block. Start with those registers that are known to be live
518 at the end of the original basic block and get
519 propagate_block to determine which registers are live. */
520 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
521 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
522 COPY_REG_SET (bb->global_live_at_end,
523 new_bb->global_live_at_start);
524 #ifdef HAVE_conditional_execution
525 /* In the presence of conditional execution we are not able to update
526 liveness precisely. */
527 if (reload_completed)
528 {
529 bb->flags |= BB_DIRTY;
530 new_bb->flags |= BB_DIRTY;
531 }
532 #endif
533 }
534
535 return new_edge;
536 }
537
538 /* Blocks A and B are to be merged into a single block A. The insns
539 are already contiguous, hence `nomove'. */
540
541 void
542 merge_blocks_nomove (a, b)
543 basic_block a, b;
544 {
545 rtx b_head = b->head, b_end = b->end, a_end = a->end;
546 rtx del_first = NULL_RTX, del_last = NULL_RTX;
547 int b_empty = 0;
548 edge e;
549
550 /* If there was a CODE_LABEL beginning B, delete it. */
551 if (GET_CODE (b_head) == CODE_LABEL)
552 {
553 /* Detect basic blocks with nothing but a label. This can happen
554 in particular at the end of a function. */
555 if (b_head == b_end)
556 b_empty = 1;
557
558 del_first = del_last = b_head;
559 b_head = NEXT_INSN (b_head);
560 }
561
562 /* Delete the basic block note and handle blocks containing just that
563 note. */
564 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
565 {
566 if (b_head == b_end)
567 b_empty = 1;
568 if (! del_last)
569 del_first = b_head;
570
571 del_last = b_head;
572 b_head = NEXT_INSN (b_head);
573 }
574
575 /* If there was a jump out of A, delete it. */
576 if (GET_CODE (a_end) == JUMP_INSN)
577 {
578 rtx prev;
579
580 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
581 if (GET_CODE (prev) != NOTE
582 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
583 || prev == a->head)
584 break;
585
586 del_first = a_end;
587
588 #ifdef HAVE_cc0
589 /* If this was a conditional jump, we need to also delete
590 the insn that set cc0. */
591 if (only_sets_cc0_p (prev))
592 {
593 rtx tmp = prev;
594
595 prev = prev_nonnote_insn (prev);
596 if (!prev)
597 prev = a->head;
598 del_first = tmp;
599 }
600 #endif
601
602 a_end = PREV_INSN (del_first);
603 }
604 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
605 del_first = NEXT_INSN (a_end);
606
607 /* Normally there should only be one successor of A and that is B, but
 608 partway through the merge of blocks for conditional_execution we'll
609 be merging a TEST block with THEN and ELSE successors. Free the
610 whole lot of them and hope the caller knows what they're doing. */
611 while (a->succ)
612 remove_edge (a->succ);
613
614 /* Adjust the edges out of B for the new owner. */
615 for (e = b->succ; e; e = e->succ_next)
616 e->src = a;
617 a->succ = b->succ;
618 a->flags |= b->flags;
619
620 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
621 b->pred = b->succ = NULL;
622 a->global_live_at_end = b->global_live_at_end;
623
624 expunge_block (b);
625
626 /* Delete everything marked above as well as crap that might be
627 hanging out between the two blocks. */
628 delete_insn_chain (del_first, del_last);
629
630 /* Reassociate the insns of B with A. */
631 if (!b_empty)
632 {
633 rtx x;
634
635 for (x = a_end; x != b_end; x = NEXT_INSN (x))
636 set_block_for_insn (x, a);
637
638 set_block_for_insn (b_end, a);
639
640 a_end = b_end;
641 }
642
643 a->end = a_end;
644 }
645 \f
646 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
647 exist. */
648
649 rtx
650 block_label (block)
651 basic_block block;
652 {
653 if (block == EXIT_BLOCK_PTR)
654 return NULL_RTX;
655
656 if (GET_CODE (block->head) != CODE_LABEL)
657 {
658 block->head = emit_label_before (gen_label_rtx (), block->head);
659 }
660
661 return block->head;
662 }
663
 664 /* Attempt to perform edge redirection by replacing a possibly complex jump
 665 instruction with an unconditional jump, or by removing the jump completely.
 666 This can apply only if all edges now point to the same block. The
 667 parameters and return values are equivalent to redirect_edge_and_branch. */
668
669 static bool
670 try_redirect_by_replacing_jump (e, target)
671 edge e;
672 basic_block target;
673 {
674 basic_block src = e->src;
675 rtx insn = src->end, kill_from;
676 edge tmp;
677 rtx set, table;
678 int fallthru = 0;
679
680 /* Verify that all targets will be TARGET. */
681 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
682 if (tmp->dest != target && tmp != e)
683 break;
684
685 if (tmp || !onlyjump_p (insn))
686 return false;
687 if (reload_completed && JUMP_LABEL (insn)
688 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
689 && GET_CODE (table) == JUMP_INSN
690 && (GET_CODE (PATTERN (table)) == ADDR_VEC
691 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
692 return false;
693
694 /* Avoid removing branch with side effects. */
695 set = single_set (insn);
696 if (!set || side_effects_p (set))
697 return false;
698
699 /* In case we zap a conditional jump, we'll need to kill
700 the cc0 setter too. */
701 kill_from = insn;
702 #ifdef HAVE_cc0
703 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
704 kill_from = PREV_INSN (insn);
705 #endif
706
707 /* See if we can create the fallthru edge. */
708 if (can_fallthru (src, target))
709 {
710 if (rtl_dump_file)
711 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
712 fallthru = 1;
713
714 /* Selectively unlink whole insn chain. */
715 delete_insn_chain (kill_from, PREV_INSN (target->head));
716 }
717
718 /* If this already is simplejump, redirect it. */
719 else if (simplejump_p (insn))
720 {
721 if (e->dest == target)
722 return false;
723 if (rtl_dump_file)
724 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
725 INSN_UID (insn), e->dest->index, target->index);
726 if (!redirect_jump (insn, block_label (target), 0))
727 {
728 if (target == EXIT_BLOCK_PTR)
729 return false;
730 abort ();
731 }
732 }
733
734 /* Cannot do anything for target exit block. */
735 else if (target == EXIT_BLOCK_PTR)
736 return false;
737
738 /* Or replace possibly complicated jump insn by simple jump insn. */
739 else
740 {
741 rtx target_label = block_label (target);
742 rtx barrier, tmp;
743
744 emit_jump_insn_after (gen_jump (target_label), insn);
745 JUMP_LABEL (src->end) = target_label;
746 LABEL_NUSES (target_label)++;
747 if (rtl_dump_file)
748 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
749 INSN_UID (insn), INSN_UID (src->end));
750
751
752 delete_insn_chain (kill_from, insn);
753
754 /* Recognize a tablejump that we are converting to a
755 simple jump and remove its associated CODE_LABEL
756 and ADDR_VEC or ADDR_DIFF_VEC. */
757 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
758 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
759 && GET_CODE (tmp) == JUMP_INSN
760 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
761 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
762 {
763 delete_insn_chain (JUMP_LABEL (insn), tmp);
764 }
765
766 barrier = next_nonnote_insn (src->end);
767 if (!barrier || GET_CODE (barrier) != BARRIER)
768 emit_barrier_after (src->end);
769 }
770
771 /* Keep only one edge out and set proper flags. */
772 while (src->succ->succ_next)
773 remove_edge (src->succ);
774 e = src->succ;
775 if (fallthru)
776 e->flags = EDGE_FALLTHRU;
777 else
778 e->flags = 0;
779
780 e->probability = REG_BR_PROB_BASE;
781 e->count = src->count;
782
783 /* We don't want a block to end on a line-number note since that has
784 the potential of changing the code between -g and not -g. */
785 while (GET_CODE (e->src->end) == NOTE
786 && NOTE_LINE_NUMBER (e->src->end) >= 0)
787 delete_insn (e->src->end);
788
789 if (e->dest != target)
790 redirect_edge_succ (e, target);
791
792 return true;
793 }
794
795 /* Return last loop_beg note appearing after INSN, before start of next
796 basic block. Return INSN if there are no such notes.
797
 798 When emitting a jump to redirect a fallthru edge, it should always appear
 799 after the LOOP_BEG notes, as the loop optimizer expects a loop to start
 800 either with a fallthru edge or with a jump, following the LOOP_BEG note,
 801 to the loop exit test. */
802
803 static rtx
804 last_loop_beg_note (insn)
805 rtx insn;
806 {
807 rtx last = insn;
808
809 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
810 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
811 insn = NEXT_INSN (insn))
812 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
813 last = insn;
814
815 return last;
816 }
817
 818 /* Attempt to change code to redirect edge E to TARGET. Don't do that at
 819 the expense of adding new instructions or reordering basic blocks.
 820 
 821 The function can also be called with the edge destination equal to TARGET.
 822 Then it should try the simplifications and do nothing if none is possible.
 823 
 824 Return true if the transformation succeeded. We still return false in case
 825 E already pointed to TARGET and we didn't manage to simplify the
 826 instruction stream. */
827
828 bool
829 redirect_edge_and_branch (e, target)
830 edge e;
831 basic_block target;
832 {
833 rtx tmp;
834 rtx old_label = e->dest->head;
835 basic_block src = e->src;
836 rtx insn = src->end;
837
838 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
839 return false;
840
841 if (try_redirect_by_replacing_jump (e, target))
842 return true;
843
 844 /* Do this fast path late, as we want the code above to simplify the cases
 845 where it is called on the single edge leaving a basic block containing a
 846 nontrivial jump insn. */
847 else if (e->dest == target)
848 return false;
849
850 /* We can only redirect non-fallthru edges of jump insn. */
851 if (e->flags & EDGE_FALLTHRU)
852 return false;
853 else if (GET_CODE (insn) != JUMP_INSN)
854 return false;
855
856 /* Recognize a tablejump and adjust all matching cases. */
857 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
858 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
859 && GET_CODE (tmp) == JUMP_INSN
860 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
861 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
862 {
863 rtvec vec;
864 int j;
865 rtx new_label = block_label (target);
866
867 if (target == EXIT_BLOCK_PTR)
868 return false;
869 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
870 vec = XVEC (PATTERN (tmp), 0);
871 else
872 vec = XVEC (PATTERN (tmp), 1);
873
874 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
875 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
876 {
877 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
878 --LABEL_NUSES (old_label);
879 ++LABEL_NUSES (new_label);
880 }
881
882 /* Handle casesi dispatch insns */
883 if ((tmp = single_set (insn)) != NULL
884 && SET_DEST (tmp) == pc_rtx
885 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
886 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
887 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
888 {
889 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
890 new_label);
891 --LABEL_NUSES (old_label);
892 ++LABEL_NUSES (new_label);
893 }
894 }
895 else
896 {
897 /* ?? We may play the games with moving the named labels from
898 one basic block to the other in case only one computed_jump is
899 available. */
900 if (computed_jump_p (insn)
901 /* A return instruction can't be redirected. */
902 || returnjump_p (insn))
903 return false;
904
905 /* If the insn doesn't go where we think, we're confused. */
906 if (JUMP_LABEL (insn) != old_label)
907 abort ();
908
909 /* If the substitution doesn't succeed, die. This can happen
910 if the back end emitted unrecognizable instructions or if
911 target is exit block on some arches. */
912 if (!redirect_jump (insn, block_label (target), 0))
913 {
914 if (target == EXIT_BLOCK_PTR)
915 return false;
916 abort ();
917 }
918 }
919
920 if (rtl_dump_file)
921 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
922 e->src->index, e->dest->index, target->index);
923
924 if (e->dest != target)
925 redirect_edge_succ_nodup (e, target);
926
927 return true;
928 }
929
 930 /* Like force_nonfallthru below, but additionally performs redirection.
 931 Used by redirect_edge_and_branch_force. */
932
933 static basic_block
934 force_nonfallthru_and_redirect (e, target)
935 edge e;
936 basic_block target;
937 {
938 basic_block jump_block, new_bb = NULL, src = e->src;
939 rtx note;
940 edge new_edge;
941 int abnormal_edge_flags = 0;
942
943 if (e->flags & EDGE_ABNORMAL)
944 {
 945 /* Irritating special case - fallthru edge to the same block as the
 946 abnormal edge.
 947 We can't redirect the abnormal edge, but we can still split the fallthru
 948 one and create a separate abnormal edge to the original destination.
 949 This allows bb-reorder to make such an edge non-fallthru. */
950 if (e->dest != target)
951 abort ();
952 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
953 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
954 }
955 else if (!(e->flags & EDGE_FALLTHRU))
956 abort ();
957 else if (e->src == ENTRY_BLOCK_PTR)
958 {
959 /* We can't redirect the entry block. Create an empty block at the
960 start of the function which we use to add the new jump. */
961 edge *pe1;
962 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
963
964 /* Change the existing edge's source to be the new block, and add
965 a new edge from the entry block to the new block. */
966 e->src = bb;
967 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
968 if (*pe1 == e)
969 {
970 *pe1 = e->succ_next;
971 break;
972 }
973 e->succ_next = 0;
974 bb->succ = e;
975 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
976 }
977
978 if (e->src->succ->succ_next || abnormal_edge_flags)
979 {
980 /* Create the new structures. */
981
982 /* Position the new block correctly relative to loop notes. */
983 note = last_loop_beg_note (e->src->end);
984 note = NEXT_INSN (note);
985
986 /* ... and ADDR_VECs. */
987 if (note != NULL
988 && GET_CODE (note) == CODE_LABEL
989 && NEXT_INSN (note)
990 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
991 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
992 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
993 note = NEXT_INSN (NEXT_INSN (note));
994
995 jump_block = create_basic_block (note, NULL, e->src);
996 jump_block->count = e->count;
997 jump_block->frequency = EDGE_FREQUENCY (e);
998 jump_block->loop_depth = target->loop_depth;
999
1000 if (target->global_live_at_start)
1001 {
1002 jump_block->global_live_at_start
1003 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1004 jump_block->global_live_at_end
1005 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1006 COPY_REG_SET (jump_block->global_live_at_start,
1007 target->global_live_at_start);
1008 COPY_REG_SET (jump_block->global_live_at_end,
1009 target->global_live_at_start);
1010 }
1011
1012 /* Wire edge in. */
1013 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1014 new_edge->probability = e->probability;
1015 new_edge->count = e->count;
1016
1017 /* Redirect old edge. */
1018 redirect_edge_pred (e, jump_block);
1019 e->probability = REG_BR_PROB_BASE;
1020
1021 new_bb = jump_block;
1022 }
1023 else
1024 jump_block = e->src;
1025
1026 e->flags &= ~EDGE_FALLTHRU;
1027 if (target == EXIT_BLOCK_PTR)
1028 {
1029 if (HAVE_return)
1030 emit_jump_insn_after (gen_return (), jump_block->end);
1031 else
1032 abort ();
1033 }
1034 else
1035 {
1036 rtx label = block_label (target);
1037 emit_jump_insn_after (gen_jump (label), jump_block->end);
1038 JUMP_LABEL (jump_block->end) = label;
1039 LABEL_NUSES (label)++;
1040 }
1041
1042 emit_barrier_after (jump_block->end);
1043 redirect_edge_succ_nodup (e, target);
1044
1045 if (abnormal_edge_flags)
1046 make_edge (src, target, abnormal_edge_flags);
1047
1048 return new_bb;
1049 }
1050
 1051 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
 1052 (and possibly create a new basic block) to make the edge non-fallthru.
 1053 Return the newly created BB or NULL if none was created. */
1054
1055 basic_block
1056 force_nonfallthru (e)
1057 edge e;
1058 {
1059 return force_nonfallthru_and_redirect (e, e->dest);
1060 }
1061
1062 /* Redirect edge even at the expense of creating new jump insn or
1063 basic block. Return new basic block if created, NULL otherwise.
1064 Abort if conversion is impossible. */
1065
1066 basic_block
1067 redirect_edge_and_branch_force (e, target)
1068 edge e;
1069 basic_block target;
1070 {
1071 if (redirect_edge_and_branch (e, target)
1072 || e->dest == target)
1073 return NULL;
1074
1075 /* In case the edge redirection failed, try to force it to be non-fallthru
1076 and redirect newly created simplejump. */
1077 return force_nonfallthru_and_redirect (e, target);
1078 }
1079
1080 /* The given edge should potentially be a fallthru edge. If that is in
1081 fact true, delete the jump and barriers that are in the way. */
1082
1083 void
1084 tidy_fallthru_edge (e, b, c)
1085 edge e;
1086 basic_block b, c;
1087 {
1088 rtx q;
1089
1090 /* ??? In a late-running flow pass, other folks may have deleted basic
1091 blocks by nopping out blocks, leaving multiple BARRIERs between here
 1092 and the target label. They ought to be chastised and fixed.
1093
1094 We can also wind up with a sequence of undeletable labels between
1095 one block and the next.
1096
1097 So search through a sequence of barriers, labels, and notes for
1098 the head of block C and assert that we really do fall through. */
1099
1100 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1101 if (INSN_P (q))
1102 return;
1103
1104 /* Remove what will soon cease being the jump insn from the source block.
1105 If block B consisted only of this single jump, turn it into a deleted
1106 note. */
1107 q = b->end;
1108 if (GET_CODE (q) == JUMP_INSN
1109 && onlyjump_p (q)
1110 && (any_uncondjump_p (q)
1111 || (b->succ == e && e->succ_next == NULL)))
1112 {
1113 #ifdef HAVE_cc0
1114 /* If this was a conditional jump, we need to also delete
1115 the insn that set cc0. */
1116 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1117 q = PREV_INSN (q);
1118 #endif
1119
1120 q = PREV_INSN (q);
1121
1122 /* We don't want a block to end on a line-number note since that has
1123 the potential of changing the code between -g and not -g. */
1124 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1125 q = PREV_INSN (q);
1126 }
1127
1128 /* Selectively unlink the sequence. */
1129 if (q != PREV_INSN (c->head))
1130 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1131
1132 e->flags |= EDGE_FALLTHRU;
1133 }
1134
1135 /* Fix up edges that now fall through, or rather should now fall through
1136 but previously required a jump around now deleted blocks. Simplify
1137 the search by only examining blocks numerically adjacent, since this
1138 is how find_basic_blocks created them. */
1139
1140 void
1141 tidy_fallthru_edges ()
1142 {
1143 basic_block b, c;
1144
1145 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1146 return;
1147
1148 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1149 {
1150 edge s;
1151
1152 c = b->next_bb;
1153
1154 /* We care about simple conditional or unconditional jumps with
1155 a single successor.
1156
1157 If we had a conditional branch to the next instruction when
1158 find_basic_blocks was called, then there will only be one
1159 out edge for the block which ended with the conditional
1160 branch (since we do not create duplicate edges).
1161
1162 Furthermore, the edge will be marked as a fallthru because we
1163 merge the flags for the duplicate edges. So we do not want to
1164 check that the edge is not a FALLTHRU edge. */
1165
1166 if ((s = b->succ) != NULL
1167 && ! (s->flags & EDGE_COMPLEX)
1168 && s->succ_next == NULL
1169 && s->dest == c
1170 /* If the jump insn has side effects, we can't tidy the edge. */
1171 && (GET_CODE (b->end) != JUMP_INSN
1172 || onlyjump_p (b->end)))
1173 tidy_fallthru_edge (s, b, c);
1174 }
1175 }
1176 \f
 1177 /* Helper function for split_edge. Return true in case the edge from BB2
 1178 to BB1 is a back edge of a syntactic loop. */
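/* The test below walks the insn stream from BB1's end to BB2's head and
   checks that the LOOP_BEG/LOOP_END note nesting never drops below its
   level at BB1.  */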
1179
1180 static bool
1181 back_edge_of_syntactic_loop_p (bb1, bb2)
1182 basic_block bb1, bb2;
1183 {
1184 rtx insn;
1185 int count = 0;
1186 basic_block bb;
1187
1188 if (bb1 == bb2)
1189 return true;
1190
1191 /* ??? Could we guarantee that bb indices are monotone, so that we could
1192 just compare them? */
1193 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1194 continue;
1195
1196 if (!bb)
1197 return false;
1198
1199 for (insn = bb1->end; insn != bb2->head && count >= 0;
1200 insn = NEXT_INSN (insn))
1201 if (GET_CODE (insn) == NOTE)
1202 {
1203 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1204 count++;
1205 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1206 count--;
1207 }
1208
1209 return count >= 0;
1210 }
1211
1212 /* Split a (typically critical) edge. Return the new block.
1213 Abort on abnormal edges.
1214
1215 ??? The code generally expects to be called on critical edges.
1216 The case of a block ending in an unconditional jump to a
1217 block with multiple predecessors is not handled optimally. */
1218
1219 basic_block
1220 split_edge (edge_in)
1221 edge edge_in;
1222 {
1223 basic_block bb;
1224 rtx before;
1225
1226 /* Abnormal edges cannot be split. */
1227 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1228 abort ();
1229
1230 /* We are going to place the new block in front of edge destination.
1231 Avoid existence of fallthru predecessors. */
1232 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1233 {
1234 edge e;
1235
1236 for (e = edge_in->dest->pred; e; e = e->pred_next)
1237 if (e->flags & EDGE_FALLTHRU)
1238 break;
1239
1240 if (e)
1241 force_nonfallthru (e);
1242 }
1243
1244 /* Create the basic block note.
1245
1246 Where we place the note can have a noticeable impact on the generated
1247 code. Consider this cfg:
1248
1249 E
1250 |
1251 0
1252 / \
1253 +->1-->2--->E
1254 | |
1255 +--+
1256
1257 If we need to insert an insn on the edge from block 0 to block 1,
1258 we want to ensure the instructions we insert are outside of any
1259 loop notes that physically sit between block 0 and block 1. Otherwise
1260 we confuse the loop optimizer into thinking the loop is a phony. */
1261
1262 if (edge_in->dest != EXIT_BLOCK_PTR
1263 && PREV_INSN (edge_in->dest->head)
1264 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1265 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1266 == NOTE_INSN_LOOP_BEG)
1267 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1268 before = PREV_INSN (edge_in->dest->head);
1269 else if (edge_in->dest != EXIT_BLOCK_PTR)
1270 before = edge_in->dest->head;
1271 else
1272 before = NULL_RTX;
1273
1274 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1275 bb->count = edge_in->count;
1276 bb->frequency = EDGE_FREQUENCY (edge_in);
1277
1278 /* ??? This info is likely going to be out of date very soon. */
1279 if (edge_in->dest->global_live_at_start)
1280 {
1281 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1282 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1283 COPY_REG_SET (bb->global_live_at_start,
1284 edge_in->dest->global_live_at_start);
1285 COPY_REG_SET (bb->global_live_at_end,
1286 edge_in->dest->global_live_at_start);
1287 }
1288
1289 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1290
 1291 /* For non-fallthru edges, we must adjust the predecessor's
1292 jump instruction to target our new block. */
1293 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1294 {
1295 if (!redirect_edge_and_branch (edge_in, bb))
1296 abort ();
1297 }
1298 else
1299 redirect_edge_succ (edge_in, bb);
1300
1301 return bb;
1302 }
1303
1304 /* Queue instructions for insertion on an edge between two basic blocks.
1305 The new instructions and basic blocks (if any) will not appear in the
1306 CFG until commit_edge_insertions is called. */
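/* A minimal sketch of the intended use, assuming PAT is an insn pattern
   built by the caller and E is the edge the new code should execute on:

       insert_insn_on_edge (pat, e);
       ...
       commit_edge_insertions ();

   The queued insns become part of the insn stream (possibly in a new
   basic block created by splitting E) only at the commit call.  */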
1307
1308 void
1309 insert_insn_on_edge (pattern, e)
1310 rtx pattern;
1311 edge e;
1312 {
1313 /* We cannot insert instructions on an abnormal critical edge.
1314 It will be easier to find the culprit if we die now. */
1315 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1316 abort ();
1317
1318 if (e->insns == NULL_RTX)
1319 start_sequence ();
1320 else
1321 push_to_sequence (e->insns);
1322
1323 emit_insn (pattern);
1324
1325 e->insns = get_insns ();
1326 end_sequence ();
1327 }
1328
1329 /* Update the CFG for the instructions queued on edge E. */
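/* If WATCH_CALLS is nonzero, take care not to separate a CALL_INSN from
   the insns that store its return value; the new code is then emitted
   after those insns instead.  */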
1330
1331 static void
1332 commit_one_edge_insertion (e, watch_calls)
1333 edge e;
1334 int watch_calls;
1335 {
1336 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1337 basic_block bb = NULL;
1338
1339 /* Pull the insns off the edge now since the edge might go away. */
1340 insns = e->insns;
1341 e->insns = NULL_RTX;
1342
 1343 /* Special case -- avoid inserting code between a call and the insns
 1344 storing its return value. */
1345 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1346 && e->src != ENTRY_BLOCK_PTR
1347 && GET_CODE (e->src->end) == CALL_INSN)
1348 {
1349 rtx next = next_nonnote_insn (e->src->end);
1350
1351 after = e->dest->head;
1352 /* The first insn after the call may be a stack pop, skip it. */
1353 while (next
1354 && keep_with_call_p (next))
1355 {
1356 after = next;
1357 next = next_nonnote_insn (next);
1358 }
1359 bb = e->dest;
1360 }
1361 if (!before && !after)
1362 {
1363 /* Figure out where to put these things. If the destination has
1364 one predecessor, insert there. Except for the exit block. */
1365 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1366 {
1367 bb = e->dest;
1368
1369 /* Get the location correct wrt a code label, and "nice" wrt
1370 a basic block note, and before everything else. */
1371 tmp = bb->head;
1372 if (GET_CODE (tmp) == CODE_LABEL)
1373 tmp = NEXT_INSN (tmp);
1374 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1375 tmp = NEXT_INSN (tmp);
1376 if (tmp == bb->head)
1377 before = tmp;
1378 else if (tmp)
1379 after = PREV_INSN (tmp);
1380 else
1381 after = get_last_insn ();
1382 }
1383
1384 /* If the source has one successor and the edge is not abnormal,
1385 insert there. Except for the entry block. */
1386 else if ((e->flags & EDGE_ABNORMAL) == 0
1387 && e->src->succ->succ_next == NULL
1388 && e->src != ENTRY_BLOCK_PTR)
1389 {
1390 bb = e->src;
1391
1392 /* It is possible to have a non-simple jump here. Consider a target
1393 where some forms of unconditional jumps clobber a register. This
1394 happens on the fr30 for example.
1395
1396 We know this block has a single successor, so we can just emit
1397 the queued insns before the jump. */
1398 if (GET_CODE (bb->end) == JUMP_INSN)
1399 for (before = bb->end;
1400 GET_CODE (PREV_INSN (before)) == NOTE
1401 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1402 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1403 ;
1404 else
1405 {
1406 /* We'd better be fallthru, or we've lost track of what's what. */
1407 if ((e->flags & EDGE_FALLTHRU) == 0)
1408 abort ();
1409
1410 after = bb->end;
1411 }
1412 }
1413 /* Otherwise we must split the edge. */
1414 else
1415 {
1416 bb = split_edge (e);
1417 after = bb->end;
1418 }
1419 }
1420
1421 /* Now that we've found the spot, do the insertion. */
1422
1423 if (before)
1424 {
1425 emit_insn_before (insns, before);
1426 last = prev_nonnote_insn (before);
1427 }
1428 else
1429 last = emit_insn_after (insns, after);
1430
1431 if (returnjump_p (last))
1432 {
1433 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1434 This is not currently a problem because this only happens
1435 for the (single) epilogue, which already has a fallthru edge
1436 to EXIT. */
1437
1438 e = bb->succ;
1439 if (e->dest != EXIT_BLOCK_PTR
1440 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1441 abort ();
1442
1443 e->flags &= ~EDGE_FALLTHRU;
1444 emit_barrier_after (last);
1445
1446 if (before)
1447 delete_insn (before);
1448 }
1449 else if (GET_CODE (last) == JUMP_INSN)
1450 abort ();
1451
1452 /* Mark the basic block for find_sub_basic_blocks. */
1453 bb->aux = &bb->aux;
1454 }
1455
1456 /* Update the CFG for all queued instructions. */
1457
1458 void
1459 commit_edge_insertions ()
1460 {
1461 basic_block bb;
1462 sbitmap blocks;
1463
1464 #ifdef ENABLE_CHECKING
1465 verify_flow_info ();
1466 #endif
1467
1468 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1469 {
1470 edge e, next;
1471
1472 for (e = bb->succ; e; e = next)
1473 {
1474 next = e->succ_next;
1475 if (e->insns)
1476 commit_one_edge_insertion (e, false);
1477 }
1478 }
1479
1480 blocks = sbitmap_alloc (last_basic_block);
1481 sbitmap_zero (blocks);
1482 FOR_EACH_BB (bb)
1483 if (bb->aux)
1484 {
1485 SET_BIT (blocks, bb->index);
1486 /* Check for forgotten bb->aux values before commit_edge_insertions
1487 call. */
1488 if (bb->aux != &bb->aux)
1489 abort ();
1490 bb->aux = NULL;
1491 }
1492 find_many_sub_basic_blocks (blocks);
1493 sbitmap_free (blocks);
1494 }
1495 \f
 1496 /* Update the CFG for all queued instructions, taking special care when
 1497 inserting code on an edge between a call and the insn storing its return value. */
1498
1499 void
1500 commit_edge_insertions_watch_calls ()
1501 {
1502 basic_block bb;
1503
1504 #ifdef ENABLE_CHECKING
1505 verify_flow_info ();
1506 #endif
1507
1508 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1509 {
1510 edge e, next;
1511
1512 for (e = bb->succ; e; e = next)
1513 {
1514 next = e->succ_next;
1515 if (e->insns)
1516 commit_one_edge_insertion (e, true);
1517 }
1518 }
1519 }
1520 \f
1521 /* Print out one basic block with live information at start and end. */
1522
1523 void
1524 dump_bb (bb, outf)
1525 basic_block bb;
1526 FILE *outf;
1527 {
1528 rtx insn;
1529 rtx last;
1530 edge e;
1531
1532 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1533 bb->index, bb->loop_depth);
1534 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1535 putc ('\n', outf);
1536
1537 fputs (";; Predecessors: ", outf);
1538 for (e = bb->pred; e; e = e->pred_next)
1539 dump_edge_info (outf, e, 0);
1540 putc ('\n', outf);
1541
1542 fputs (";; Registers live at start:", outf);
1543 dump_regset (bb->global_live_at_start, outf);
1544 putc ('\n', outf);
1545
1546 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1547 insn = NEXT_INSN (insn))
1548 print_rtl_single (outf, insn);
1549
1550 fputs (";; Registers live at end:", outf);
1551 dump_regset (bb->global_live_at_end, outf);
1552 putc ('\n', outf);
1553
1554 fputs (";; Successors: ", outf);
1555 for (e = bb->succ; e; e = e->succ_next)
1556 dump_edge_info (outf, e, 1);
1557 putc ('\n', outf);
1558 }
1559
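/* Dump basic block BB to stderr; convenience wrapper intended for use from
   a debugger.  */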
1560 void
1561 debug_bb (bb)
1562 basic_block bb;
1563 {
1564 dump_bb (bb, stderr);
1565 }
1566
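/* Dump the basic block with index N to stderr; convenience wrapper intended
   for use from a debugger.  */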
1567 void
1568 debug_bb_n (n)
1569 int n;
1570 {
1571 dump_bb (BASIC_BLOCK (n), stderr);
1572 }
1573 \f
1574 /* Like print_rtl, but also print out live information for the start of each
1575 basic block. */
1576
1577 void
1578 print_rtl_with_bb (outf, rtx_first)
1579 FILE *outf;
1580 rtx rtx_first;
1581 {
1582 rtx tmp_rtx;
1583
1584 if (rtx_first == 0)
1585 fprintf (outf, "(nil)\n");
1586 else
1587 {
1588 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1589 int max_uid = get_max_uid ();
1590 basic_block *start
1591 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1592 basic_block *end
1593 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1594 enum bb_state *in_bb_p
1595 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1596
1597 basic_block bb;
1598
1599 FOR_EACH_BB_REVERSE (bb)
1600 {
1601 rtx x;
1602
1603 start[INSN_UID (bb->head)] = bb;
1604 end[INSN_UID (bb->end)] = bb;
1605 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1606 {
1607 enum bb_state state = IN_MULTIPLE_BB;
1608
1609 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1610 state = IN_ONE_BB;
1611 in_bb_p[INSN_UID (x)] = state;
1612
1613 if (x == bb->end)
1614 break;
1615 }
1616 }
1617
1618 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1619 {
1620 int did_output;
1621
1622 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1623 {
1624 fprintf (outf, ";; Start of basic block %d, registers live:",
1625 bb->index);
1626 dump_regset (bb->global_live_at_start, outf);
1627 putc ('\n', outf);
1628 }
1629
1630 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1631 && GET_CODE (tmp_rtx) != NOTE
1632 && GET_CODE (tmp_rtx) != BARRIER)
1633 fprintf (outf, ";; Insn is not within a basic block\n");
1634 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1635 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1636
1637 did_output = print_rtl_single (outf, tmp_rtx);
1638
1639 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1640 {
1641 fprintf (outf, ";; End of basic block %d, registers live:\n",
1642 bb->index);
1643 dump_regset (bb->global_live_at_end, outf);
1644 putc ('\n', outf);
1645 }
1646
1647 if (did_output)
1648 putc ('\n', outf);
1649 }
1650
1651 free (start);
1652 free (end);
1653 free (in_bb_p);
1654 }
1655
1656 if (current_function_epilogue_delay_list != 0)
1657 {
1658 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1659 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1660 tmp_rtx = XEXP (tmp_rtx, 1))
1661 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1662 }
1663 }
1664 \f
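/* Update the REG_BR_PROB note attached to the jump insn ending BB so that
   it matches the probability of BRANCH_EDGE (bb).  Do nothing if BB does
   not end in a jump insn, if there is no such note, or if the note already
   agrees with the CFG.  */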
1665 void
1666 update_br_prob_note (bb)
1667 basic_block bb;
1668 {
1669 rtx note;
1670 if (GET_CODE (bb->end) != JUMP_INSN)
1671 return;
1672 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1673 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1674 return;
1675 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1676 }
1677 \f
 1678 /* Verify the CFG consistency. This function checks some CFG invariants and
 1679 aborts when something is wrong. The hope is that this function will help
 1680 to convert many optimization passes to preserve a consistent CFG.
 1681 
 1682 Currently it does the following checks:
1683
1684 - test head/end pointers
1685 - overlapping of basic blocks
1686 - edge list correctness
1687 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1688 - tails of basic blocks (ensure that boundary is necessary)
1689 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1690 and NOTE_INSN_BASIC_BLOCK
1691 - check that all insns are in the basic blocks
1692 (except the switch handling code, barriers and notes)
1693 - check that all returns are followed by barriers
1694
 1695 In the future it can be extended to check a lot of other stuff as well
 1696 (reachability of basic blocks, life information, etc.). */
1697
1698 void
1699 verify_flow_info ()
1700 {
1701 const int max_uid = get_max_uid ();
1702 const rtx rtx_first = get_insns ();
1703 rtx last_head = get_last_insn ();
1704 basic_block *bb_info, *last_visited;
1705 size_t *edge_checksum;
1706 rtx x;
1707 int num_bb_notes, err = 0;
1708 basic_block bb, last_bb_seen;
1709
1710 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1711 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1712 sizeof (basic_block));
1713 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1714
1715 /* Check bb chain & numbers. */
1716 last_bb_seen = ENTRY_BLOCK_PTR;
1717 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1718 {
1719 if (bb != EXIT_BLOCK_PTR
1720 && bb != BASIC_BLOCK (bb->index))
1721 {
1722 error ("bb %d on wrong place", bb->index);
1723 err = 1;
1724 }
1725
1726 if (bb->prev_bb != last_bb_seen)
1727 {
1728 error ("prev_bb of %d should be %d, not %d",
1729 bb->index, last_bb_seen->index, bb->prev_bb->index);
1730 err = 1;
1731 }
1732
1733 last_bb_seen = bb;
1734 }
1735
1736 FOR_EACH_BB_REVERSE (bb)
1737 {
1738 rtx head = bb->head;
1739 rtx end = bb->end;
1740
1741 /* Verify the end of the basic block is in the INSN chain. */
1742 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1743 if (x == end)
1744 break;
1745
1746 if (!x)
1747 {
1748 error ("end insn %d for block %d not found in the insn stream",
1749 INSN_UID (end), bb->index);
1750 err = 1;
1751 }
1752
1753 /* Work backwards from the end to the head of the basic block
1754 to verify the head is in the RTL chain. */
1755 for (; x != NULL_RTX; x = PREV_INSN (x))
1756 {
1757 /* While walking over the insn chain, verify insns appear
1758 in only one basic block and initialize the BB_INFO array
1759 used by other passes. */
1760 if (bb_info[INSN_UID (x)] != NULL)
1761 {
1762 error ("insn %d is in multiple basic blocks (%d and %d)",
1763 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1764 err = 1;
1765 }
1766
1767 bb_info[INSN_UID (x)] = bb;
1768
1769 if (x == head)
1770 break;
1771 }
1772 if (!x)
1773 {
1774 error ("head insn %d for block %d not found in the insn stream",
1775 INSN_UID (head), bb->index);
1776 err = 1;
1777 }
1778
1779 last_head = x;
1780 }
1781
1782 /* Now check the basic blocks (boundaries etc.) */
1783 FOR_EACH_BB_REVERSE (bb)
1784 {
1785 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1786 edge e;
1787 rtx note;
1788
1789 if (INSN_P (bb->end)
1790 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1791 && bb->succ && bb->succ->succ_next
1792 && any_condjump_p (bb->end))
1793 {
1794 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1795 {
1796 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1797 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1798 err = 1;
1799 }
1800 }
1801 if (bb->count < 0)
1802 {
1803 error ("verify_flow_info: Wrong count of block %i %i",
1804 bb->index, (int)bb->count);
1805 err = 1;
1806 }
1807 if (bb->frequency < 0)
1808 {
1809 error ("verify_flow_info: Wrong frequency of block %i %i",
1810 bb->index, bb->frequency);
1811 err = 1;
1812 }
1813 for (e = bb->succ; e; e = e->succ_next)
1814 {
1815 if (last_visited [e->dest->index + 2] == bb)
1816 {
1817 error ("verify_flow_info: Duplicate edge %i->%i",
1818 e->src->index, e->dest->index);
1819 err = 1;
1820 }
1821 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1822 {
1823 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1824 e->src->index, e->dest->index, e->probability);
1825 err = 1;
1826 }
1827 if (e->count < 0)
1828 {
1829 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1830 e->src->index, e->dest->index, (int)e->count);
1831 err = 1;
1832 }
1833
1834 last_visited [e->dest->index + 2] = bb;
1835
1836 if (e->flags & EDGE_FALLTHRU)
1837 n_fallthru++;
1838
1839 if ((e->flags & ~(EDGE_DFS_BACK | EDGE_CAN_FALLTHRU)) == 0)
1840 n_branch++;
1841
1842 if (e->flags & EDGE_ABNORMAL_CALL)
1843 n_call++;
1844
1845 if (e->flags & EDGE_EH)
1846 n_eh++;
1847 else if (e->flags & EDGE_ABNORMAL)
1848 n_abnormal++;
1849
1850 if ((e->flags & EDGE_FALLTHRU)
1851 && e->src != ENTRY_BLOCK_PTR
1852 && e->dest != EXIT_BLOCK_PTR)
1853 {
1854 rtx insn;
1855
1856 if (e->src->next_bb != e->dest)
1857 {
1858 error
1859 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1860 e->src->index, e->dest->index);
1861 err = 1;
1862 }
1863 else
1864 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1865 insn = NEXT_INSN (insn))
1866 if (GET_CODE (insn) == BARRIER
1867 #ifndef CASE_DROPS_THROUGH
1868 || INSN_P (insn)
1869 #else
1870 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1871 #endif
1872 )
1873 {
1874 error ("verify_flow_info: Incorrect fallthru %i->%i",
1875 e->src->index, e->dest->index);
1876 fatal_insn ("wrong insn in the fallthru edge", insn);
1877 err = 1;
1878 }
1879 }
1880
1881 if (e->src != bb)
1882 {
1883 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1884 bb->index);
1885 fprintf (stderr, "Predecessor: ");
1886 dump_edge_info (stderr, e, 0);
1887 fprintf (stderr, "\nSuccessor: ");
1888 dump_edge_info (stderr, e, 1);
1889 fprintf (stderr, "\n");
1890 err = 1;
1891 }
1892
1893 edge_checksum[e->dest->index + 2] += (size_t) e;
1894 }
1895
1896 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1897 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1898 {
1899 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1900 err = 1;
1901 }
1902 if (n_branch
1903 && (GET_CODE (bb->end) != JUMP_INSN
1904 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1905 || any_condjump_p (bb->end)))))
1906 {
1907 error ("Too many outgoing branch edges from bb %i", bb->index);
1908 err = 1;
1909 }
1910 if (n_fallthru && any_uncondjump_p (bb->end))
1911 {
1912 error ("Fallthru edge after unconditional jump %i", bb->index);
1913 err = 1;
1914 }
1915 if (n_branch != 1 && any_uncondjump_p (bb->end))
1916 {
1917 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1918 err = 1;
1919 }
1920 if (n_branch != 1 && any_condjump_p (bb->end)
1921 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1922 {
1923 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1924 err = 1;
1925 }
1926 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1927 {
1928 error ("Call edges for non-call insn in bb %i", bb->index);
1929 err = 1;
1930 }
1931 if (n_abnormal
1932 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1933 && (GET_CODE (bb->end) != JUMP_INSN
1934 || any_condjump_p (bb->end)
1935 || any_uncondjump_p (bb->end)))
1936 {
1937 error ("Abnormal edges for no purpose in bb %i", bb->index);
1938 err = 1;
1939 }
1940
1941 if (!n_fallthru)
1942 {
1943 rtx insn;
1944
1945 /* Ensure existence of barrier in BB with no fallthru edges. */
1946 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1947 insn = NEXT_INSN (insn))
1948 if (!insn
1949 || (GET_CODE (insn) == NOTE
1950 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1951 {
1952 error ("missing barrier after block %i", bb->index);
1953 err = 1;
1954 break;
1955 }
1956 }
1957
1958 for (e = bb->pred; e; e = e->pred_next)
1959 {
1960 if (e->dest != bb)
1961 {
1962 error ("basic block %d pred edge is corrupted", bb->index);
1963 fputs ("Predecessor: ", stderr);
1964 dump_edge_info (stderr, e, 0);
1965 fputs ("\nSuccessor: ", stderr);
1966 dump_edge_info (stderr, e, 1);
1967 fputc ('\n', stderr);
1968 err = 1;
1969 }
1970 edge_checksum[e->dest->index + 2] -= (size_t) e;
1971 }
1972
1973 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
1974 if (BLOCK_FOR_INSN (x) != bb)
1975 {
1976 debug_rtx (x);
1977 if (! BLOCK_FOR_INSN (x))
1978 error
1979 ("insn %d inside basic block %d but block_for_insn is NULL",
1980 INSN_UID (x), bb->index);
1981 else
1982 error
1983 ("insn %d inside basic block %d but block_for_insn is %i",
1984 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1985
1986 err = 1;
1987 }
1988
1989 	  /* OK, pointers are correct.  Now check the header of the basic
1990 	     block.  It ought to contain an optional CODE_LABEL followed
1991 	     by NOTE_BASIC_BLOCK.  */
1992 x = bb->head;
1993 if (GET_CODE (x) == CODE_LABEL)
1994 {
1995 if (bb->end == x)
1996 {
1997 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1998 bb->index);
1999 err = 1;
2000 }
2001
2002 x = NEXT_INSN (x);
2003 }
2004
2005 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2006 {
2007 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2008 bb->index);
2009 err = 1;
2010 }
2011
2012 if (bb->end == x)
2013 	    /* Do checks for empty blocks here.  */
2014 ;
2015 else
2016 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2017 {
2018 if (NOTE_INSN_BASIC_BLOCK_P (x))
2019 {
2020 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2021 INSN_UID (x), bb->index);
2022 err = 1;
2023 }
2024
2025 if (x == bb->end)
2026 break;
2027
2028 if (control_flow_insn_p (x))
2029 {
2030 error ("in basic block %d:", bb->index);
2031 fatal_insn ("flow control insn inside a basic block", x);
2032 }
2033 }
2034 }
2035
2036 /* Complete edge checksumming for ENTRY and EXIT. */
2037 {
2038 edge e;
2039
2040 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
2041 edge_checksum[e->dest->index + 2] += (size_t) e;
2042
2043 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2044 edge_checksum[e->dest->index + 2] -= (size_t) e;
2045 }
2046
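  /* A nonzero checksum for a block means that some edge appears on a
     successor list but is missing from the matching predecessor list,
     or vice versa.  */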
2047 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2048 if (edge_checksum[bb->index + 2])
2049 {
2050 error ("basic block %i edge lists are corrupted", bb->index);
2051 err = 1;
2052 }
2053
2054 num_bb_notes = 0;
2055 last_bb_seen = ENTRY_BLOCK_PTR;
2056
2057 for (x = rtx_first; x; x = NEXT_INSN (x))
2058 {
2059 if (NOTE_INSN_BASIC_BLOCK_P (x))
2060 {
2061 bb = NOTE_BASIC_BLOCK (x);
2062
2063 num_bb_notes++;
2064 if (bb != last_bb_seen->next_bb)
2065 internal_error ("basic blocks not numbered consecutively");
2066
2067 last_bb_seen = bb;
2068 }
2069
2070 if (!bb_info[INSN_UID (x)])
2071 {
2072 switch (GET_CODE (x))
2073 {
2074 case BARRIER:
2075 case NOTE:
2076 break;
2077
2078 case CODE_LABEL:
2079 	      /* An addr_vec is placed outside any basic block.  */
2080 if (NEXT_INSN (x)
2081 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2082 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2083 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2084 x = NEXT_INSN (x);
2085
2086 /* But in any case, non-deletable labels can appear anywhere. */
2087 break;
2088
2089 default:
2090 fatal_insn ("insn outside basic block", x);
2091 }
2092 }
2093
2094 if (INSN_P (x)
2095 && GET_CODE (x) == JUMP_INSN
2096 && returnjump_p (x) && ! condjump_p (x)
2097 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2098 fatal_insn ("return not followed by barrier", x);
2099 }
2100
2101 if (num_bb_notes != n_basic_blocks)
2102 internal_error
2103 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2104 num_bb_notes, n_basic_blocks);
2105
2106 if (err)
2107 internal_error ("verify_flow_info failed");
2108
2109 /* Clean up. */
2110 free (bb_info);
2111 free (last_visited);
2112 free (edge_checksum);
2113 }
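/* Illustrative sketch only, not part of the original file: a pass that has
   just rewritten the insn chain would typically re-run the consistency
   checks above in a checking-enabled build.  The wrapper name
   check_cfg_after_pass is hypothetical.  */
#if 0
static void
check_cfg_after_pass ()
{
#ifdef ENABLE_CHECKING
  /* Aborts via internal_error if any invariant verified above is violated.  */
  verify_flow_info ();
#endif
}
#endif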
2114 \f
2115 /* Assume that the preceding pass has possibly eliminated jump instructions
2116    or converted unconditional jumps.  Eliminate the dead edges from the CFG.
2117    Return true if any edges are eliminated.  */
2118
2119 bool
2120 purge_dead_edges (bb)
2121 basic_block bb;
2122 {
2123 edge e, next;
2124 rtx insn = bb->end, note;
2125 bool purged = false;
2126
2127 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2128 if (GET_CODE (insn) == INSN
2129 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2130 {
2131 rtx eqnote;
2132
2133 if (! may_trap_p (PATTERN (insn))
2134 || ((eqnote = find_reg_equal_equiv_note (insn))
2135 && ! may_trap_p (XEXP (eqnote, 0))))
2136 remove_note (insn, note);
2137 }
2138
2139 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2140 for (e = bb->succ; e; e = next)
2141 {
2142 next = e->succ_next;
2143 if (e->flags & EDGE_EH)
2144 {
2145 if (can_throw_internal (bb->end))
2146 continue;
2147 }
2148 else if (e->flags & EDGE_ABNORMAL_CALL)
2149 {
2150 if (GET_CODE (bb->end) == CALL_INSN
2151 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2152 || INTVAL (XEXP (note, 0)) >= 0))
2153 continue;
2154 }
2155 else
2156 continue;
2157
2158 remove_edge (e);
2159 bb->flags |= BB_DIRTY;
2160 purged = true;
2161 }
2162
2163 if (GET_CODE (insn) == JUMP_INSN)
2164 {
2165 rtx note;
2166 edge b,f;
2167
2168       /* We care only about conditional jumps and simplejumps.  */
2169 if (!any_condjump_p (insn)
2170 && !returnjump_p (insn)
2171 && !simplejump_p (insn))
2172 return purged;
2173
2174       /* Branch probability/prediction notes are defined only for
2175          condjumps.  We may have turned the condjump into a simplejump.  */
2176 if (simplejump_p (insn))
2177 {
2178 note = find_reg_note (insn, REG_BR_PROB, NULL);
2179 if (note)
2180 remove_note (insn, note);
2181 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2182 remove_note (insn, note);
2183 }
2184
2185 for (e = bb->succ; e; e = next)
2186 {
2187 next = e->succ_next;
2188
2189 	  /* Do not let abnormal flags leak from computed jumps turned
2190 	     into simplejumps.  */
2191
2192 e->flags &= ~EDGE_ABNORMAL;
2193
2194 /* See if this edge is one we should keep. */
2195 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2196 /* A conditional jump can fall through into the next
2197 block, so we should keep the edge. */
2198 continue;
2199 else if (e->dest != EXIT_BLOCK_PTR
2200 && e->dest->head == JUMP_LABEL (insn))
2201 /* If the destination block is the target of the jump,
2202 keep the edge. */
2203 continue;
2204 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2205 /* If the destination block is the exit block, and this
2206 instruction is a return, then keep the edge. */
2207 continue;
2208 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2209 /* Keep the edges that correspond to exceptions thrown by
2210 this instruction. */
2211 continue;
2212
2213 /* We do not need this edge. */
2214 bb->flags |= BB_DIRTY;
2215 purged = true;
2216 remove_edge (e);
2217 }
2218
2219 if (!bb->succ || !purged)
2220 return purged;
2221
2222 if (rtl_dump_file)
2223 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2224
2225 if (!optimize)
2226 return purged;
2227
2228 /* Redistribute probabilities. */
2229 if (!bb->succ->succ_next)
2230 {
2231 bb->succ->probability = REG_BR_PROB_BASE;
2232 bb->succ->count = bb->count;
2233 }
2234 else
2235 {
2236 note = find_reg_note (insn, REG_BR_PROB, NULL);
2237 if (!note)
2238 return purged;
2239
2240 b = BRANCH_EDGE (bb);
2241 f = FALLTHRU_EDGE (bb);
2242 b->probability = INTVAL (XEXP (note, 0));
2243 f->probability = REG_BR_PROB_BASE - b->probability;
2244 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2245 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2246 }
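	  /* Worked example (illustrative): assuming REG_BR_PROB_BASE of 10000,
	     a REG_BR_PROB note of 6000 and bb->count of 200 give the branch
	     edge probability 6000 and count 120, and the fallthru edge
	     probability 4000 and count 80.  */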
2247
2248 return purged;
2249 }
2250
2251   /* If we don't see a jump insn, we don't know exactly why the block would
2252      have been broken at this point.  Look for a simple, non-fallthru edge,
2253      as these are only created by conditional branches.  If we find such an
2254      edge, we know that there used to be a jump here and can then safely
2255      remove all non-fallthru edges.  */
2256 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2257 e = e->succ_next)
2258 ;
2259
2260 if (!e)
2261 return purged;
2262
2263 for (e = bb->succ; e; e = next)
2264 {
2265 next = e->succ_next;
2266 if (!(e->flags & EDGE_FALLTHRU))
2267 {
2268 bb->flags |= BB_DIRTY;
2269 remove_edge (e);
2270 purged = true;
2271 }
2272 }
2273
2274 if (!bb->succ || bb->succ->succ_next)
2275 abort ();
2276
2277 bb->succ->probability = REG_BR_PROB_BASE;
2278 bb->succ->count = bb->count;
2279
2280 if (rtl_dump_file)
2281 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2282 bb->index);
2283 return purged;
2284 }
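/* Illustrative sketch only, not part of the original file: after a
   transformation simplifies or deletes the jump ending a single block,
   purge_dead_edges above re-canonicalizes that block's successor edges in
   isolation.  The helper name purge_after_jump_change is hypothetical.  */
#if 0
static void
purge_after_jump_change (bb)
     basic_block bb;
{
  /* Drop the successor edges that no longer match bb->end; if only one
     edge survives, it receives the whole probability and count.  */
  if (purge_dead_edges (bb) && rtl_dump_file)
    fprintf (rtl_dump_file, "Re-canonicalized edges of bb %i\n", bb->index);
}
#endif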
2285
2286 /* Search all basic blocks for potentially dead edges and purge them. Return
2287 true if some edge has been eliminated. */
2288
2289 bool
2290 purge_all_dead_edges (update_life_p)
2291 int update_life_p;
2292 {
2293 int purged = false;
2294 sbitmap blocks = 0;
2295 basic_block bb;
2296
2297 if (update_life_p)
2298 {
2299 blocks = sbitmap_alloc (last_basic_block);
2300 sbitmap_zero (blocks);
2301 }
2302
2303 FOR_EACH_BB (bb)
2304 {
2305 bool purged_here = purge_dead_edges (bb);
2306
2307 purged |= purged_here;
2308 if (purged_here && update_life_p)
2309 SET_BIT (blocks, bb->index);
2310 }
2311
2312 if (update_life_p && purged)
2313 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2314 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2315 | PROP_KILL_DEAD_CODE);
2316
2317 if (update_life_p)
2318 sbitmap_free (blocks);
2319 return purged;
2320 }
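/* Illustrative sketch only, not part of the original file: a typical caller
   of purge_all_dead_edges above, run after an optimization that may have
   left stale edges behind.  The wrapper name cleanup_stale_edges and the use
   of ENABLE_CHECKING as the trigger for re-verification are assumptions.  */
#if 0
static void
cleanup_stale_edges ()
{
  /* Remove edges that no longer correspond to any jump, call or
     exception, updating life information for the affected blocks.  */
  if (purge_all_dead_edges (1))
    {
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
    }
}
#endif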