gcc/cfgbuild.c
1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* find_basic_blocks divides the current function's rtl into basic
23 blocks and constructs the CFG. The blocks are recorded in the
24 basic_block_info array; the CFG exists in the edge structures
25 referenced by the blocks.
26
27 find_basic_blocks also finds any unreachable loops and deletes them.
28
29 Available functionality:
30 - CFG construction
31 find_basic_blocks
32 - Local CFG construction
33 find_sub_basic_blocks
34 */
35 \f
36 #include "config.h"
37 #include "system.h"
38 #include "tree.h"
39 #include "rtl.h"
40 #include "hard-reg-set.h"
41 #include "basic-block.h"
42 #include "regs.h"
43 #include "flags.h"
44 #include "output.h"
45 #include "function.h"
46 #include "except.h"
47 #include "toplev.h"
48 #include "timevar.h"
49
50 #include "obstack.h"
51 static int count_basic_blocks PARAMS ((rtx));
52 static void find_basic_blocks_1 PARAMS ((rtx));
53 static rtx find_label_refs PARAMS ((rtx, rtx));
54 static void make_edges PARAMS ((rtx, int, int, int));
55 static void make_label_edge PARAMS ((sbitmap *, basic_block,
56 rtx, int));
57 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
58
59 /* Count the basic blocks of the function. */
60
61 static int
62 count_basic_blocks (f)
63 rtx f;
64 {
65 register rtx insn;
66 register RTX_CODE prev_code;
67 register int count = 0;
68 int saw_abnormal_edge = 0;
69
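/* Pretend the previous insn was a jump so that the first insn of the function begins a new basic block. */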
70 prev_code = JUMP_INSN;
71 for (insn = f; insn; insn = NEXT_INSN (insn))
72 {
73 enum rtx_code code = GET_CODE (insn);
74
75 if (code == CODE_LABEL
76 || (GET_RTX_CLASS (code) == 'i'
77 && (prev_code == JUMP_INSN
78 || prev_code == BARRIER
79 || saw_abnormal_edge)))
80 {
81 saw_abnormal_edge = 0;
82 count++;
83 }
84
85 /* Record whether this insn created an edge. */
86 if (code == CALL_INSN)
87 {
88 rtx note;
89
90 /* If there is a nonlocal goto label and the specified
91 region number isn't -1, we have an edge. */
92 if (nonlocal_goto_handler_labels
93 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
94 || INTVAL (XEXP (note, 0)) >= 0))
95 saw_abnormal_edge = 1;
96
97 else if (can_throw_internal (insn))
98 saw_abnormal_edge = 1;
99 }
100 else if (flag_non_call_exceptions
101 && code == INSN
102 && can_throw_internal (insn))
103 saw_abnormal_edge = 1;
104
105 if (code != NOTE)
106 prev_code = code;
107 }
108
109 /* The rest of the compiler works a bit more smoothly when we don't have to
110 check for the edge case of do-nothing functions with no basic blocks. */
111 if (count == 0)
112 {
113 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
114 count = 1;
115 }
116
117 return count;
118 }
119
120 /* Scan a list of insns for labels referred to other than by jumps.
121 This is used to scan the alternatives of a call placeholder. */
122 static rtx
123 find_label_refs (f, lvl)
124 rtx f;
125 rtx lvl;
126 {
127 rtx insn;
128
129 for (insn = f; insn; insn = NEXT_INSN (insn))
130 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
131 {
132 rtx note;
133
134 /* Make a list of all labels referred to other than by jumps
135 (which just don't have the REG_LABEL notes).
136
137 Make a special exception for labels followed by an ADDR*VEC,
138 as this would be a part of the tablejump setup code.
139
140 Make a special exception for registers loaded with label
141 values just before jump insns that use them. */
142
143 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
144 if (REG_NOTE_KIND (note) == REG_LABEL)
145 {
146 rtx lab = XEXP (note, 0), next;
147
148 if ((next = next_nonnote_insn (lab)) != NULL
149 && GET_CODE (next) == JUMP_INSN
150 && (GET_CODE (PATTERN (next)) == ADDR_VEC
151 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
152 ;
153 else if (GET_CODE (lab) == NOTE)
154 ;
155 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
156 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
157 ;
158 else
159 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
160 }
161 }
162
163 return lvl;
164 }
165 \f
166 /* Create an edge from basic block SRC to the block containing LABEL.
167 FLAGS gives auxiliary information about the edge, as for make_edge. */
170
171 static void
172 make_label_edge (edge_cache, src, label, flags)
173 sbitmap *edge_cache;
174 basic_block src;
175 rtx label;
176 int flags;
177 {
178 if (GET_CODE (label) != CODE_LABEL)
179 abort ();
180
181 /* If the label was never emitted, this insn is junk, but avoid a
182 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
183 as a result of a syntax error and a diagnostic has already been
184 printed. */
185
186 if (INSN_UID (label) == 0)
187 return;
188
189 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
190 }
191
192 /* Create the exception handling edges generated by INSN. */
193
194 static void
195 make_eh_edge (edge_cache, src, insn)
196 sbitmap *edge_cache;
197 basic_block src;
198 rtx insn;
199 {
200 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
201 rtx handlers, i;
202
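/* reachable_handlers returns an INSN_LIST of the labels of every handler
   that control may reach from INSN; add an EH edge to each of them. */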
203 handlers = reachable_handlers (insn);
204
205 for (i = handlers; i; i = XEXP (i, 1))
206 make_label_edge (edge_cache, src, XEXP (i, 0),
207 EDGE_ABNORMAL | EDGE_EH | is_call);
208
209 free_INSN_LIST_list (&handlers);
210 }
211 /* Identify the edges between basic blocks MIN to MAX.
212
213 LABEL_VALUE_LIST is a list of labels whose addresses are taken other
214 than by jumps; a computed jump may reach any of them, or any label on
215 the forced_labels list. If UPDATE_P is nonzero, the blocks' existing
216 edges are preserved and seeded into the edge cache so that no
217 duplicate edges are created. */
218
219 static void
220 make_edges (label_value_list, min, max, update_p)
221 rtx label_value_list;
222 int min, max, update_p;
223 {
224 int i;
225 sbitmap *edge_cache = NULL;
226
227 /* Assume no computed jump; revise as we create edges. */
228 current_function_has_computed_jump = 0;
229
230 /* Heavy use of computed goto in machine-generated code can lead to
231 nearly fully-connected CFGs. In that case we spend a significant
232 amount of time searching the edge lists for duplicates. */
233 if (forced_labels || label_value_list)
234 {
235 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
236 sbitmap_vector_zero (edge_cache, n_basic_blocks);
237
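/* When updating an existing CFG, seed the cache with the edges already
   present so that no duplicate edges are created below. */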
238 if (update_p)
239 for (i = min; i <= max; ++i)
240 {
241 edge e;
242 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
243 if (e->dest != EXIT_BLOCK_PTR)
244 SET_BIT (edge_cache[i], e->dest->index);
245 }
246 }
247
248 /* By nature of the way these get numbered, block 0 is always the entry. */
249 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
250
251 for (i = min; i <= max; ++i)
252 {
253 basic_block bb = BASIC_BLOCK (i);
254 rtx insn, x;
255 enum rtx_code code;
256 int force_fallthru = 0;
257
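/* A head label with LABEL_ALTERNATE_NAME set marks an alternate entry
   point, so make the block reachable directly from the entry block. */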
258 if (GET_CODE (bb->head) == CODE_LABEL
259 && LABEL_ALTERNATE_NAME (bb->head))
260 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
261
262 /* Examine the last instruction of the block, and discover the
263 ways we can leave the block. */
264
265 insn = bb->end;
266 code = GET_CODE (insn);
267
268 /* A branch. */
269 if (code == JUMP_INSN)
270 {
271 rtx tmp;
272
273 /* Recognize exception handling placeholders. */
274 if (GET_CODE (PATTERN (insn)) == RESX)
275 make_eh_edge (edge_cache, bb, insn);
276
277 /* Recognize a non-local goto as a branch outside the
278 current function. */
279 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
280 ;
281
282 /* ??? Recognize a tablejump and do the right thing. */
283 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
284 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
285 && GET_CODE (tmp) == JUMP_INSN
286 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
287 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
288 {
289 rtvec vec;
290 int j;
291
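/* An ADDR_VEC holds its labels in operand 0; an ADDR_DIFF_VEC holds
   them in operand 1 (operand 0 is the base label). */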
292 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
293 vec = XVEC (PATTERN (tmp), 0);
294 else
295 vec = XVEC (PATTERN (tmp), 1);
296
297 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
298 make_label_edge (edge_cache, bb,
299 XEXP (RTVEC_ELT (vec, j), 0), 0);
300
301 /* Some targets (e.g., ARM) emit a conditional jump that also
302 contains the out-of-range target. Scan for these and
303 add an edge if necessary. */
304 if ((tmp = single_set (insn)) != NULL
305 && SET_DEST (tmp) == pc_rtx
306 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
307 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
308 make_label_edge (edge_cache, bb,
309 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
310
311 #ifdef CASE_DROPS_THROUGH
312 /* Silly VAXen. The ADDR_VEC is going to be in the way of
313 us naturally detecting fallthru into the next block. */
314 force_fallthru = 1;
315 #endif
316 }
317
318 /* If this is a computed jump, then mark it as reaching
319 everything on the label_value_list and forced_labels list. */
320 else if (computed_jump_p (insn))
321 {
322 current_function_has_computed_jump = 1;
323
324 for (x = label_value_list; x; x = XEXP (x, 1))
325 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
326
327 for (x = forced_labels; x; x = XEXP (x, 1))
328 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
329 }
330
331 /* A return insn creates an edge to the exit block. */
332 else if (returnjump_p (insn))
333 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
334
335 /* Otherwise, we have a plain conditional or unconditional jump. */
336 else
337 {
338 if (! JUMP_LABEL (insn))
339 abort ();
340 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
341 }
342 }
343
344 /* If this is a sibling call insn, then this is in effect a
345 combined call and return, and so we need an edge to the
346 exit block. No need to worry about EH edges, since we
347 wouldn't have created the sibling call in the first place. */
348
349 if (code == CALL_INSN && SIBLING_CALL_P (insn))
350 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
351 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
352
353 /* If this is a CALL_INSN, then mark it as reaching the active EH
354 handler for this CALL_INSN. If we're handling non-call
355 exceptions then any insn can reach any of the active handlers.
356
357 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
358
359 else if (code == CALL_INSN || flag_non_call_exceptions)
360 {
361 /* Add any appropriate EH edges. */
362 make_eh_edge (edge_cache, bb, insn);
363
364 if (code == CALL_INSN && nonlocal_goto_handler_labels)
365 {
366 /* ??? This could be made smarter: in some cases it's possible
367 to tell that certain calls will not do a nonlocal goto.
368
369 For example, if the nested functions that do the nonlocal
370 gotos do not have their addresses taken, then only calls to
371 those functions or to other nested functions that use them
372 could possibly do nonlocal gotos. */
373 /* We do know that a REG_EH_REGION note with a value less
374 than 0 is guaranteed not to perform a non-local goto. */
375 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
376 if (!note || INTVAL (XEXP (note, 0)) >= 0)
377 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
378 make_label_edge (edge_cache, bb, XEXP (x, 0),
379 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
380 }
381 }
382
383 /* Find out if we can drop through to the next block. */
384 insn = next_nonnote_insn (insn);
385 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
386 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
387 else if (i + 1 < n_basic_blocks)
388 {
389 rtx tmp = BLOCK_HEAD (i + 1);
390 if (GET_CODE (tmp) == NOTE)
391 tmp = next_nonnote_insn (tmp);
392 if (force_fallthru || insn == tmp)
393 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
394 }
395 }
396
397 if (edge_cache)
398 sbitmap_vector_free (edge_cache);
399 }
400 \f
401 /* Find all basic blocks of the function whose first insn is F.
402
403 Collect and return a list of labels whose addresses are taken. This
404 will be used in make_edges for use with computed gotos. */
405
406 static void
407 find_basic_blocks_1 (f)
408 rtx f;
409 {
410 register rtx insn, next;
411 int i = 0;
412 rtx bb_note = NULL_RTX;
413 rtx lvl = NULL_RTX;
414 rtx trll = NULL_RTX;
415 rtx head = NULL_RTX;
416 rtx end = NULL_RTX;
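/* LVL accumulates labels referenced other than by jumps and becomes
   label_value_list; TRLL accumulates the tail recursion labels of
   CALL_PLACEHOLDERs and becomes tail_recursion_label_list. */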
417
418 /* We process the instructions in a slightly different way than we did
419 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
420 closed out the previous block, so that it gets attached at the proper
421 place. Since this form should be equivalent to the previous,
422 count_basic_blocks continues to use the old form as a check. */
423
424 for (insn = f; insn; insn = next)
425 {
426 enum rtx_code code = GET_CODE (insn);
427
428 next = NEXT_INSN (insn);
429
430 switch (code)
431 {
432 case NOTE:
433 {
434 int kind = NOTE_LINE_NUMBER (insn);
435
436 /* Look for basic block notes with which to keep the
437 basic_block_info pointers stable. Unthread the note now;
438 we'll put it back at the right place in create_basic_block.
439 Or not at all if we've already found a note in this block. */
440 if (kind == NOTE_INSN_BASIC_BLOCK)
441 {
442 if (bb_note == NULL_RTX)
443 bb_note = insn;
444 else
445 next = flow_delete_insn (insn);
446 }
447 break;
448 }
449
450 case CODE_LABEL:
451 /* A basic block starts at a label. If we've closed one off due
452 to a barrier or some such, no need to do it again. */
453 if (head != NULL_RTX)
454 {
455 create_basic_block (i++, head, end, bb_note);
456 bb_note = NULL_RTX;
457 }
458
459 head = end = insn;
460 break;
461
462 case JUMP_INSN:
463 /* A basic block ends at a jump. */
464 if (head == NULL_RTX)
465 head = insn;
466 else
467 {
468 /* ??? Make a special check for table jumps. The way this
469 happens is truly and amazingly gross. We are about to
470 create a basic block that contains just a code label and
471 an addr*vec jump insn. Worse, an addr_diff_vec creates
472 its own natural loop.
473
474 Prevent this bit of brain damage, pasting things together
475 correctly in make_edges.
476
477 The correct solution involves emitting the table directly
478 on the tablejump instruction as a note, or JUMP_LABEL. */
479
480 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
481 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
482 {
483 head = end = NULL;
484 n_basic_blocks--;
485 break;
486 }
487 }
488 end = insn;
489 goto new_bb_inclusive;
490
491 case BARRIER:
492 /* A basic block ends at a barrier. It may be that an unconditional
493 jump already closed the basic block -- no need to do it again. */
494 if (head == NULL_RTX)
495 break;
496 goto new_bb_exclusive;
497
498 case CALL_INSN:
499 {
500 /* Record whether this call created an edge. */
501 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
502 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
503
504 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
505 {
506 /* Scan each of the alternatives for label refs. */
507 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
508 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
509 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
510 /* Record its tail recursion label, if any. */
511 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
512 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
513 }
514
515 /* A basic block ends at a call that can either throw or
516 do a non-local goto. */
517 if ((nonlocal_goto_handler_labels && region >= 0)
518 || can_throw_internal (insn))
519 {
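/* The "inclusive" entry ends the block at INSN itself; the "exclusive"
   entry (used for barriers) ends it at the last insn already recorded. */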
520 new_bb_inclusive:
521 if (head == NULL_RTX)
522 head = insn;
523 end = insn;
524
525 new_bb_exclusive:
526 create_basic_block (i++, head, end, bb_note);
527 head = end = NULL_RTX;
528 bb_note = NULL_RTX;
529 break;
530 }
531 }
532 /* Fall through. */
533
534 case INSN:
535 /* Non-call exceptions generate new blocks just like calls. */
536 if (flag_non_call_exceptions && can_throw_internal (insn))
537 goto new_bb_inclusive;
538
539 if (head == NULL_RTX)
540 head = insn;
541 end = insn;
542 break;
543
544 default:
545 abort ();
546 }
547
548 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
549 {
550 rtx note;
551
552 /* Make a list of all labels referred to other than by jumps.
553
554 Make a special exception for labels followed by an ADDR*VEC,
555 as this would be a part of the tablejump setup code.
556
557 Make a special exception for registers loaded with label
558 values just before jump insns that use them. */
559
560 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
561 if (REG_NOTE_KIND (note) == REG_LABEL)
562 {
563 rtx lab = XEXP (note, 0), next;
564
565 if ((next = next_nonnote_insn (lab)) != NULL
566 && GET_CODE (next) == JUMP_INSN
567 && (GET_CODE (PATTERN (next)) == ADDR_VEC
568 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
569 ;
570 else if (GET_CODE (lab) == NOTE)
571 ;
572 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
573 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
574 ;
575 else
576 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
577 }
578 }
579 }
580
581 if (head != NULL_RTX)
582 create_basic_block (i++, head, end, bb_note);
583 else if (bb_note)
584 flow_delete_insn (bb_note);
585
586 if (i != n_basic_blocks)
587 abort ();
588
589 label_value_list = lvl;
590 tail_recursion_label_list = trll;
591 }
592
593
594 /* Find the basic blocks of the current function.
595 F is the first insn of the function; the NREGS and FILE arguments
596 are unused. */
597
598 void
599 find_basic_blocks (f, nregs, file)
600 rtx f;
601 int nregs ATTRIBUTE_UNUSED;
602 FILE *file ATTRIBUTE_UNUSED;
603 {
604 int max_uid;
605 timevar_push (TV_CFG);
606
607 /* Flush out existing data. */
608 if (basic_block_info != NULL)
609 {
610 int i;
611
612 clear_edges ();
613
614 /* Clear bb->aux on all extant basic blocks. We'll use this as a
615 tag for reuse during create_basic_block, just in case some pass
616 copies around basic block notes improperly. */
617 for (i = 0; i < n_basic_blocks; ++i)
618 BASIC_BLOCK (i)->aux = NULL;
619
620 VARRAY_FREE (basic_block_info);
621 }
622
623 n_basic_blocks = count_basic_blocks (f);
624
625 /* Size the basic block table. The actual structures will be allocated
626 by find_basic_blocks_1, since we want to keep the structure pointers
627 stable across calls to find_basic_blocks. */
628 /* ??? This whole issue would be much simpler if we called find_basic_blocks
629 exactly once, and thereafter we don't have a single long chain of
630 instructions at all until close to the end of compilation when we
631 actually lay them out. */
632
633 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
634
635 find_basic_blocks_1 (f);
636
637 /* Record the block to which an insn belongs. */
638 /* ??? This should be done another way, by which (perhaps) a label is
639 tagged directly with the basic block that it starts. It is used for
640 more than that currently, but IMO that is the only valid use. */
641
642 max_uid = get_max_uid ();
643 #ifdef AUTO_INC_DEC
644 /* Leave space for insns life_analysis makes in some cases for auto-inc.
645 These cases are rare, so we don't need too much space. */
646 max_uid += max_uid / 10;
647 #endif
648
649 compute_bb_for_insn (max_uid);
650
651 /* Discover the edges of our cfg. */
652 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
653
654 /* Do very simple cleanup now, for the benefit of code that runs between
655 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
656 tidy_fallthru_edges ();
657
658 mark_critical_edges ();
659
660 #ifdef ENABLE_CHECKING
661 verify_flow_info ();
662 #endif
663 timevar_pop (TV_CFG);
664 }
665 \f
666 /* Assume that new control flow instructions have been emitted into
667 basic block BB. Rescan the block and update the CFG to match. */
668 void
669 find_sub_basic_blocks (bb)
670 basic_block bb;
671 {
672 rtx insn = bb->head;
673 rtx end = bb->end;
674 rtx jump_insn = NULL_RTX;
675 edge falltru = 0;
676 basic_block first_bb = bb;
677 int i;
678
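/* A block containing only its head insn has nothing to split. */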
679 if (insn == bb->end)
680 return;
681
682 if (GET_CODE (insn) == CODE_LABEL)
683 insn = NEXT_INSN (insn);
684
685 /* Scan insn chain and try to find new basic block boundaries. */
686 while (1)
687 {
688 enum rtx_code code = GET_CODE (insn);
689 switch (code)
690 {
691 case BARRIER:
692 if (!jump_insn)
693 abort ();
694 break;
695 /* At a code label, split the current basic block. */
696 case CODE_LABEL:
697 falltru = split_block (bb, PREV_INSN (insn));
698 if (jump_insn)
699 bb->end = jump_insn;
700 bb = falltru->dest;
701 remove_edge (falltru);
702 jump_insn = 0;
703 if (LABEL_ALTERNATE_NAME (insn))
704 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
705 break;
706 case INSN:
707 case JUMP_INSN:
708 /* If we have previously seen a jump in this block, split the block
709 here and move the block boundary back to that jump. */
710 if (jump_insn)
711 {
712 falltru = split_block (bb, PREV_INSN (insn));
713 bb->end = jump_insn;
714 bb = falltru->dest;
715 remove_edge (falltru);
716 jump_insn = 0;
717 }
718 /* Jump insns need special care: jump tables must not appear here, and the jump is remembered so the block can later end at it. */
719 if (GET_CODE (insn) == JUMP_INSN)
720 {
721 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
722 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
723 abort ();
724 jump_insn = insn;
725 }
726 break;
727 default:
728 break;
729 }
730 if (insn == end)
731 break;
732 insn = NEXT_INSN (insn);
733 }
734
735 /* If the expander replaced a normal insn with a sequence terminated by
736 a return and a barrier, or some other sequence that does not behave
737 like an ordinary jump, take care to move the basic block boundary. */
738 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
739 bb->end = jump_insn;
740
741 /* We may have replaced the conditional jump with a conditional jump
742 followed by cleanup code on the fallthru edge, so some of the old
743 outgoing edges may now be dead. */
744 purge_dead_edges (bb);
745
746 /* Now re-scan and wire in all edges. This expects simple (conditional)
747 jumps at the end of each new basic block. */
748 make_edges (NULL, first_bb->index, bb->index, 1);
749
750 /* Update branch probabilities. We expect only (un)conditional jumps
751 to have been created, each with only forward edges. */
752 for (i = first_bb->index; i <= bb->index; i++)
753 {
754 edge e,f;
755 basic_block b = BASIC_BLOCK (i);
756 if (b != first_bb)
757 {
758 b->count = 0;
759 b->frequency = 0;
760 for (e = b->pred; e; e=e->pred_next)
761 {
762 b->count += e->count;
763 b->frequency += EDGE_FREQUENCY (e);
764 }
765 }
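/* A block with exactly two successors ends in a conditional jump;
   distribute its count between the branch and fallthru edges according
   to the REG_BR_PROB note on the jump. */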
766 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
767 {
768 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
769 int probability;
770
771 if (!note)
772 continue;
773 probability = INTVAL (XEXP (note, 0));
776 e = BRANCH_EDGE (b);
777 e->probability = probability;
778 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
779 / REG_BR_PROB_BASE);
780 f = FALLTHRU_EDGE (b);
781 f->probability = REG_BR_PROB_BASE - probability;
782 f->count = b->count - e->count;
783 }
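/* A lone successor inherits the whole count of the block. */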
784 if (b->succ && !b->succ->succ_next)
785 {
786 e = b->succ;
787 e->probability = REG_BR_PROB_BASE;
788 e->count = b->count;
789 }
790 }
791 }