gcc/cfgbuild.c
1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* find_basic_blocks divides the current function's rtl into basic
23 blocks and constructs the CFG. The blocks are recorded in the
24 basic_block_info array; the CFG exists in the edge structures
25 referenced by the blocks.
26
27 find_basic_blocks also finds any unreachable loops and deletes them.
28
29 Available functionality:
30 - CFG construction
31 find_basic_blocks
32 - Local CFG construction
33 find_sub_basic_blocks */
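
/* Illustrative sketch (not part of this file): a caller typically builds
   the whole CFG once and later re-splits only the blocks it has edited:

       find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);
       ...
       find_sub_basic_blocks (bb);

   The identifiers get_insns, max_reg_num, rtl_dump_file and bb are
   assumptions made for the example, not definitions from this file.  */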
34 \f
35 #include "config.h"
36 #include "system.h"
37 #include "tree.h"
38 #include "rtl.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "output.h"
44 #include "function.h"
45 #include "except.h"
46 #include "toplev.h"
47 #include "timevar.h"
48
49 static int count_basic_blocks PARAMS ((rtx));
50 static void find_basic_blocks_1 PARAMS ((rtx));
51 static rtx find_label_refs PARAMS ((rtx, rtx));
52 static void make_edges PARAMS ((rtx, basic_block,
53 basic_block, int));
54 static void make_label_edge PARAMS ((sbitmap *, basic_block,
55 rtx, int));
56 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
57 static void find_bb_boundaries PARAMS ((basic_block));
58 static void compute_outgoing_frequencies PARAMS ((basic_block));
59 static bool inside_basic_block_p PARAMS ((rtx));
60 static bool control_flow_insn_p PARAMS ((rtx));
61 \f
62 /* Return true if insn is something that should be contained inside basic
63 block. */
64
65 static bool
66 inside_basic_block_p (insn)
67 rtx insn;
68 {
69 switch (GET_CODE (insn))
70 {
71 case CODE_LABEL:
72       /* Avoid creating a basic block for jump tables.  */
73 return (NEXT_INSN (insn) == 0
74 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
75 || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
76 && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
77
78 case JUMP_INSN:
79 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
80 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
81
82 case CALL_INSN:
83 case INSN:
84 return true;
85
86 case BARRIER:
87 case NOTE:
88 return false;
89
90 default:
91 abort ();
92 }
93 }
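
/* For illustration (a sketch only; the exact RTL printout varies by
   target): the dispatch table emitted for a C switch statement looks
   roughly like

       (code_label 34 ...)
       (jump_insn 35 ... (addr_vec [(label_ref 40) (label_ref 44) ...]))

   and inside_basic_block_p returns false for both insns, so no basic
   block is created for the table itself.  */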
94
95 /* Return true if INSN may cause control flow transfer, so it should be last in
96 the basic block. */
97
98 static bool
99 control_flow_insn_p (insn)
100 rtx insn;
101 {
102 rtx note;
103
104 switch (GET_CODE (insn))
105 {
106 case NOTE:
107 case CODE_LABEL:
108 return false;
109
110 case JUMP_INSN:
111       /* A jump insn always causes a control transfer, except for tablejumps.  */
112 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
113 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
114
115 case CALL_INSN:
116 /* Call insn may return to the nonlocal goto handler. */
117 return ((nonlocal_goto_handler_labels
118 && (0 == (note = find_reg_note (insn, REG_EH_REGION,
119 NULL_RTX))
120 || INTVAL (XEXP (note, 0)) >= 0))
121 /* Or may trap. */
122 || can_throw_internal (insn));
123
124 case INSN:
125 return (flag_non_call_exceptions && can_throw_internal (insn));
126
127 case BARRIER:
128       /* It is nonsense to reach a barrier when looking for the
129 	 end of a basic block, but this may happen before dead
130 	 code is eliminated.  */
131 return false;
132
133 default:
134 abort ();
135 }
136 }
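
/* For example (an assumed scenario, not code from this file): when
   compiling C++ with -fnon-call-exceptions, the load from *p in

       try { x = *p; } catch (...) { x = 0; }

   may throw to the local handler, so control_flow_insn_p returns true for
   the plain INSN doing the load and that insn must end its basic block.  */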
137
138 /* Count the basic blocks of the function. */
139
140 static int
141 count_basic_blocks (f)
142 rtx f;
143 {
144 int count = 0;
145 bool saw_insn = false;
146 rtx insn;
147
148 for (insn = f; insn; insn = NEXT_INSN (insn))
149 {
150       /* A code label or barrier causes the current basic block to be
151 	 terminated at the previous real insn.  */
152 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
153 && saw_insn)
154 count++, saw_insn = false;
155
156 /* Start basic block if needed. */
157 if (!saw_insn && inside_basic_block_p (insn))
158 saw_insn = true;
159
160 /* Control flow insn causes current basic block to be terminated. */
161 if (saw_insn && control_flow_insn_p (insn))
162 count++, saw_insn = false;
163 }
164
165 if (saw_insn)
166 count++;
167
168   /* The rest of the compiler works a bit more smoothly when we don't have to
169      check for the edge case of do-nothing functions with no basic blocks.  */
170 if (count == 0)
171 {
172 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
173 count = 1;
174 }
175
176 return count;
177 }
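
/* Worked example (illustrative): for the insn stream

       CODE_LABEL L1
       INSN       a
       JUMP_INSN  (jump to L2)
       CODE_LABEL L2
       INSN       b

   the JUMP_INSN terminates the first block, and the insns from L2 onward
   form a second one, so count_basic_blocks returns 2.  */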
178
179 /* Scan a list of insns for labels referred to other than by jumps.
180 This is used to scan the alternatives of a call placeholder. */
181
182 static rtx
183 find_label_refs (f, lvl)
184 rtx f;
185 rtx lvl;
186 {
187 rtx insn;
188
189 for (insn = f; insn; insn = NEXT_INSN (insn))
190 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
191 {
192 rtx note;
193
194 /* Make a list of all labels referred to other than by jumps
195 (which just don't have the REG_LABEL notes).
196
197 Make a special exception for labels followed by an ADDR*VEC,
198 as this would be a part of the tablejump setup code.
199
200 	 Make a special exception for registers loaded with label
201 values just before jump insns that use them. */
202
203 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
204 if (REG_NOTE_KIND (note) == REG_LABEL)
205 {
206 rtx lab = XEXP (note, 0), next;
207
208 if ((next = next_nonnote_insn (lab)) != NULL
209 && GET_CODE (next) == JUMP_INSN
210 && (GET_CODE (PATTERN (next)) == ADDR_VEC
211 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
212 ;
213 else if (GET_CODE (lab) == NOTE)
214 ;
215 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
216 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
217 ;
218 else
219 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
220 }
221 }
222
223 return lvl;
224 }
225 \f
226 /* Create an edge from basic block SRC to the basic block containing
227    LABEL.  FLAGS are auxiliary information about the edge that is
228    accumulated between calls.  */
230
231 static void
232 make_label_edge (edge_cache, src, label, flags)
233 sbitmap *edge_cache;
234 basic_block src;
235 rtx label;
236 int flags;
237 {
238 if (GET_CODE (label) != CODE_LABEL)
239 abort ();
240
241 /* If the label was never emitted, this insn is junk, but avoid a
242 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
243 as a result of a syntax error and a diagnostic has already been
244 printed. */
245
246 if (INSN_UID (label) == 0)
247 return;
248
249 cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
250 }
251
252 /* Create the exception handling edges generated by INSN, with SRC as the source block.  */
253
254 static void
255 make_eh_edge (edge_cache, src, insn)
256 sbitmap *edge_cache;
257 basic_block src;
258 rtx insn;
259 {
260 int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
261 rtx handlers, i;
262
263 handlers = reachable_handlers (insn);
264
265 for (i = handlers; i; i = XEXP (i, 1))
266 make_label_edge (edge_cache, src, XEXP (i, 0),
267 EDGE_ABNORMAL | EDGE_EH | is_call);
268
269 free_INSN_LIST_list (&handlers);
270 }
271
272 /* Identify the edges between basic blocks MIN to MAX.
273 
274    LABEL_VALUE_LIST is a list of labels whose addresses are taken; a block
275    ending in a computed jump may reach any of them (or any label on
276    forced_labels).
277 
278    If UPDATE_P is nonzero, existing edges are entered into the edge cache to avoid duplicates.  */
279
280 static void
281 make_edges (label_value_list, min, max, update_p)
282 rtx label_value_list;
283 basic_block min, max;
284 int update_p;
285 {
286 basic_block bb;
287 sbitmap *edge_cache = NULL;
288
289 /* Assume no computed jump; revise as we create edges. */
290 current_function_has_computed_jump = 0;
291
292 /* Heavy use of computed goto in machine-generated code can lead to
293 nearly fully-connected CFGs. In that case we spend a significant
294 amount of time searching the edge lists for duplicates. */
295 if (forced_labels || label_value_list)
296 {
297 edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
298 sbitmap_vector_zero (edge_cache, last_basic_block);
299
300 if (update_p)
301 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
302 {
303 edge e;
304
305 for (e = bb->succ; e ; e = e->succ_next)
306 if (e->dest != EXIT_BLOCK_PTR)
307 SET_BIT (edge_cache[bb->index], e->dest->index);
308 }
309 }
310
311   /* By the nature of the way blocks are numbered, ENTRY_BLOCK_PTR->next_bb
312      is always the first real block, so the entry block falls through to it.  */
313 if (min == ENTRY_BLOCK_PTR->next_bb)
314 cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
315 EDGE_FALLTHRU);
316
317 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
318 {
319 rtx insn, x;
320 enum rtx_code code;
321 int force_fallthru = 0;
322
323 if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
324 cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
325
326 /* Examine the last instruction of the block, and discover the
327 ways we can leave the block. */
328
329 insn = bb->end;
330 code = GET_CODE (insn);
331
332 /* A branch. */
333 if (code == JUMP_INSN)
334 {
335 rtx tmp;
336
337 /* Recognize exception handling placeholders. */
338 if (GET_CODE (PATTERN (insn)) == RESX)
339 make_eh_edge (edge_cache, bb, insn);
340
341 /* Recognize a non-local goto as a branch outside the
342 current function. */
343 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
344 ;
345
346 /* ??? Recognize a tablejump and do the right thing. */
347 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
348 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
349 && GET_CODE (tmp) == JUMP_INSN
350 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
351 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
352 {
353 rtvec vec;
354 int j;
355
356 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
357 vec = XVEC (PATTERN (tmp), 0);
358 else
359 vec = XVEC (PATTERN (tmp), 1);
360
361 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
362 make_label_edge (edge_cache, bb,
363 XEXP (RTVEC_ELT (vec, j), 0), 0);
364
365 	      /* Some targets (e.g. ARM) emit a conditional jump that also
366 contains the out-of-range target. Scan for these and
367 add an edge if necessary. */
368 if ((tmp = single_set (insn)) != NULL
369 && SET_DEST (tmp) == pc_rtx
370 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
371 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
372 make_label_edge (edge_cache, bb,
373 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
374
375 #ifdef CASE_DROPS_THROUGH
376 /* Silly VAXen. The ADDR_VEC is going to be in the way of
377 us naturally detecting fallthru into the next block. */
378 force_fallthru = 1;
379 #endif
380 }
381
382 /* If this is a computed jump, then mark it as reaching
383 everything on the label_value_list and forced_labels list. */
384 else if (computed_jump_p (insn))
385 {
386 current_function_has_computed_jump = 1;
387
388 for (x = label_value_list; x; x = XEXP (x, 1))
389 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
390
391 for (x = forced_labels; x; x = XEXP (x, 1))
392 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
393 }
394
395 	  /* A return insn creates an edge to the exit block.  */
396 else if (returnjump_p (insn))
397 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
398
399 /* Otherwise, we have a plain conditional or unconditional jump. */
400 else
401 {
402 if (! JUMP_LABEL (insn))
403 abort ();
404 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
405 }
406 }
407
408 /* If this is a sibling call insn, then this is in effect a combined call
409 and return, and so we need an edge to the exit block. No need to
410 worry about EH edges, since we wouldn't have created the sibling call
411 in the first place. */
412 if (code == CALL_INSN && SIBLING_CALL_P (insn))
413 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
414 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
415
416 /* If this is a CALL_INSN, then mark it as reaching the active EH
417 handler for this CALL_INSN. If we're handling non-call
418 exceptions then any insn can reach any of the active handlers.
419 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
420 else if (code == CALL_INSN || flag_non_call_exceptions)
421 {
422 /* Add any appropriate EH edges. */
423 make_eh_edge (edge_cache, bb, insn);
424
425 if (code == CALL_INSN && nonlocal_goto_handler_labels)
426 {
427 /* ??? This could be made smarter: in some cases it's possible
428 to tell that certain calls will not do a nonlocal goto.
429 For example, if the nested functions that do the nonlocal
430 gotos do not have their addresses taken, then only calls to
431 those functions or to other nested functions that use them
432 could possibly do nonlocal gotos. */
433
434 /* We do know that a REG_EH_REGION note with a value less
435 than 0 is guaranteed not to perform a non-local goto. */
436 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
437
438 if (!note || INTVAL (XEXP (note, 0)) >= 0)
439 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
440 make_label_edge (edge_cache, bb, XEXP (x, 0),
441 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
442 }
443 }
444
445 /* Find out if we can drop through to the next block. */
446 insn = next_nonnote_insn (insn);
447 if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
448 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
449 else if (bb->next_bb != EXIT_BLOCK_PTR)
450 {
451 rtx tmp = bb->next_bb->head;
452 if (GET_CODE (tmp) == NOTE)
453 tmp = next_nonnote_insn (tmp);
454 if (force_fallthru || insn == tmp)
455 cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
456 }
457 }
458
459 if (edge_cache)
460 sbitmap_vector_free (edge_cache);
461 }
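
/* For instance (illustrative C source, not from this file):

       static void *t[] = { &&a, &&b };
       goto *t[i];
     a: ...;
     b: ...;

   The block ending in the computed "goto *t[i]" receives EDGE_ABNORMAL
   edges to the blocks starting at "a" and "b", since those labels appear
   on forced_labels or label_value_list.  */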
462 \f
463 /* Find all basic blocks of the function whose first insn is F.
464 
465    Collect a list of labels whose addresses are taken (label_value_list);
466    make_edges later uses it for computed gotos.  */
467
468 static void
469 find_basic_blocks_1 (f)
470 rtx f;
471 {
472 rtx insn, next;
473 rtx bb_note = NULL_RTX;
474 rtx lvl = NULL_RTX;
475 rtx trll = NULL_RTX;
476 rtx head = NULL_RTX;
477 rtx end = NULL_RTX;
478 basic_block prev = ENTRY_BLOCK_PTR;
479
480 /* We process the instructions in a slightly different way than we did
481 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
482 closed out the previous block, so that it gets attached at the proper
483 place. Since this form should be equivalent to the previous,
484 count_basic_blocks continues to use the old form as a check. */
485
486 for (insn = f; insn; insn = next)
487 {
488 enum rtx_code code = GET_CODE (insn);
489
490 next = NEXT_INSN (insn);
491
492 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
493 && head)
494 {
495 prev = create_basic_block_structure (head, end, bb_note, prev);
496 head = end = NULL_RTX;
497 bb_note = NULL_RTX;
498 }
499
500 if (inside_basic_block_p (insn))
501 {
502 if (head == NULL_RTX)
503 head = insn;
504 end = insn;
505 }
506
507 if (head && control_flow_insn_p (insn))
508 {
509 prev = create_basic_block_structure (head, end, bb_note, prev);
510 head = end = NULL_RTX;
511 bb_note = NULL_RTX;
512 }
513
514 switch (code)
515 {
516 case NOTE:
517 {
518 int kind = NOTE_LINE_NUMBER (insn);
519
520 /* Look for basic block notes with which to keep the
521 basic_block_info pointers stable. Unthread the note now;
522 we'll put it back at the right place in create_basic_block.
523 Or not at all if we've already found a note in this block. */
524 if (kind == NOTE_INSN_BASIC_BLOCK)
525 {
526 if (bb_note == NULL_RTX)
527 bb_note = insn;
528 else
529 next = delete_insn (insn);
530 }
531 break;
532 }
533
534 case CODE_LABEL:
535 case JUMP_INSN:
536 case INSN:
537 case BARRIER:
538 break;
539
540 case CALL_INSN:
541 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
542 {
543 /* Scan each of the alternatives for label refs. */
544 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
545 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
546 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
547 /* Record its tail recursion label, if any. */
548 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
549 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
550 }
551 break;
552
553 default:
554 abort ();
555 }
556
557 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
558 {
559 rtx note;
560
561 /* Make a list of all labels referred to other than by jumps.
562
563 Make a special exception for labels followed by an ADDR*VEC,
564 as this would be a part of the tablejump setup code.
565
566 	     Make a special exception for registers loaded with label
567 values just before jump insns that use them. */
568
569 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
570 if (REG_NOTE_KIND (note) == REG_LABEL)
571 {
572 rtx lab = XEXP (note, 0), next;
573
574 if ((next = next_nonnote_insn (lab)) != NULL
575 && GET_CODE (next) == JUMP_INSN
576 && (GET_CODE (PATTERN (next)) == ADDR_VEC
577 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
578 ;
579 else if (GET_CODE (lab) == NOTE)
580 ;
581 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
582 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
583 ;
584 else
585 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
586 }
587 }
588 }
589
590 if (head != NULL_RTX)
591 create_basic_block_structure (head, end, bb_note, prev);
592 else if (bb_note)
593 delete_insn (bb_note);
594
595 if (last_basic_block != n_basic_blocks)
596 abort ();
597
598 label_value_list = lvl;
599 tail_recursion_label_list = trll;
600 clear_aux_for_blocks ();
601 }
602
603
604 /* Find basic blocks of the current function.
605 F is the first insn of the function and NREGS the number of register
606 numbers in use. */
607
608 void
609 find_basic_blocks (f, nregs, file)
610 rtx f;
611 int nregs ATTRIBUTE_UNUSED;
612 FILE *file ATTRIBUTE_UNUSED;
613 {
614 basic_block bb;
615
616 timevar_push (TV_CFG);
617
618 /* Flush out existing data. */
619 if (basic_block_info != NULL)
620 {
621 clear_edges ();
622
623 /* Clear bb->aux on all extant basic blocks. We'll use this as a
624 tag for reuse during create_basic_block, just in case some pass
625 copies around basic block notes improperly. */
626 FOR_EACH_BB (bb)
627 bb->aux = NULL;
628
629 VARRAY_FREE (basic_block_info);
630 }
631
632 n_basic_blocks = count_basic_blocks (f);
633 last_basic_block = 0;
634 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
635 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
636
637 /* Size the basic block table. The actual structures will be allocated
638 by find_basic_blocks_1, since we want to keep the structure pointers
639 stable across calls to find_basic_blocks. */
640 /* ??? This whole issue would be much simpler if we called find_basic_blocks
641 exactly once, and thereafter we don't have a single long chain of
642 instructions at all until close to the end of compilation when we
643 actually lay them out. */
644
645 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
646
647 find_basic_blocks_1 (f);
648
649 /* Discover the edges of our cfg. */
650 make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);
651
652 /* Do very simple cleanup now, for the benefit of code that runs between
653 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
654 tidy_fallthru_edges ();
655
656 #ifdef ENABLE_CHECKING
657 verify_flow_info ();
658 #endif
659 timevar_pop (TV_CFG);
660 }
661 \f
662 /* State of basic block as seen by find_sub_basic_blocks. */
663 enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
664
665 #define STATE(BB) (enum state) ((size_t) (BB)->aux)
666 #define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
667
668 /* Scan basic block BB for possible BB boundaries inside the block
669    and create new basic blocks in the process.  */
670
671 static void
672 find_bb_boundaries (bb)
673 basic_block bb;
674 {
675 rtx insn = bb->head;
676 rtx end = bb->end;
677 rtx flow_transfer_insn = NULL_RTX;
678 edge fallthru = NULL;
679
680 if (insn == bb->end)
681 return;
682
683 if (GET_CODE (insn) == CODE_LABEL)
684 insn = NEXT_INSN (insn);
685
686 /* Scan insn chain and try to find new basic block boundaries. */
687 while (1)
688 {
689 enum rtx_code code = GET_CODE (insn);
690
691 /* On code label, split current basic block. */
692 if (code == CODE_LABEL)
693 {
694 fallthru = split_block (bb, PREV_INSN (insn));
695 if (flow_transfer_insn)
696 bb->end = flow_transfer_insn;
697
698 bb = fallthru->dest;
699 remove_edge (fallthru);
700 flow_transfer_insn = NULL_RTX;
701 if (LABEL_ALT_ENTRY_P (insn))
702 make_edge (ENTRY_BLOCK_PTR, bb, 0);
703 }
704
705 /* In case we've previously seen an insn that effects a control
706 flow transfer, split the block. */
707 if (flow_transfer_insn && inside_basic_block_p (insn))
708 {
709 fallthru = split_block (bb, PREV_INSN (insn));
710 bb->end = flow_transfer_insn;
711 bb = fallthru->dest;
712 remove_edge (fallthru);
713 flow_transfer_insn = NULL_RTX;
714 }
715
716 if (control_flow_insn_p (insn))
717 flow_transfer_insn = insn;
718 if (insn == end)
719 break;
720 insn = NEXT_INSN (insn);
721 }
722
723   /* In case the expander replaced a normal insn by a sequence terminated
724      by a return and barrier, or some other sequence that does not behave
725      like an ordinary jump, we need to move the basic block boundary.  */
726 if (flow_transfer_insn)
727 bb->end = flow_transfer_insn;
728
729   /* We've possibly replaced the conditional jump by a conditional jump
730      followed by a cleanup at the fallthru edge, so some of the outgoing
731      edges may be dead.  */
732 purge_dead_edges (bb);
733 }
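
/* Example (illustrative): if a pass replaced an insn in the middle of BB
   with a sequence ending in a return followed by a barrier, BB now holds a
   control flow insn in its middle.  find_bb_boundaries trims bb->end back
   to that insn and splits the remaining insns into a new block; the
   fallthru edge between the two is removed, since control does not fall
   through a return.  */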
734
735 /* Assume that frequency of basic block B is known. Compute frequencies
736 and probabilities of outgoing edges. */
737
738 static void
739 compute_outgoing_frequencies (b)
740 basic_block b;
741 {
742 edge e, f;
743
744 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
745 {
746 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
747 int probability;
748
749 if (!note)
750 return;
751
752 probability = INTVAL (XEXP (find_reg_note (b->end,
753 REG_BR_PROB, NULL),
754 0));
755 e = BRANCH_EDGE (b);
756 e->probability = probability;
757 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
758 / REG_BR_PROB_BASE);
759 f = FALLTHRU_EDGE (b);
760 f->probability = REG_BR_PROB_BASE - probability;
761 f->count = b->count - e->count;
762 }
763
764 if (b->succ && !b->succ->succ_next)
765 {
766 e = b->succ;
767 e->probability = REG_BR_PROB_BASE;
768 e->count = b->count;
769 }
770 }
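
/* Worked example (illustrative): if B ends in a conditional jump carrying
   a REG_BR_PROB note of 3000 and B->count is 1000, then with
   REG_BR_PROB_BASE == 10000 the branch edge gets probability 3000 and
   count (1000 * 3000 + 5000) / 10000 == 300, while the fallthru edge gets
   probability 7000 and count 1000 - 300 == 700.  */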
771
772 /* Assume that new control flow instructions have been emitted into the
773    basic blocks marked in BLOCKS.  Update the CFG data structures accordingly.  */
774
775 void
776 find_many_sub_basic_blocks (blocks)
777 sbitmap blocks;
778 {
779 basic_block bb, min, max;
780
781 FOR_EACH_BB (bb)
782 SET_STATE (bb,
783 TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
784
785 FOR_EACH_BB (bb)
786 if (STATE (bb) == BLOCK_TO_SPLIT)
787 find_bb_boundaries (bb);
788
789 FOR_EACH_BB (bb)
790 if (STATE (bb) != BLOCK_ORIGINAL)
791 break;
792
793 min = max = bb;
794 for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
795 if (STATE (bb) != BLOCK_ORIGINAL)
796 max = bb;
797
798   /* Now re-scan and wire in all edges.  This expects simple (conditional)
799      jumps at the end of each new basic block.  */
800 make_edges (NULL, min, max, 1);
801
802   /* Update branch probabilities.  We expect only (un)conditional jumps
803      to have been created, with only forward edges.  */
804 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
805 {
806 edge e;
807
808 if (STATE (bb) == BLOCK_ORIGINAL)
809 continue;
810 if (STATE (bb) == BLOCK_NEW)
811 {
812 bb->count = 0;
813 bb->frequency = 0;
814 for (e = bb->pred; e; e=e->pred_next)
815 {
816 bb->count += e->count;
817 bb->frequency += EDGE_FREQUENCY (e);
818 }
819 }
820
821 compute_outgoing_frequencies (bb);
822 }
823
824 FOR_EACH_BB (bb)
825 SET_STATE (bb, 0);
826 }
827
828 /* Like above, but for a single basic block only.  */
829
830 void
831 find_sub_basic_blocks (bb)
832 basic_block bb;
833 {
834 basic_block min, max, b;
835 basic_block next = bb->next_bb;
836
837 min = bb;
838 find_bb_boundaries (bb);
839 max = next->prev_bb;
840
841   /* Now re-scan and wire in all edges.  This expects simple (conditional)
842      jumps at the end of each new basic block.  */
843 make_edges (NULL, min, max, 1);
844
845   /* Update branch probabilities.  We expect only (un)conditional jumps
846      to have been created, with only forward edges.  */
847 FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
848 {
849 edge e;
850
851 if (b != min)
852 {
853 b->count = 0;
854 b->frequency = 0;
855 for (e = b->pred; e; e=e->pred_next)
856 {
857 b->count += e->count;
858 b->frequency += EDGE_FREQUENCY (e);
859 }
860 }
861
862 compute_outgoing_frequencies (b);
863 }
864 }
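
/* Usage sketch (assumptions for illustration, not code from this file):
   a pass that has emitted new control flow insns into a single block
   calls

       find_sub_basic_blocks (bb);

   afterwards so the CFG matches the insn stream again; to fix up several
   blocks at once, set their indices in an sbitmap and pass it to
   find_many_sub_basic_blocks.  */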