/* gcc/cfgbuild.c — from the GCC repository (merge of basic-improvements-branch
   to trunk).  */
1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* find_basic_blocks divides the current function's rtl into basic
23 blocks and constructs the CFG. The blocks are recorded in the
24 basic_block_info array; the CFG exists in the edge structures
25 referenced by the blocks.
26
27 find_basic_blocks also finds any unreachable loops and deletes them.
28
29 Available functionality:
30 - CFG construction
31 find_basic_blocks
32 - Local CFG construction
33 find_sub_basic_blocks */
34 \f
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "rtl.h"
41 #include "hard-reg-set.h"
42 #include "basic-block.h"
43 #include "regs.h"
44 #include "flags.h"
45 #include "output.h"
46 #include "function.h"
47 #include "except.h"
48 #include "toplev.h"
49 #include "timevar.h"
50
/* Forward declarations for the file-local helpers defined below.  */

static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, basic_block,
						 basic_block, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries		PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
static bool inside_basic_block_p	PARAMS ((rtx));
static bool control_flow_insn_p		PARAMS ((rtx));
63 \f
64 /* Return true if insn is something that should be contained inside basic
65 block. */
66
67 static bool
68 inside_basic_block_p (insn)
69 rtx insn;
70 {
71 switch (GET_CODE (insn))
72 {
73 case CODE_LABEL:
74 /* Avoid creating of basic block for jumptables. */
75 return (NEXT_INSN (insn) == 0
76 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
77 || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
78 && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
79
80 case JUMP_INSN:
81 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
82 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
83
84 case CALL_INSN:
85 case INSN:
86 return true;
87
88 case BARRIER:
89 case NOTE:
90 return false;
91
92 default:
93 abort ();
94 }
95 }
96
97 /* Return true if INSN may cause control flow transfer, so it should be last in
98 the basic block. */
99
100 static bool
101 control_flow_insn_p (insn)
102 rtx insn;
103 {
104 rtx note;
105
106 switch (GET_CODE (insn))
107 {
108 case NOTE:
109 case CODE_LABEL:
110 return false;
111
112 case JUMP_INSN:
113 /* Jump insn always causes control transfer except for tablejumps. */
114 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
115 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
116
117 case CALL_INSN:
118 /* Call insn may return to the nonlocal goto handler. */
119 return ((nonlocal_goto_handler_labels
120 && (0 == (note = find_reg_note (insn, REG_EH_REGION,
121 NULL_RTX))
122 || INTVAL (XEXP (note, 0)) >= 0))
123 /* Or may trap. */
124 || can_throw_internal (insn));
125
126 case INSN:
127 return (flag_non_call_exceptions && can_throw_internal (insn));
128
129 case BARRIER:
130 /* It is nonsence to reach barrier when looking for the
131 end of basic block, but before dead code is eliminated
132 this may happen. */
133 return false;
134
135 default:
136 abort ();
137 }
138 }
139
140 /* Count the basic blocks of the function. */
141
142 static int
143 count_basic_blocks (f)
144 rtx f;
145 {
146 int count = 0;
147 bool saw_insn = false;
148 rtx insn;
149
150 for (insn = f; insn; insn = NEXT_INSN (insn))
151 {
152 /* Code labels and barriers causes curent basic block to be
153 terminated at previous real insn. */
154 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
155 && saw_insn)
156 count++, saw_insn = false;
157
158 /* Start basic block if needed. */
159 if (!saw_insn && inside_basic_block_p (insn))
160 saw_insn = true;
161
162 /* Control flow insn causes current basic block to be terminated. */
163 if (saw_insn && control_flow_insn_p (insn))
164 count++, saw_insn = false;
165 }
166
167 if (saw_insn)
168 count++;
169
170 /* The rest of the compiler works a bit smoother when we don't have to
171 check for the edge case of do-nothing functions with no basic blocks. */
172 if (count == 0)
173 {
174 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
175 count = 1;
176 }
177
178 return count;
179 }
180
181 /* Scan a list of insns for labels referred to other than by jumps.
182 This is used to scan the alternatives of a call placeholder. */
183
184 static rtx
185 find_label_refs (f, lvl)
186 rtx f;
187 rtx lvl;
188 {
189 rtx insn;
190
191 for (insn = f; insn; insn = NEXT_INSN (insn))
192 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
193 {
194 rtx note;
195
196 /* Make a list of all labels referred to other than by jumps
197 (which just don't have the REG_LABEL notes).
198
199 Make a special exception for labels followed by an ADDR*VEC,
200 as this would be a part of the tablejump setup code.
201
202 Make a special exception to registers loaded with label
203 values just before jump insns that use them. */
204
205 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
206 if (REG_NOTE_KIND (note) == REG_LABEL)
207 {
208 rtx lab = XEXP (note, 0), next;
209
210 if ((next = next_nonnote_insn (lab)) != NULL
211 && GET_CODE (next) == JUMP_INSN
212 && (GET_CODE (PATTERN (next)) == ADDR_VEC
213 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
214 ;
215 else if (GET_CODE (lab) == NOTE)
216 ;
217 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
218 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
219 ;
220 else
221 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
222 }
223 }
224
225 return lvl;
226 }
227 \f
228 /* Create an edge between two basic blocks. FLAGS are auxiliary information
229 about the edge that is accumulated between calls. */
230
231 /* Create an edge from a basic block to a label. */
232
233 static void
234 make_label_edge (edge_cache, src, label, flags)
235 sbitmap *edge_cache;
236 basic_block src;
237 rtx label;
238 int flags;
239 {
240 if (GET_CODE (label) != CODE_LABEL)
241 abort ();
242
243 /* If the label was never emitted, this insn is junk, but avoid a
244 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
245 as a result of a syntax error and a diagnostic has already been
246 printed. */
247
248 if (INSN_UID (label) == 0)
249 return;
250
251 cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
252 }
253
254 /* Create the edges generated by INSN in REGION. */
255
256 static void
257 make_eh_edge (edge_cache, src, insn)
258 sbitmap *edge_cache;
259 basic_block src;
260 rtx insn;
261 {
262 int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
263 rtx handlers, i;
264
265 handlers = reachable_handlers (insn);
266
267 for (i = handlers; i; i = XEXP (i, 1))
268 make_label_edge (edge_cache, src, XEXP (i, 0),
269 EDGE_ABNORMAL | EDGE_EH | is_call);
270
271 free_INSN_LIST_list (&handlers);
272 }
273
/* Identify the edges going out of basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; a
   computed jump is assumed able to reach any of them (and any label on
   forced_labels).

   If UPDATE_P is nonzero, the blocks MIN..MAX may already have outgoing
   edges; those are preloaded into the edge cache so they are not
   duplicated.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     basic_block min, max;
     int update_p;
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      /* Seed the cache with the pre-existing successor edges so we do
	 not re-create them below.  */
      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e ; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
		      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Alternate entry points also get an edge from ENTRY.  */
      if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  No edge is needed.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      /* Make an edge to every target label in the table.  */
	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  rtx tmp = bb->next_bb->head;
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
464 \f
/* Find all basic blocks of the function whose first insn is F and create
   their basic_block structures.

   Labels whose addresses are taken other than by jumps are collected into
   the global label_value_list, and tail recursion labels into the global
   tail_recursion_label_list, for use by make_edges with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;	/* NOTE_INSN_BASIC_BLOCK for current block.  */
  rtx lvl = NULL_RTX;		/* Accumulates label_value_list.  */
  rtx trll = NULL_RTX;		/* Accumulates tail_recursion_label_list.  */
  rtx head = NULL_RTX;		/* First insn of the block being built.  */
  rtx end = NULL_RTX;		/* Last insn of the block being built.  */
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      /* A label or barrier closes out the block begun at the previous
	 real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      /* Extend (or begin) the block being built.  */
      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      /* A control flow insn closes out the current block.  */
      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  /* Close out the final block, if any; otherwise any leftover basic block
     note is stale and must go.  */
  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  /* Cross-check against count_basic_blocks' answer.  */
  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}
604
605
/* Find basic blocks of the current function and build the CFG.
   F is the first insn of the function and NREGS the number of register
   numbers in use (both NREGS and FILE are currently unused).  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  /* Reset the entry/exit block chain; find_basic_blocks_1 relinks the
     real blocks between them.  */
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
663 \f
/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

/* The per-block state is stashed in the otherwise-unused bb->aux field.  */
#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the progress.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  /* Most recent control flow insn seen, i.e. the pending block end.  */
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  /* A block consisting of a single insn cannot be split further.  */
  if (insn == bb->end)
    return;

  /* The leading label belongs to this block; start scanning after it.  */
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On code label, split current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  /* Truncate the old block at the last control flow insn seen;
	     anything between it and the label is dead.  */
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  /* The fallthru edge created by split_block may not exist;
	     make_edges will recreate it if it does.  */
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case expander replaced normal insn by sequence terminating by
     return and barrier, or possibly other sequence not behaving like
     ordinary jump, we need to take care and move basic block boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by conditional jump
     followed by cleanup at fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
736
737 /* Assume that frequency of basic block B is known. Compute frequencies
738 and probabilities of outgoing edges. */
739
740 static void
741 compute_outgoing_frequencies (b)
742 basic_block b;
743 {
744 edge e, f;
745
746 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
747 {
748 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
749 int probability;
750
751 if (!note)
752 return;
753
754 probability = INTVAL (XEXP (find_reg_note (b->end,
755 REG_BR_PROB, NULL),
756 0));
757 e = BRANCH_EDGE (b);
758 e->probability = probability;
759 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
760 / REG_BR_PROB_BASE);
761 f = FALLTHRU_EDGE (b);
762 f->probability = REG_BR_PROB_BASE - probability;
763 f->count = b->count - e->count;
764 }
765
766 if (b->succ && !b->succ->succ_next)
767 {
768 e = b->succ;
769 e->probability = REG_BR_PROB_BASE;
770 e->count = b->count;
771 }
772 }
773
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  BLOCKS is a bitmap of the
   blocks that need to be rescanned and split.  NOTE(review): assumes at
   least one bit in BLOCKS is set; otherwise min/max below end up at the
   exit block — confirm against callers.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  basic_block bb, min, max;

  /* Mark which blocks are to be split; newly-created blocks will keep
     the default BLOCK_NEW state.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  /* Find the first block that was split or created.  */
  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  /* And the last one, giving the range [min, max] to rewire.  */
  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      /* A brand-new block derives its count and frequency from its
	 incoming edges.  */
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e=e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  /* Clear the aux fields we borrowed for state tracking.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
829
830 /* Like above but for single basic block only. */
831
832 void
833 find_sub_basic_blocks (bb)
834 basic_block bb;
835 {
836 basic_block min, max, b;
837 basic_block next = bb->next_bb;
838
839 min = bb;
840 find_bb_boundaries (bb);
841 max = next->prev_bb;
842
843 /* Now re-scan and wire in all edges. This expect simple (conditional)
844 jumps at the end of each new basic blocks. */
845 make_edges (NULL, min, max, 1);
846
847 /* Update branch probabilities. Expect only (un)conditional jumps
848 to be created with only the forward edges. */
849 FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
850 {
851 edge e;
852
853 if (b != min)
854 {
855 b->count = 0;
856 b->frequency = 0;
857 for (e = b->pred; e; e=e->pred_next)
858 {
859 b->count += e->count;
860 b->frequency += EDGE_FREQUENCY (e);
861 }
862 }
863
864 compute_outgoing_frequencies (b);
865 }
866 }