[gcc.git] / gcc / cfgbuild.c
1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* find_basic_blocks divides the current function's rtl into basic
23 blocks and constructs the CFG. The blocks are recorded in the
24 basic_block_info array; the CFG exists in the edge structures
25 referenced by the blocks.
26
27 find_basic_blocks also finds any unreachable loops and deletes them.
28
29 Available functionality:
30 - CFG construction
31 find_basic_blocks
32 - Local CFG construction
33 find_sub_basic_blocks */
34 \f
35 #include "config.h"
36 #include "system.h"
37 #include "tree.h"
38 #include "rtl.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "output.h"
44 #include "function.h"
45 #include "except.h"
46 #include "toplev.h"
47 #include "timevar.h"
48 #include "obstack.h"
49
50 static int count_basic_blocks PARAMS ((rtx));
51 static void find_basic_blocks_1 PARAMS ((rtx));
52 static rtx find_label_refs PARAMS ((rtx, rtx));
53 static void make_edges PARAMS ((rtx, int, int, int));
54 static void make_label_edge PARAMS ((sbitmap *, basic_block,
55 rtx, int));
56 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
57 static void find_bb_boundaries PARAMS ((basic_block));
58 static void compute_outgoing_frequencies PARAMS ((basic_block));
59 static bool inside_basic_block_p PARAMS ((rtx));
60 static bool control_flow_insn_p PARAMS ((rtx));
61 \f
62 /* Return true if INSN is something that should be contained inside a
63 basic block. */
64
65 static bool
66 inside_basic_block_p (insn)
67 rtx insn;
68 {
69 switch (GET_CODE (insn))
70 {
71 case CODE_LABEL:
72 /* Avoid creating a basic block for jump tables. */
73 return (NEXT_INSN (insn) == 0
74 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
75 || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
76 && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
77
78 case JUMP_INSN:
79 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
80 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
81
82 case CALL_INSN:
83 case INSN:
84 return true;
85
86 case BARRIER:
87 case NOTE:
88 return false;
89
90 default:
91 abort ();
92 }
93 }
94
95 /* Return true if INSN may cause a control flow transfer, so it should be
96 the last insn in the basic block. */
97
98 static bool
99 control_flow_insn_p (insn)
100 rtx insn;
101 {
102 rtx note;
103
104 switch (GET_CODE (insn))
105 {
106 case NOTE:
107 case CODE_LABEL:
108 return false;
109
110 case JUMP_INSN:
111 /* A jump insn always causes a control transfer, except for tablejumps. */
112 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
113 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
114
115 case CALL_INSN:
116 /* A call insn may return to a nonlocal goto handler. */
117 return ((nonlocal_goto_handler_labels
118 && (0 == (note = find_reg_note (insn, REG_EH_REGION,
119 NULL_RTX))
120 || INTVAL (XEXP (note, 0)) >= 0))
121 /* Or may trap. */
122 || can_throw_internal (insn));
123
124 case INSN:
125 return (flag_non_call_exceptions && can_throw_internal (insn));
126
127 case BARRIER:
128 /* It is nonsense to reach a barrier when looking for the
129 end of a basic block, but before dead code is eliminated
130 this may happen. */
131 return false;
132
133 default:
134 abort ();
135 }
136 }
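/* For illustration (a hypothetical case, not from the sources): with
   -fnon-call-exceptions, an ordinary INSN that can throw internally, say a
   memory access inside a protected region, counts as a control flow insn
   and must end its basic block; without that flag it does not.  */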
137
138 /* Count the basic blocks of the function. */
139
140 static int
141 count_basic_blocks (f)
142 rtx f;
143 {
144 int count = 0;
145 bool saw_insn = false;
146 rtx insn;
147
148 for (insn = f; insn; insn = NEXT_INSN (insn))
149 {
150 /* Code labels and barriers cause the current basic block to be
151 terminated at the previous real insn. */
152 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
153 && saw_insn)
154 count++, saw_insn = false;
155
156 /* Start basic block if needed. */
157 if (!saw_insn && inside_basic_block_p (insn))
158 saw_insn = true;
159
160 /* Control flow insn causes current basic block to be terminated. */
161 if (saw_insn && control_flow_insn_p (insn))
162 count++, saw_insn = false;
163 }
164
165 if (saw_insn)
166 count++;
167
168 /* The rest of the compiler works a bit more smoothly when we don't have to
169 check for the edge case of do-nothing functions with no basic blocks. */
170 if (count == 0)
171 {
172 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
173 count = 1;
174 }
175
176 return count;
177 }
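/* For illustration (a hypothetical insn stream, not from the sources):

       insn_1  insn_2  jump_insn  code_label  insn_3  barrier

   The jump terminates the first block (count becomes 1), the code label
   starts the second block, and the barrier terminates it (count becomes 2),
   so count_basic_blocks returns 2.  */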
178
179 /* Scan a list of insns for labels referred to other than by jumps.
180 This is used to scan the alternatives of a call placeholder. */
181
182 static rtx
183 find_label_refs (f, lvl)
184 rtx f;
185 rtx lvl;
186 {
187 rtx insn;
188
189 for (insn = f; insn; insn = NEXT_INSN (insn))
190 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
191 {
192 rtx note;
193
194 /* Make a list of all labels referred to other than by jumps
195 (which just don't have the REG_LABEL notes).
196 
197 Make a special exception for labels followed by an ADDR*VEC,
198 as these are part of the tablejump setup code.
199 
200 Make a special exception for registers loaded with label
201 values just before jump insns that use them. */
202
203 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
204 if (REG_NOTE_KIND (note) == REG_LABEL)
205 {
206 rtx lab = XEXP (note, 0), next;
207
208 if ((next = next_nonnote_insn (lab)) != NULL
209 && GET_CODE (next) == JUMP_INSN
210 && (GET_CODE (PATTERN (next)) == ADDR_VEC
211 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
212 ;
213 else if (GET_CODE (lab) == NOTE)
214 ;
215 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
216 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
217 ;
218 else
219 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
220 }
221 }
222
223 return lvl;
224 }
225 \f
226 /* Create an edge from basic block SRC to the block containing LABEL.
227 FLAGS are auxiliary information about the edge, and accumulate if the
228 edge already exists. EDGE_CACHE, when nonzero, is used to avoid
229 creating duplicate edges. */
230
231 static void
232 make_label_edge (edge_cache, src, label, flags)
233 sbitmap *edge_cache;
234 basic_block src;
235 rtx label;
236 int flags;
237 {
238 if (GET_CODE (label) != CODE_LABEL)
239 abort ();
240
241 /* If the label was never emitted, this insn is junk, but avoid a
242 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
243 as a result of a syntax error, for which a diagnostic has already
244 been printed. */
245
246 if (INSN_UID (label) == 0)
247 return;
248
249 cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
250 }
251
252 /* Create the exception handling edges generated by INSN, which lives in basic block SRC. */
253
254 static void
255 make_eh_edge (edge_cache, src, insn)
256 sbitmap *edge_cache;
257 basic_block src;
258 rtx insn;
259 {
260 int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
261 rtx handlers, i;
262
263 handlers = reachable_handlers (insn);
264
265 for (i = handlers; i; i = XEXP (i, 1))
266 make_label_edge (edge_cache, src, XEXP (i, 0),
267 EDGE_ABNORMAL | EDGE_EH | is_call);
268
269 free_INSN_LIST_list (&handlers);
270 }
271
272 /* Identify the edges going out of basic blocks MIN through MAX.
273 
274 LABEL_VALUE_LIST is a list of labels whose addresses are taken; blocks
275 ending in a computed jump get edges to each of them and to each label on
276 forced_labels. Blocks that are otherwise unreachable may be reachable
277 through a non-local goto. If UPDATE_P is nonzero, the existing successor
278 edges of these blocks are entered into the edge cache up front. */
279
280 static void
281 make_edges (label_value_list, min, max, update_p)
282 rtx label_value_list;
283 int min, max, update_p;
284 {
285 int i;
286 sbitmap *edge_cache = NULL;
287
288 /* Assume no computed jump; revise as we create edges. */
289 current_function_has_computed_jump = 0;
290
291 /* Heavy use of computed goto in machine-generated code can lead to
292 nearly fully-connected CFGs. In that case we spend a significant
293 amount of time searching the edge lists for duplicates. */
294 if (forced_labels || label_value_list)
295 {
296 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
297 sbitmap_vector_zero (edge_cache, n_basic_blocks);
298
299 if (update_p)
300 for (i = min; i <= max; ++i)
301 {
302 edge e;
303
304 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
305 if (e->dest != EXIT_BLOCK_PTR)
306 SET_BIT (edge_cache[i], e->dest->index);
307 }
308 }
309
310 /* By nature of the way these get numbered, block 0 is always the entry. */
311 if (min == 0)
312 cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0),
313 EDGE_FALLTHRU);
314
315 for (i = min; i <= max; ++i)
316 {
317 basic_block bb = BASIC_BLOCK (i);
318 rtx insn, x;
319 enum rtx_code code;
320 int force_fallthru = 0;
321
322 if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALTERNATE_NAME (bb->head))
323 cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
324
325 /* Examine the last instruction of the block, and discover the
326 ways we can leave the block. */
327
328 insn = bb->end;
329 code = GET_CODE (insn);
330
331 /* A branch. */
332 if (code == JUMP_INSN)
333 {
334 rtx tmp;
335
336 /* Recognize exception handling placeholders. */
337 if (GET_CODE (PATTERN (insn)) == RESX)
338 make_eh_edge (edge_cache, bb, insn);
339
340 /* Recognize a non-local goto as a branch outside the
341 current function. */
342 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
343 ;
344
345 /* ??? Recognize a tablejump and do the right thing. */
346 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
347 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
348 && GET_CODE (tmp) == JUMP_INSN
349 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
350 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
351 {
352 rtvec vec;
353 int j;
354
355 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
356 vec = XVEC (PATTERN (tmp), 0);
357 else
358 vec = XVEC (PATTERN (tmp), 1);
359
360 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
361 make_label_edge (edge_cache, bb,
362 XEXP (RTVEC_ELT (vec, j), 0), 0);
363
364 /* Some targets (e.g. ARM) emit a conditional jump that also
365 contains the out-of-range target. Scan for these and
366 add an edge if necessary. */
367 if ((tmp = single_set (insn)) != NULL
368 && SET_DEST (tmp) == pc_rtx
369 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
370 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
371 make_label_edge (edge_cache, bb,
372 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
373
374 #ifdef CASE_DROPS_THROUGH
375 /* Silly VAXen. The ADDR_VEC is going to be in the way of
376 us naturally detecting fallthru into the next block. */
377 force_fallthru = 1;
378 #endif
379 }
380
381 /* If this is a computed jump, then mark it as reaching
382 everything on the label_value_list and forced_labels list. */
383 else if (computed_jump_p (insn))
384 {
385 current_function_has_computed_jump = 1;
386
387 for (x = label_value_list; x; x = XEXP (x, 1))
388 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
389
390 for (x = forced_labels; x; x = XEXP (x, 1))
391 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
392 }
393
394 /* A return insn creates an edge to the exit block. */
395 else if (returnjump_p (insn))
396 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
397
398 /* Otherwise, we have a plain conditional or unconditional jump. */
399 else
400 {
401 if (! JUMP_LABEL (insn))
402 abort ();
403 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
404 }
405 }
406
407 /* If this is a sibling call insn, then this is in effect a combined call
408 and return, and so we need an edge to the exit block. No need to
409 worry about EH edges, since we wouldn't have created the sibling call
410 in the first place. */
411 if (code == CALL_INSN && SIBLING_CALL_P (insn))
412 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
413 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
414
415 /* If this is a CALL_INSN, then mark it as reaching the active EH
416 handler for this CALL_INSN. If we're handling non-call
417 exceptions then any insn can reach any of the active handlers.
418 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
419 else if (code == CALL_INSN || flag_non_call_exceptions)
420 {
421 /* Add any appropriate EH edges. */
422 make_eh_edge (edge_cache, bb, insn);
423
424 if (code == CALL_INSN && nonlocal_goto_handler_labels)
425 {
426 /* ??? This could be made smarter: in some cases it's possible
427 to tell that certain calls will not do a nonlocal goto.
428 For example, if the nested functions that do the nonlocal
429 gotos do not have their addresses taken, then only calls to
430 those functions or to other nested functions that use them
431 could possibly do nonlocal gotos. */
432
433 /* We do know that a REG_EH_REGION note with a value less
434 than 0 is guaranteed not to perform a non-local goto. */
435 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
436
437 if (!note || INTVAL (XEXP (note, 0)) >= 0)
438 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
439 make_label_edge (edge_cache, bb, XEXP (x, 0),
440 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
441 }
442 }
443
444 /* Find out if we can drop through to the next block. */
445 insn = next_nonnote_insn (insn);
446 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
447 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
448 else if (i + 1 < n_basic_blocks)
449 {
450 rtx tmp = BLOCK_HEAD (i + 1);
451 if (GET_CODE (tmp) == NOTE)
452 tmp = next_nonnote_insn (tmp);
453 if (force_fallthru || insn == tmp)
454 cached_make_edge (edge_cache, bb, BASIC_BLOCK (i + 1),
455 EDGE_FALLTHRU);
456 }
457 }
458
459 if (edge_cache)
460 sbitmap_vector_free (edge_cache);
461 }
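/* To summarize the cases above: make_edges creates a fallthru edge from
   ENTRY to block 0, label edges for tablejump targets, abnormal edges from
   computed jumps to each label on label_value_list and forced_labels, exit
   edges for return and sibling-call insns, EH edges for insns that may
   throw, abnormal call edges to nonlocal goto handlers, and fallthru edges
   between consecutive blocks where control can drop through.  */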
462 \f
463 /* Find all basic blocks of the function whose first insn is F.
464 
465 Collect a list of labels whose addresses are taken and store it in
466 label_value_list; make_edges will use it for computed gotos. */
467
468 static void
469 find_basic_blocks_1 (f)
470 rtx f;
471 {
472 rtx insn, next;
473 int i = 0;
474 rtx bb_note = NULL_RTX;
475 rtx lvl = NULL_RTX;
476 rtx trll = NULL_RTX;
477 rtx head = NULL_RTX;
478 rtx end = NULL_RTX;
479 basic_block prev = ENTRY_BLOCK_PTR;
480
481 /* We process the instructions in a slightly different way than we did
482 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
483 closed out the previous block, so that it gets attached at the proper
484 place. Since this form should be equivalent to the previous,
485 count_basic_blocks continues to use the old form as a check. */
486
487 for (insn = f; insn; insn = next)
488 {
489 enum rtx_code code = GET_CODE (insn);
490
491 next = NEXT_INSN (insn);
492
493 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
494 && head)
495 {
496 prev = create_basic_block_structure (i++, head, end, bb_note, prev);
497 head = end = NULL_RTX;
498 bb_note = NULL_RTX;
499 }
500
501 if (inside_basic_block_p (insn))
502 {
503 if (head == NULL_RTX)
504 head = insn;
505 end = insn;
506 }
507
508 if (head && control_flow_insn_p (insn))
509 {
510 prev = create_basic_block_structure (i++, head, end, bb_note, prev);
511 head = end = NULL_RTX;
512 bb_note = NULL_RTX;
513 }
514
515 switch (code)
516 {
517 case NOTE:
518 {
519 int kind = NOTE_LINE_NUMBER (insn);
520
521 /* Look for basic block notes with which to keep the
522 basic_block_info pointers stable. Unthread the note now;
523 we'll put it back at the right place in create_basic_block.
524 Or not at all if we've already found a note in this block. */
525 if (kind == NOTE_INSN_BASIC_BLOCK)
526 {
527 if (bb_note == NULL_RTX)
528 bb_note = insn;
529 else
530 next = delete_insn (insn);
531 }
532 break;
533 }
534
535 case CODE_LABEL:
536 case JUMP_INSN:
537 case INSN:
538 case BARRIER:
539 break;
540
541 case CALL_INSN:
542 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
543 {
544 /* Scan each of the alternatives for label refs. */
545 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
546 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
547 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
548 /* Record its tail recursion label, if any. */
549 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
550 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
551 }
552 break;
553
554 default:
555 abort ();
556 }
557
558 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
559 {
560 rtx note;
561
562 /* Make a list of all labels referred to other than by jumps.
563 
564 Make a special exception for labels followed by an ADDR*VEC,
565 as these are part of the tablejump setup code.
566 
567 Make a special exception for registers loaded with label
568 values just before jump insns that use them. */
569
570 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
571 if (REG_NOTE_KIND (note) == REG_LABEL)
572 {
573 rtx lab = XEXP (note, 0), next;
574
575 if ((next = next_nonnote_insn (lab)) != NULL
576 && GET_CODE (next) == JUMP_INSN
577 && (GET_CODE (PATTERN (next)) == ADDR_VEC
578 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
579 ;
580 else if (GET_CODE (lab) == NOTE)
581 ;
582 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
583 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
584 ;
585 else
586 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
587 }
588 }
589 }
590
591 if (head != NULL_RTX)
592 create_basic_block_structure (i++, head, end, bb_note, prev);
593 else if (bb_note)
594 delete_insn (bb_note);
595
596 if (i != n_basic_blocks)
597 abort ();
598
599 label_value_list = lvl;
600 tail_recursion_label_list = trll;
601 clear_aux_for_blocks ();
602 }
603
604
605 /* Find the basic blocks of the current function.
606 F is the first insn of the function and NREGS the number of register
607 numbers in use; NREGS and FILE are currently unused. */
608
609 void
610 find_basic_blocks (f, nregs, file)
611 rtx f;
612 int nregs ATTRIBUTE_UNUSED;
613 FILE *file ATTRIBUTE_UNUSED;
614 {
615 int max_uid;
616 timevar_push (TV_CFG);
617
618 basic_block_for_insn = 0;
619
620 /* Flush out existing data. */
621 if (basic_block_info != NULL)
622 {
623 int i;
624
625 clear_edges ();
626
627 /* Clear bb->aux on all extant basic blocks. We'll use this as a
628 tag for reuse during create_basic_block, just in case some pass
629 copies around basic block notes improperly. */
630 for (i = 0; i < n_basic_blocks; ++i)
631 BASIC_BLOCK (i)->aux = NULL;
632
633 VARRAY_FREE (basic_block_info);
634 }
635
636 n_basic_blocks = count_basic_blocks (f);
637 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
638 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
639
640 /* Size the basic block table. The actual structures will be allocated
641 by find_basic_blocks_1, since we want to keep the structure pointers
642 stable across calls to find_basic_blocks. */
643 /* ??? This whole issue would be much simpler if we called find_basic_blocks
644 exactly once, and thereafter we don't have a single long chain of
645 instructions at all until close to the end of compilation when we
646 actually lay them out. */
647
648 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
649
650 find_basic_blocks_1 (f);
651
652 /* Record the block to which an insn belongs. */
653 /* ??? This should be done another way, by which (perhaps) a label is
654 tagged directly with the basic block that it starts. It is used for
655 more than that currently, but IMO that is the only valid use. */
656
657 max_uid = get_max_uid ();
658 #ifdef AUTO_INC_DEC
659 /* Leave space for insns life_analysis makes in some cases for auto-inc.
660 These cases are rare, so we don't need too much space. */
661 max_uid += max_uid / 10;
662 #endif
663
664 compute_bb_for_insn (max_uid);
665
666 /* Discover the edges of our cfg. */
667 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
668
669 /* Do very simple cleanup now, for the benefit of code that runs between
670 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
671 tidy_fallthru_edges ();
672
673 #ifdef ENABLE_CHECKING
674 verify_flow_info ();
675 #endif
676 timevar_pop (TV_CFG);
677 }
678 \f
679 /* State of basic block as seen by find_sub_basic_blocks. */
680 enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
681
682 #define STATE(BB) (enum state) ((size_t) (BB)->aux)
683 #define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
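/* The aux field of a basic block is temporarily reused to hold one of the
   state values above; find_many_sub_basic_blocks resets every block back
   to state 0 before returning.  */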
684
685 /* Scan basic block BB for possible BB boundaries inside the block
686 and create new basic blocks in the process. */
687
688 static void
689 find_bb_boundaries (bb)
690 basic_block bb;
691 {
692 rtx insn = bb->head;
693 rtx end = bb->end;
694 rtx flow_transfer_insn = NULL_RTX;
695 edge fallthru = NULL;
696
697 if (insn == bb->end)
698 return;
699
700 if (GET_CODE (insn) == CODE_LABEL)
701 insn = NEXT_INSN (insn);
702
703 /* Scan insn chain and try to find new basic block boundaries. */
704 while (1)
705 {
706 enum rtx_code code = GET_CODE (insn);
707
708 /* On code label, split current basic block. */
709 if (code == CODE_LABEL)
710 {
711 fallthru = split_block (bb, PREV_INSN (insn));
712 if (flow_transfer_insn)
713 bb->end = flow_transfer_insn;
714
715 bb = fallthru->dest;
716 remove_edge (fallthru);
717 flow_transfer_insn = NULL_RTX;
718 if (LABEL_ALTERNATE_NAME (insn))
719 make_edge (ENTRY_BLOCK_PTR, bb, 0);
720 }
721
722 /* In case we've previously seen an insn that effects a control
723 flow transfer, split the block. */
724 if (flow_transfer_insn && inside_basic_block_p (insn))
725 {
726 fallthru = split_block (bb, PREV_INSN (insn));
727 bb->end = flow_transfer_insn;
728 bb = fallthru->dest;
729 remove_edge (fallthru);
730 flow_transfer_insn = NULL_RTX;
731 }
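/* In both split cases above the fallthru edge created by split_block is
   removed again: the callers re-run make_edges over the affected blocks
   afterwards, which recreates the correct outgoing edges (a block that now
   ends in a flow transfer insn may have no fallthru at all).  */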
732
733 if (control_flow_insn_p (insn))
734 flow_transfer_insn = insn;
735 if (insn == end)
736 break;
737 insn = NEXT_INSN (insn);
738 }
739
740 /* In case the expander replaced a normal insn by a sequence terminated by
741 a return and a barrier, or by some other sequence that does not behave
742 like an ordinary jump, take care to move the basic block boundary. */
743 if (flow_transfer_insn)
744 bb->end = flow_transfer_insn;
745
746 /* We may have replaced the conditional jump by a conditional jump
747 followed by cleanup code on the fallthru edge, so the outgoing edges
748 may be dead. */
749 purge_dead_edges (bb);
750 }
751
752 /* Assume that the frequency of basic block B is known. Compute the
753 frequencies and probabilities of its outgoing edges. */
754
755 static void
756 compute_outgoing_frequencies (b)
757 basic_block b;
758 {
759 edge e, f;
760
761 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
762 {
763 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
764 int probability;
765
766 if (!note)
767 return;
768
769 probability = INTVAL (XEXP (find_reg_note (b->end,
770 REG_BR_PROB, NULL),
771 0));
772 e = BRANCH_EDGE (b);
773 e->probability = probability;
774 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
775 / REG_BR_PROB_BASE);
776 f = FALLTHRU_EDGE (b);
777 f->probability = REG_BR_PROB_BASE - probability;
778 f->count = b->count - e->count;
779 }
780
781 if (b->succ && !b->succ->succ_next)
782 {
783 e = b->succ;
784 e->probability = REG_BR_PROB_BASE;
785 e->count = b->count;
786 }
787 }
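/* Worked example (illustrative; assumes REG_BR_PROB_BASE is 10000): for a
   two-successor block B with count 100 whose final jump carries a
   REG_BR_PROB note of 4000, the branch edge gets probability 4000 and
   count (100 * 4000 + 5000) / 10000 = 40, and the fallthru edge gets
   probability 6000 and count 100 - 40 = 60.  */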
788
789 /* Assume that someone has emitted code with control flow insns into the
790 basic blocks marked in BLOCKS. Update the CFG data structures. */
791
792 void
793 find_many_sub_basic_blocks (blocks)
794 sbitmap blocks;
795 {
796 int i;
797 int min, max;
798
799 for (i = 0; i < n_basic_blocks; i++)
800 SET_STATE (BASIC_BLOCK (i),
801 TEST_BIT (blocks, i) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
802
803 for (i = 0; i < n_basic_blocks; i++)
804 if (STATE (BASIC_BLOCK (i)) == BLOCK_TO_SPLIT)
805 find_bb_boundaries (BASIC_BLOCK (i));
806
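/* Find the first and last block whose state is not BLOCK_ORIGINAL; the
   edge re-scan below only needs to cover that range of block indices.  */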
807 for (i = 0; i < n_basic_blocks; i++)
808 if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
809 break;
810
811 min = max = i;
812 for (; i < n_basic_blocks; i++)
813 if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
814 max = i;
815
816 /* Now re-scan and wire in all edges. This expects simple (conditional)
817 jumps at the end of each new basic block. */
818 make_edges (NULL, min, max, 1);
819
820 /* Update branch probabilities. Expect only (un)conditional jumps
821 to have been created, with only forward edges present. */
822 for (i = min; i <= max; i++)
823 {
824 edge e;
825 basic_block b = BASIC_BLOCK (i);
826
827 if (STATE (b) == BLOCK_ORIGINAL)
828 continue;
829 if (STATE (b) == BLOCK_NEW)
830 {
831 b->count = 0;
832 b->frequency = 0;
833 for (e = b->pred; e; e=e->pred_next)
834 {
835 b->count += e->count;
836 b->frequency += EDGE_FREQUENCY (e);
837 }
838 }
839
840 compute_outgoing_frequencies (b);
841 }
842
843 for (i = 0; i < n_basic_blocks; i++)
844 SET_STATE (BASIC_BLOCK (i), 0);
845 }
846
847 /* Like the above, but for a single basic block only. */
848
849 void
850 find_sub_basic_blocks (bb)
851 basic_block bb;
852 {
853 int i;
854 int min, max;
855 basic_block next = (bb->index == n_basic_blocks - 1
856 ? NULL : BASIC_BLOCK (bb->index + 1));
857
858 min = bb->index;
859 find_bb_boundaries (bb);
860 max = (next ? next->index : n_basic_blocks) - 1;
861
862 /* Now re-scan and wire in all edges. This expects simple (conditional)
863 jumps at the end of each new basic block. */
864 make_edges (NULL, min, max, 1);
865
866 /* Update branch probabilities. Expect only (un)conditional jumps
867 to have been created, with only forward edges present. */
868 for (i = min; i <= max; i++)
869 {
870 edge e;
871 basic_block b = BASIC_BLOCK (i);
872
873 if (i != min)
874 {
875 b->count = 0;
876 b->frequency = 0;
877 for (e = b->pred; e; e=e->pred_next)
878 {
879 b->count += e->count;
880 b->frequency += EDGE_FREQUENCY (e);
881 }
882 }
883
884 compute_outgoing_frequencies (b);
885 }
886 }
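/* Usage sketch (illustrative only, not part of the original file): a pass
   that expands an insn in block BB into a sequence containing new labels
   or control transfers can rediscover the local CFG like this, assuming
   it runs after find_basic_blocks:

       emit_insn_after (seq, insn);     /* seq is a hypothetical sequence */
       find_sub_basic_blocks (bb);      /* split BB and wire in new edges */

   To repair many blocks at once, set the corresponding bits in an sbitmap
   and call find_many_sub_basic_blocks instead.  */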