/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
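/* REG_BR_PROB_BASE is 10000, so these work out to 99 (just under 1%),
   5000 (50%), 9901 and 10000 respectively.  */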
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1
/* Convert a hitrate given in percent to our internal representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
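/* For example, HITRATE (83) is (83 * 10000 + 50) / 100 == 8300 with
   REG_BR_PROB_BASE == 10000; the + 50 term rounds to nearest should the
   product ever not be a multiple of 100.  */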
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;

  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */

void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
          && any_condjump_p (insn)
          && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}
/* We can not predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
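/* For instance, a block with three non-EH, non-fake successors gets
   (10000 + 1) / 3 == 3333 on each edge; the + nedges / 2 term rounds the
   division to the nearest value.  */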
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
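
        /* The update above is Dempster-Shafer style combination of
           evidence: with prior p = combined_probability and a new
           predictor of hitrate q (both scaled by REG_BR_PROB_BASE), the
           result is p*q / (p*q + (1-p)*(1-q)).  For example, two agreeing
           80% predictions combine to 0.64 / (0.64 + 0.04), roughly 94%.  */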
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
         conditional jump.  */
      if (EDGE_COUNT (bb->succs) > 1)
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
  else if (EDGE_COUNT (bb->succs) > 1)
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      bb_ann (bb)->predictions = NULL;
      if (file)
        fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
        probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
        best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
           + (REG_BR_PROB_BASE - combined_probability)
           * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
        /* If one probability is 0% and one 100%, avoid division by zero.  */
        combined_probability = REG_BR_PROB_BASE / 2;
      else
        combined_probability = (((double) combined_probability) * probability
                                * REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != EDGE_SUCC (bb, 0))
        probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
                       !first_match || best_predictor == predictor);
    }
  bb_ann (bb)->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations
   by analyzing the RTL; otherwise use the tree based approach.  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (rtlsimpleloops)
        {
          iv_analysis_loop_init (loop);
          find_simple_exit (loop, &desc);

          if (desc.simple_p && desc.const_iter)
            {
              int prob;
              niter = desc.niter + 1;
              if (niter == 0)	/* We might overflow here.  */
                niter = desc.niter;

              prob = (REG_BR_PROB_BASE
                      - (REG_BR_PROB_BASE + niter / 2) / niter);
              /* The branch prediction algorithm gives 0 frequency for
                 everything after the end of a loop that has 0 probability
                 of finishing.  */
              if (prob == REG_BR_PROB_BASE)
                prob = REG_BR_PROB_BASE - 1;
              predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
                            prob);
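              /* For example, a loop known to iterate 100 times yields
                 prob == 10000 - (10000 + 50) / 100 == 9900 (with
                 REG_BR_PROB_BASE == 10000), i.e. the loop branch is
                 predicted taken with 99% probability, a 1% chance of
                 exiting per iteration.  */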
            }
        }
      else
        {
          struct tree_niter_desc niter_desc;
          edge *exits;
          unsigned n_exits;

          exits = get_loop_exit_edges (loop, &n_exits);
          for (j = 0; j < n_exits; j++)
            {
              tree niter = NULL;

              if (number_of_iterations_exit (loop, exits[j], &niter_desc))
                niter = niter_desc.niter;
              if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
                niter = loop_niter_by_eval (loop, exits[j]);

              if (TREE_CODE (niter) == INTEGER_CST)
                {
                  int probability;
                  if (host_integerp (niter, 1)
                      && tree_int_cst_lt (niter,
                                          build_int_cstu (NULL_TREE,
                                                          REG_BR_PROB_BASE - 1)))
                    {
                      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
                      probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
                    }
                  else
                    probability = 1;

                  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
                }
            }

          free (exits);
        }

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;
          edge_iterator ei;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better to be handled
             separately.  */
          if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
              || predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          if (bb == loop->latch)
            {
              e = find_edge (loop->latch, loop->header);
              if (e)
                {
                  header_found = 1;
                  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
                }
            }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successors as not taken.  */
          if (!header_found)
            FOR_EACH_EDGE (e, ei, bb->succs)
              if (e->dest->index < 0
                  || !flow_bb_inside_loop_p (loop, e->dest))
                predict_edge
                  (e, PRED_LOOP_EXIT,
                   (REG_BR_PROB_BASE
                    - predictor_info[(int) PRED_LOOP_EXIT].hitrate)
                   / exits);
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    scev_finalize ();
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
/* Statically estimate the probability that a branch will be taken and
   produce estimated profile.  When profile feedback is present, never
   executed portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;
      edge_iterator ei;

      if (! can_predict_insn_p (last_insn))
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care for error returns and others are often used for fast paths
             through the function.  */
          if ((e->dest == EXIT_BLOCK_PTR
               || (EDGE_COUNT (e->dest->succs) == 1
                   && EDGE_SUCC (e->dest, 0)->dest == EXIT_BLOCK_PTR))
              && !predicted_by_p (bb, PRED_NULL_RETURN)
              && !predicted_by_p (bb, PRED_CONST_RETURN)
              && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
              && !last_basic_block_p (e->dest))
            predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

          /* Look for block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              rtx insn;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
                   insn = NEXT_INSN (insn))
                if (CALL_P (insn)
                    /* Constant and pure calls are hardly used to signalize
                       something exceptional.  */
                    && ! CONST_OR_PURE_CALL_P (insn))
                  {
                    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                    break;
                  }
            }
        }
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guess (such as value range
   propagation based prediction), but such tricks shall go to new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
        return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
        {
          /* All the arguments of the PHI node must have the same constant
             value.  */
          int i;
          tree val = NULL, new_val;

          for (i = 0; i < PHI_NUM_ARGS (def); i++)
            {
              tree arg = PHI_ARG_DEF (def, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the expected value of this argument.  However,
                 if we can find an expected constant value for the other
                 PHI args then we can still be sure that this is
                 likely a constant.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def))
                continue;

              new_val = expr_expected_value (arg, visited);
              if (!new_val)
                return NULL;
              if (!val)
                val = new_val;
              else if (!operand_equal_p (val, new_val, false))
                return NULL;
            }
          return val;
        }
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
        return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
        return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
        {
          tree arglist = TREE_OPERAND (expr, 1);
          tree val;
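          /* The second argument of __builtin_expect is the value the user
             expects the first argument to have; e.g. for
             __builtin_expect (x != NULL, 1) the expected value of the whole
             call is the constant 1.  */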
          if (arglist == NULL_TREE
              || TREE_CHAIN (arglist) == NULL_TREE)
            return NULL;
          val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
          if (TREE_CONSTANT (val))
            return val;
          return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
        }
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
        return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  return NULL;
}
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
        {
          tree stmt = bsi_stmt (bi);
          tree fndecl;
          tree arglist;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
              && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
              && TREE_CHAIN (arglist))
            {
              TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
              update_stmt (stmt);
            }
        }
    }
}
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
/* Try to guess whether the return value VAL means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
        {
          *prediction = NOT_TAKEN;
          return PRED_NULL_RETURN;
        }
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
         errors.  */
      if (TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_sgn (val) < 0)
        {
          *prediction = NOT_TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
      /* Constant return values seem to be commonly taken.
         Zero/one often represent booleans so exclude them from the
         heuristics.  */
      if (TREE_CONSTANT (val)
          && (!integer_zerop (val) && !integer_onep (val)))
        {
          *prediction = TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
    }
  return PRED_NO_PREDICTION;
}
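/* Under the heuristics above, "return NULL;" and "return -1;" mark their
   paths as unlikely, while a non-boolean constant return such as
   "return 42;" marks its path as likely.  */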
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
        break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      if (PHI_RESULT (phi) == return_val)
        break;
      phi = next;
    }
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
          predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
                                    direction);
      }
}
1243 (such as noreturn calls) and mark all paths leading to execution
1244 of this basic blocks as unlikely. */
1247 tree_bb_level_predictions (void)
1252 heads
= xmalloc (sizeof (int) * last_basic_block
);
1253 memset (heads
, -1, sizeof (int) * last_basic_block
);
1254 heads
[ENTRY_BLOCK_PTR
->next_bb
->index
] = last_basic_block
;
1256 apply_return_prediction (heads
);
1260 block_stmt_iterator bsi
= bsi_last (bb
);
1262 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1264 tree stmt
= bsi_stmt (bsi
);
1265 switch (TREE_CODE (stmt
))
1268 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
1270 stmt
= TREE_OPERAND (stmt
, 1);
1276 if (call_expr_flags (stmt
) & ECF_NORETURN
)
1277 predict_paths_leading_to (bb
, heads
, PRED_NORETURN
,
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info, LOOP_TREE);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care for error returns and other cases are often used for
             fast paths through the function.  */
          if (e->dest == EXIT_BLOCK_PTR
              && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
              && EDGE_COUNT (bb->preds) > 1)
            {
              edge e1;
              edge_iterator ei1;

              FOR_EACH_EDGE (e1, ei1, bb->preds)
                if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
                    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
                    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
                    && !last_basic_block_p (e1->src))
                  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
            }

          /* Look for block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              block_stmt_iterator bi;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (bi = bsi_start (e->dest); !bsi_end_p (bi);
                   bsi_next (&bi))
                {
                  tree stmt = bsi_stmt (bi);
                  if ((TREE_CODE (stmt) == CALL_EXPR
                       || (TREE_CODE (stmt) == MODIFY_EXPR
                           && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
                      /* Constant and pure calls are hardly used to signalize
                         something exceptional.  */
                      && TREE_SIDE_EFFECTS (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
                    }
                }
            }
        }
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass the profile down to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
          /* Look for expected value notes.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
            {
              ev = NOTE_EXPECTED_VALUE (insn);
              ev_reg = XEXP (ev, 0);
              delete_insn (insn);
            }
          continue;

        case CODE_LABEL:
          /* Never propagate across labels.  */
          ev = NULL_RTX;
          continue;

        case JUMP_INSN:
          /* Look for simple conditional branches.  If we haven't got an
             expected value yet, no point going further.  */
          if (!JUMP_P (insn) || ev == NULL_RTX
              || ! any_condjump_p (insn))
            continue;
          break;

        default:
          /* Look for insns that clobber the EV register.  */
          if (ev && reg_set_p (ev_reg, insn))
            ev = NULL_RTX;
          continue;
        }

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
                (expected_value (eq r70 0))
                (set r71 -1)
                (set r80 (lt r70 r71))
                (set pc (if_then_else (ne r80 0) ...))
         as canonicalize_condition will render this to us as
                (lt r70, r71)
         Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
                                     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
          || GET_CODE (XEXP (cond, 1)) != CONST_INT)
        continue;

      /* Substitute and simplify.  Given that the expression we're
         building involves two constants, we should wind up with either
         true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
                             XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
        abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
              && EDGE_COUNT (bb->succs) == 1
              && EDGE_SUCC (bb, 0)->dest->next_bb == EXIT_BLOCK_PTR));
}
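/* That is, BB counts as last either when it immediately precedes the exit
   block, or when its only successor is a common cleanup block that does.  */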
/* Sets branch probabilities according to PRED and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so -1 may be there in case this was not needed yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
                          enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] < 0)
    {
      /* This is first time we need this field in heads array; so
         find first dominator that we do not post-dominate (we are
         using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
        {
          if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
            break;
          heads[next_ai->index] = ai->index;
          ai = next_ai;
          next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
        }
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
        head = next_ai->index;
      else
        head = heads[next_ai->index];
      while (next_ai != bb)
        {
          next_ai = ai;
          if (heads[ai->index] == ENTRY_BLOCK)
            ai = ENTRY_BLOCK_PTR;
          else
            ai = BASIC_BLOCK (heads[ai->index]);
          heads[next_ai->index] = head;
        }
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= 0
        && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case edge is a loopback edge, the probability edge will be reached
     in case header is.  Estimated number of iterations of the loop can be
     then computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we can not
         look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
         directly.  Do the same for the entry block.  */
      if (i == (unsigned) ENTRY_BLOCK)
        bb = ENTRY_BLOCK_PTR;
      else if (i == (unsigned) EXIT_BLOCK)
        bb = EXIT_BLOCK_PTR;
      else
        bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge to %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (bitmap_bit_p (tovisit, e->src->index)
                && !(e->flags & EDGE_DFS_BACK))
              abort ();
#endif

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency /
                                 REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
        {
          sreal tmp;

          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
                / REG_BR_PROB_BASE); */

          sreal_init (&tmp, e->probability, 0);
          sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
          sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                     &tmp, &real_inv_br_prob_base);
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
        }

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
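/* With BB_FREQ_MAX == 10000 this scales the profile so that the hottest
   basic block gets frequency 10000; a block executed half as often as the
   hottest one ends up with frequency 5000.  */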
/* Return true if function is likely to be expensive, so there is no point to
   optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit on the average number of
   instructions the function may execute and still be considered not
   expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that function contains
     internal loop executing more than BB_FREQ_MAX times or profile feedback
     is available and function has not been executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}
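/* In other words, a function is deemed expensive once the sum of
   bb->frequency over its active insns exceeds
   ENTRY_BLOCK_PTR->frequency * THRESHOLD, i.e. once it executes more than
   THRESHOLD instructions per invocation on average.  */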
/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
         but this requires more work as the frequency needs to match
         for all generated objects so we need to merge the frequency
         of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  NULL,					/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};