/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
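/* For illustration: assuming REG_BR_PROB_BASE is 10000 (its usual value),
   PROB_VERY_UNLIKELY is 99 (just under 1%), PROB_EVEN is 5000 (50%),
   PROB_VERY_LIKELY is 9901 and PROB_ALWAYS is 10000 (100%).  */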
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;		/* Additional flags modifying behavior.  */
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1
/* Recompute hitrate from a percentage to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
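/* For example, assuming REG_BR_PROB_BASE is 10000:
   HITRATE (50) == (50 * 10000 + 50) / 100 == 5000, i.e. a 50% hitrate.
   The "+ 50" rounds to the nearest unit instead of truncating.  */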
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
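/* For illustration: with --param hot-bb-frequency-fraction=1000, a block
   is considered hot only when its estimated frequency exceeds
   BB_FREQ_MAX / 1000, i.e. when it is expected to run roughly at least
   once per thousand executions of the hottest block in the function.  */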
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
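/* Example of the rounding above: with profile_info->runs == 10, a block
   with count 4 gives (4 + 5) / 10 == 0 and is considered never executed,
   while count 5 gives 1 and is not.  */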
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
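/* For example, with three eligible successor edges and REG_BR_PROB_BASE of
   10000, each edge gets (10000 + 1) / 3 == 3333.  The per-edge rounding
   means the probabilities need not sum to exactly REG_BR_PROB_BASE.  */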
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
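  /* Worked example of the Dempster-Shafer combination above, assuming
     REG_BR_PROB_BASE is 10000: folding a new 80% (8000) prediction into an
     accumulated 70% (7000) gives
       d = 7000 * 8000 + 3000 * 2000 = 62000000
       combined = 7000.0 * 8000 * 10000 / d + 0.5 ~= 9032,
     i.e. two agreeing predictors reinforce each other to about 90.3%.  */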
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (EDGE_COUNT (bb->succs) > 1)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (EDGE_COUNT (bb->succs) > 1)
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb_ann (bb)->predictions = NULL;
      if (file)
	fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb_ann (bb)->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count number of iterations by
   analyzing RTL, otherwise use tree based approach.  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;
      edge *exits;

      exits = get_loop_exit_edges (loop, &n_exits);

      if (rtlsimpleloops)
	{
	  iv_analysis_loop_init (loop);
	  find_simple_exit (loop, &desc);

	  if (desc.simple_p && desc.const_iter)
	    {
	      int prob;
	      niter = desc.niter + 1;
	      if (niter == 0)	/* We might overflow here.  */
		niter = desc.niter;

	      prob = (REG_BR_PROB_BASE
		      - (REG_BR_PROB_BASE + niter / 2) / niter);
	      /* Branch prediction algorithm gives 0 frequency for everything
		 after the end of loop for loop having 0 probability to finish.  */
	      if (prob == REG_BR_PROB_BASE)
		prob = REG_BR_PROB_BASE - 1;
	      predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			    prob);
	    }
	}
      else
	{
	  struct tree_niter_desc niter_desc;

	  for (j = 0; j < n_exits; j++)
	    {
	      tree niter = NULL;

	      if (number_of_iterations_exit (loop, exits[j], &niter_desc))
		niter = niter_desc.niter;
	      if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
		niter = loop_niter_by_eval (loop, exits[j]);

	      if (TREE_CODE (niter) == INTEGER_CST)
		{
		  int probability;

		  if (host_integerp (niter, 1)
		      && tree_int_cst_lt (niter,
					  build_int_cstu (NULL_TREE,
							  REG_BR_PROB_BASE - 1)))
		    {
		      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		      probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
		    }
		  else
		    probability = 1;

		  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
		}
	    }

	  free (exits);
	}

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info[(int) PRED_LOOP_EXIT].hitrate)
		   / n_exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    scev_finalize ();
}
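/* Worked example of the iteration-count heuristics above, assuming
   REG_BR_PROB_BASE is 10000: a loop known to iterate 10 times gets
     prob = 10000 - (10000 + 5) / 10 = 9000,
   i.e. the edge staying in the loop is predicted with 90% probability,
   matching the expected one-exit-in-ten behavior.  */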
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Statically estimate the probability that a branch will be taken and
   produce estimated profile.  When profile feedback is present,
   never-executed portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;
      edge_iterator ei;

      if (! can_predict_insn_p (last_insn))
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care for error returns and other cases are often used for
	     fast paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (EDGE_COUNT (e->dest->succs) == 1
		   && EDGE_SUCC (e->dest, 0)->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (CALL_P (insn)
		    /* Constant and pure calls are hardly used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guess (such as value range
   propagation based prediction), but such tricks shall go to new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     expected value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);
	  tree val;

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	  if (TREE_CONSTANT (val))
	    return val;
	  return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;

      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;

      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	}
    }
}
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Try to guess whether the return value means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      if (PHI_RESULT (phi) == return_val)
	break;
      phi = next;
    }
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}
1240 (such as noreturn calls) and mark all paths leading to execution
1241 of this basic blocks as unlikely. */
1244 tree_bb_level_predictions (void)
1249 heads
= xmalloc (sizeof (int) * last_basic_block
);
1250 memset (heads
, -1, sizeof (int) * last_basic_block
);
1251 heads
[ENTRY_BLOCK_PTR
->next_bb
->index
] = last_basic_block
;
1253 apply_return_prediction (heads
);
1257 block_stmt_iterator bsi
= bsi_last (bb
);
1259 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1261 tree stmt
= bsi_stmt (bsi
);
1262 switch (TREE_CODE (stmt
))
1265 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
1267 stmt
= TREE_OPERAND (stmt
, 1);
1273 if (call_expr_flags (stmt
) & ECF_NORETURN
)
1274 predict_paths_leading_to (bb
, heads
, PRED_NORETURN
,
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care for error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && EDGE_COUNT (bb->preds) > 1)
	    {
	      edge e1;
	      edge_iterator ei1;

	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass down the profile to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (!JUMP_P (insn) || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
				     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && EDGE_COUNT (bb->succs) == 1
	      && EDGE_SUCC (bb, 0)->dest->next_bb == EXIT_BLOCK_PTR));
}
/* Sets branch probabilities according to PREDiction and
   FLAGS.  HEADS[bb->index] should be the index of the basic block in which
   we need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so -1 may be there in case this was not needed yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] < 0)
    {
      /* This is first time we need this field in heads array; so
	 find first dominator that we do not post-dominate (we are
	 using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case edge is a loopback edge, the probability edge will be reached
     in case header is.  Estimated number of iterations of the loop can be
     then computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      if (i == (unsigned) ENTRY_BLOCK)
	bb = ENTRY_BLOCK_PTR;
      else if (i == (unsigned) EXIT_BLOCK)
	bb = EXIT_BLOCK_PTR;
      else
	bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (bitmap_bit_p (tovisit, e->src->index)
		&& !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/*  frequency += (e->probability
				  * BLOCK_INFO (e->src)->frequency /
				  REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
	     / REG_BR_PROB_BASE); */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
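/* Worked example of the cyclic-probability computation above: if a loop's
   back edge is expected to be taken with probability 0.9, the header
   frequency becomes frequency / (1 - 0.9), i.e. ten times the frequency
   flowing in from outside the loop, matching an expected ten iterations
   per entry.  */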
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static gcov_type
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
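/* For example, assuming BB_FREQ_MAX is 10000 and the hottest block was
   counted 1000 times, a block counted 250 times gets frequency
   (250 * 10000 + 500) / 1000 == 2500.  */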
/* Return true if function is likely to be expensive, so there is no point to
   optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit on the average number of
   instructions the function may execute and still be considered not
   expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that function contains
     internal loop executing more than BB_FREQ_MAX times or profile feedback
     is available and function has not been executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
1792 estimate_bb_frequencies (struct loops
*loops
)
1797 if (!flag_branch_probabilities
|| !counts_to_freqs ())
1799 static int real_values_initialized
= 0;
1802 if (!real_values_initialized
)
1804 real_values_initialized
= 1;
1805 sreal_init (&real_zero
, 0, 0);
1806 sreal_init (&real_one
, 1, 0);
1807 sreal_init (&real_br_prob_base
, REG_BR_PROB_BASE
, 0);
1808 sreal_init (&real_bb_freq_max
, BB_FREQ_MAX
, 0);
1809 sreal_init (&real_one_half
, 1, -1);
1810 sreal_div (&real_inv_br_prob_base
, &real_one
, &real_br_prob_base
);
1811 sreal_sub (&real_almost_one
, &real_one
, &real_inv_br_prob_base
);
1814 mark_dfs_back_edges ();
1816 EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->probability
= REG_BR_PROB_BASE
;
1818 /* Set up block info for each basic block. */
1819 tovisit
= BITMAP_ALLOC (NULL
);
1820 alloc_aux_for_blocks (sizeof (struct block_info_def
));
1821 alloc_aux_for_edges (sizeof (struct edge_info_def
));
1822 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1827 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1829 sreal_init (&EDGE_INFO (e
)->back_edge_prob
, e
->probability
, 0);
1830 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
1831 &EDGE_INFO (e
)->back_edge_prob
,
1832 &real_inv_br_prob_base
);
1836 /* First compute probabilities locally for each loop from innermost
1837 to outermost to examine probabilities for back edges. */
1838 estimate_loops_at_level (loops
->tree_root
, tovisit
);
1840 memcpy (&freq_max
, &real_zero
, sizeof (real_zero
));
1842 if (sreal_compare (&freq_max
, &BLOCK_INFO (bb
)->frequency
) < 0)
1843 memcpy (&freq_max
, &BLOCK_INFO (bb
)->frequency
, sizeof (freq_max
));
1845 sreal_div (&freq_max
, &real_bb_freq_max
, &freq_max
);
1846 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1850 sreal_mul (&tmp
, &BLOCK_INFO (bb
)->frequency
, &freq_max
);
1851 sreal_add (&tmp
, &tmp
, &real_one_half
);
1852 bb
->frequency
= sreal_to_int (&tmp
);
1855 free_aux_for_blocks ();
1856 free_aux_for_edges ();
1857 BITMAP_FREE (tovisit
);
1859 compute_function_frequency ();
1860 if (flag_reorder_functions
)
1861 choose_function_section ();
/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  NULL,					/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
};