/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
#include "pointer-set.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
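
/* For illustration, with REG_BR_PROB_BASE at its usual value of 10000 the
   macros above evaluate to
     PROB_VERY_UNLIKELY = 99     (~1%)
     PROB_EVEN          = 5000   (50%)
     PROB_VERY_LIKELY   = 9901   (~99%)
     PROB_ALWAYS        = 10000  (100%)
   i.e. every probability in this file is a fixed-point fraction of
   REG_BR_PROB_BASE rather than a floating-point value.  */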
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (const_rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute a hitrate given in percent into our fixed-point
   representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
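
/* For example, with REG_BR_PROB_BASE == 10000, HITRATE (99) is
   (99 * 10000 + 50) / 100 == 9900 and HITRATE (50) is 5000, i.e. the
   percentage is rescaled to the REG_BR_PROB fixed-point range with
   rounding to nearest.  */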
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (int freq)
{
  if (!profile_info || !flag_branch_probabilities)
    {
      if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
	return true;
    }
  if (profile_status == PROFILE_ABSENT)
    return true;
  if (freq < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  return maybe_hot_frequency_p (bb->frequency);
}
/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_info && flag_branch_probabilities
      && (e->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  return false;
}
/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  return (optimize_size
	  || fun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED);
}

/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (basic_block bb)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the current instruction should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the current instruction should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */

void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static struct pointer_map_t *bb_predictions;
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  void **preds = pointer_map_contains (bb_predictions, bb);

  if (!preds)
    return false;

  for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting the branch outcome (i.e.
   taken/not taken), which it gets right slightly over 75% of the time.
   It is however notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This
   macro should be able to distinguish those, but at the moment it simply
   checks for the noreturn heuristic, which is the only one giving a
   probability over 99% or below 1%.  In the future we might want to
   propagate reliability information across the CFG if we find this
   information useful in multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
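
/* With the default REG_BR_PROB_BASE of 10000 this means a guessed
   probability is only trusted when it is at most HITRATE (1) == 100 (1%)
   or at least HITRATE (99) == 9900 (99%); probabilities read from real
   profile feedback are always trusted.  */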
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      void **preds = pointer_map_insert (bb_predictions, e->src);

      i->ep_next = (struct edge_prediction *) *preds;
      *preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  void **preds;

  if (!bb_predictions)
    return;

  preds = pointer_map_contains (bb_predictions, e->src);

  if (preds)
    {
      struct edge_prediction **prediction = (struct edge_prediction **) preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  void **preds = pointer_map_contains (bb_predictions, bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = (struct edge_prediction *) *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We can not predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
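
/* For instance, a block with three non-EH, non-fake successors gets
   (REG_BR_PROB_BASE + 1) / 3 == 3333 on each such edge (with the usual
   REG_BR_PROB_BASE of 10000); the + nedges / 2 term merely rounds the
   division to nearest.  */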
560 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
561 note if not already present. Remove now useless REG_BR_PRED notes. */
564 combine_predictions_for_insn (rtx insn
, basic_block bb
)
569 int best_probability
= PROB_EVEN
;
570 int best_predictor
= END_PREDICTORS
;
571 int combined_probability
= REG_BR_PROB_BASE
/ 2;
573 bool first_match
= false;
576 if (!can_predict_insn_p (insn
))
578 set_even_probabilities (bb
);
582 prob_note
= find_reg_note (insn
, REG_BR_PROB
, 0);
583 pnote
= ®_NOTES (insn
);
585 fprintf (dump_file
, "Predictions for insn %i bb %i\n", INSN_UID (insn
),
588 /* We implement "first match" heuristics and use probability guessed
589 by predictor with smallest index. */
590 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
591 if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
593 int predictor
= INTVAL (XEXP (XEXP (note
, 0), 0));
594 int probability
= INTVAL (XEXP (XEXP (note
, 0), 1));
597 if (best_predictor
> predictor
)
598 best_probability
= probability
, best_predictor
= predictor
;
600 d
= (combined_probability
* probability
601 + (REG_BR_PROB_BASE
- combined_probability
)
602 * (REG_BR_PROB_BASE
- probability
));
604 /* Use FP math to avoid overflows of 32bit integers. */
606 /* If one probability is 0% and one 100%, avoid division by zero. */
607 combined_probability
= REG_BR_PROB_BASE
/ 2;
609 combined_probability
= (((double) combined_probability
) * probability
610 * REG_BR_PROB_BASE
/ d
+ 0.5);
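	  /* Worked example, assuming REG_BR_PROB_BASE == 10000: combining a
	     running probability of 7000 (70%) with a new prediction of 8000
	     (80%) gives d = 7000 * 8000 + 3000 * 2000 == 62000000 and
	     combined = 7000 * 8000 * 10000 / 62000000 + 0.5 ~= 9032, i.e.
	     two agreeing predictors reinforce each other to ~90%, which is
	     the Dempster-Shafer rule for combining independent evidence.  */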
  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either the
     first match or Dempster-Shafer theory depending on the flags.  */
617 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
621 dump_prediction (dump_file
, PRED_NO_PREDICTION
,
622 combined_probability
, bb
, true);
625 dump_prediction (dump_file
, PRED_DS_THEORY
, combined_probability
,
627 dump_prediction (dump_file
, PRED_FIRST_MATCH
, best_probability
,
632 combined_probability
= best_probability
;
633 dump_prediction (dump_file
, PRED_COMBINED
, combined_probability
, bb
, true);
637 if (REG_NOTE_KIND (*pnote
) == REG_BR_PRED
)
639 int predictor
= INTVAL (XEXP (XEXP (*pnote
, 0), 0));
640 int probability
= INTVAL (XEXP (XEXP (*pnote
, 0), 1));
642 dump_prediction (dump_file
, predictor
, probability
, bb
,
643 !first_match
|| best_predictor
== predictor
);
644 *pnote
= XEXP (*pnote
, 1);
647 pnote
= &XEXP (*pnote
, 1);
652 add_reg_note (insn
, REG_BR_PROB
, GEN_INT (combined_probability
));
  /* Save the prediction into CFG in case we are seeing a non-degenerated
     conditional jump.  */
656 if (!single_succ_p (bb
))
658 BRANCH_EDGE (bb
)->probability
= combined_probability
;
659 FALLTHRU_EDGE (bb
)->probability
660 = REG_BR_PROB_BASE
- combined_probability
;
663 else if (!single_succ_p (bb
))
665 int prob
= INTVAL (XEXP (prob_note
, 0));
667 BRANCH_EDGE (bb
)->probability
= prob
;
668 FALLTHRU_EDGE (bb
)->probability
= REG_BR_PROB_BASE
- prob
;
671 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
674 /* Combine predictions into single probability and store them into CFG.
675 Remove now useless prediction entries. */
678 combine_predictions_for_bb (basic_block bb
)
680 int best_probability
= PROB_EVEN
;
681 int best_predictor
= END_PREDICTORS
;
682 int combined_probability
= REG_BR_PROB_BASE
/ 2;
684 bool first_match
= false;
686 struct edge_prediction
*pred
;
688 edge e
, first
= NULL
, second
= NULL
;
692 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
693 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
696 if (first
&& !second
)
  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
711 set_even_probabilities (bb
);
712 clear_bb_predictions (bb
);
714 fprintf (dump_file
, "%i edges in bb %i predicted to even probabilities\n",
720 fprintf (dump_file
, "Predictions for bb %i\n", bb
->index
);
722 preds
= pointer_map_contains (bb_predictions
, bb
);
725 /* We implement "first match" heuristics and use probability guessed
726 by predictor with smallest index. */
727 for (pred
= (struct edge_prediction
*) *preds
; pred
; pred
= pred
->ep_next
)
729 int predictor
= pred
->ep_predictor
;
730 int probability
= pred
->ep_probability
;
732 if (pred
->ep_edge
!= first
)
733 probability
= REG_BR_PROB_BASE
- probability
;
736 if (best_predictor
> predictor
)
737 best_probability
= probability
, best_predictor
= predictor
;
739 d
= (combined_probability
* probability
740 + (REG_BR_PROB_BASE
- combined_probability
)
741 * (REG_BR_PROB_BASE
- probability
));
743 /* Use FP math to avoid overflows of 32bit integers. */
745 /* If one probability is 0% and one 100%, avoid division by zero. */
746 combined_probability
= REG_BR_PROB_BASE
/ 2;
748 combined_probability
= (((double) combined_probability
)
750 * REG_BR_PROB_BASE
/ d
+ 0.5);
  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either the
     first match or Dempster-Shafer theory depending on the flags.  */
758 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
762 dump_prediction (dump_file
, PRED_NO_PREDICTION
, combined_probability
, bb
, true);
765 dump_prediction (dump_file
, PRED_DS_THEORY
, combined_probability
, bb
,
767 dump_prediction (dump_file
, PRED_FIRST_MATCH
, best_probability
, bb
,
772 combined_probability
= best_probability
;
773 dump_prediction (dump_file
, PRED_COMBINED
, combined_probability
, bb
, true);
777 for (pred
= (struct edge_prediction
*) *preds
; pred
; pred
= pred
->ep_next
)
779 int predictor
= pred
->ep_predictor
;
780 int probability
= pred
->ep_probability
;
782 if (pred
->ep_edge
!= EDGE_SUCC (bb
, 0))
783 probability
= REG_BR_PROB_BASE
- probability
;
784 dump_prediction (dump_file
, predictor
, probability
, bb
,
785 !first_match
|| best_predictor
== predictor
);
788 clear_bb_predictions (bb
);
792 first
->probability
= combined_probability
;
793 second
->probability
= REG_BR_PROB_BASE
- combined_probability
;
797 /* Predict edge probabilities by exploiting loop structure. */
  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
809 FOR_EACH_LOOP (li
, loop
, 0)
811 basic_block bb
, *bbs
;
813 VEC (edge
, heap
) *exits
;
814 struct tree_niter_desc niter_desc
;
817 exits
= get_loop_exit_edges (loop
);
818 n_exits
= VEC_length (edge
, exits
);
820 for (j
= 0; VEC_iterate (edge
, exits
, j
, ex
); j
++)
823 HOST_WIDE_INT nitercst
;
824 int max
= PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS
);
826 enum br_predictor predictor
;
828 if (number_of_iterations_exit (loop
, ex
, &niter_desc
, false))
829 niter
= niter_desc
.niter
;
830 if (!niter
|| TREE_CODE (niter_desc
.niter
) != INTEGER_CST
)
831 niter
= loop_niter_by_eval (loop
, ex
);
833 if (TREE_CODE (niter
) == INTEGER_CST
)
835 if (host_integerp (niter
, 1)
836 && compare_tree_int (niter
, max
-1) == -1)
837 nitercst
= tree_low_cst (niter
, 1) + 1;
840 predictor
= PRED_LOOP_ITERATIONS
;
842 /* If we have just one exit and we can derive some information about
843 the number of iterations of the loop from the statements inside
844 the loop, use it to predict this exit. */
845 else if (n_exits
== 1)
847 nitercst
= estimated_loop_iterations_int (loop
, false);
853 predictor
= PRED_LOOP_ITERATIONS_GUESSED
;
858 probability
= ((REG_BR_PROB_BASE
+ nitercst
/ 2) / nitercst
);
859 predict_edge (ex
, predictor
, probability
);
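	  /* E.g. a loop for which nitercst is 100 gets its exit edge
	     predicted with probability (10000 + 50) / 100 == 100, i.e. ~1%
	     taken per iteration, assuming the usual REG_BR_PROB_BASE of
	     10000.  */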
861 VEC_free (edge
, heap
, exits
);
863 bbs
= get_loop_body (loop
);
865 for (j
= 0; j
< loop
->num_nodes
; j
++)
867 int header_found
= 0;
	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
877 if (predicted_by_p (bb
, PRED_CONTINUE
))
880 /* Loop branch heuristics - predict an edge back to a
881 loop's head as taken. */
882 if (bb
== loop
->latch
)
884 e
= find_edge (loop
->latch
, loop
->header
);
888 predict_edge_def (e
, PRED_LOOP_BRANCH
, TAKEN
);
892 /* Loop exit heuristics - predict an edge exiting the loop if the
893 conditional has no loop header successors as not taken. */
895 /* If we already used more reliable loop exit predictors, do not
896 bother with PRED_LOOP_EXIT. */
897 && !predicted_by_p (bb
, PRED_LOOP_ITERATIONS_GUESSED
)
898 && !predicted_by_p (bb
, PRED_LOOP_ITERATIONS
))
	    /* For a loop with many exits we don't want to predict all exits
	       with a pretty large probability, because if all exits are
	       considered in a row, the loop would be predicted to iterate
	       almost never.  The code to divide the probability by the
	       number of exits is very rough.  It should compute the number
	       of exits taken on each path through the function (not the
	       overall number of exits, which might be a lot higher for loops
	       with wide switch statements in them) and compute the n-th root
	       of it.

	       We limit the minimal probability to 2% to avoid
	       EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
	       as this was causing a regression in the perl benchmark
	       containing such a construct.  */
914 int probability
= ((REG_BR_PROB_BASE
915 - predictor_info
[(int) PRED_LOOP_EXIT
].hitrate
)
917 if (probability
< HITRATE (2))
918 probability
= HITRATE (2);
919 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
920 if (e
->dest
->index
< NUM_FIXED_BLOCKS
921 || !flow_bb_inside_loop_p (loop
, e
->dest
))
922 predict_edge (e
, PRED_LOOP_EXIT
, probability
);
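	    /* For illustration, if the PRED_LOOP_EXIT hitrate were 90% and
	       the loop had two exits, each exit edge would get
	       (10000 - 9000) / 2 == 500, i.e. 5%, and the HITRATE (2) == 200
	       clamp keeps loops with very many exits from being predicted
	       to exit almost immediately.  */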
926 /* Free basic blocks from get_loop_body. */
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
936 bb_estimate_probability_locally (basic_block bb
)
938 rtx last_insn
= BB_END (bb
);
941 if (! can_predict_insn_p (last_insn
))
943 cond
= get_condition (last_insn
, NULL
, false, false);
947 /* Try "pointer heuristic."
948 A comparison ptr == 0 is predicted as false.
949 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
950 if (COMPARISON_P (cond
)
951 && ((REG_P (XEXP (cond
, 0)) && REG_POINTER (XEXP (cond
, 0)))
952 || (REG_P (XEXP (cond
, 1)) && REG_POINTER (XEXP (cond
, 1)))))
954 if (GET_CODE (cond
) == EQ
)
955 predict_insn_def (last_insn
, PRED_POINTER
, NOT_TAKEN
);
956 else if (GET_CODE (cond
) == NE
)
957 predict_insn_def (last_insn
, PRED_POINTER
, TAKEN
);
961 /* Try "opcode heuristic."
962 EQ tests are usually false and NE tests are usually true. Also,
963 most quantities are positive, so we can make the appropriate guesses
964 about signed comparisons against zero. */
965 switch (GET_CODE (cond
))
968 /* Unconditional branch. */
969 predict_insn_def (last_insn
, PRED_UNCONDITIONAL
,
970 cond
== const0_rtx
? NOT_TAKEN
: TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
978 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
980 /* Comparisons with 0 are often used for booleans and there is
981 nothing useful to predict about them. */
982 else if (XEXP (cond
, 1) == const0_rtx
983 || XEXP (cond
, 0) == const0_rtx
)
986 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, NOT_TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
994 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
996 /* Comparisons with 0 are often used for booleans and there is
997 nothing useful to predict about them. */
998 else if (XEXP (cond
, 1) == const0_rtx
999 || XEXP (cond
, 0) == const0_rtx
)
1002 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, TAKEN
);
1006 predict_insn_def (last_insn
, PRED_FPOPCODE
, TAKEN
);
1010 predict_insn_def (last_insn
, PRED_FPOPCODE
, NOT_TAKEN
);
1015 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
1016 || XEXP (cond
, 1) == constm1_rtx
)
1017 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, NOT_TAKEN
);
1022 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
1023 || XEXP (cond
, 1) == constm1_rtx
)
1024 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, TAKEN
);
1032 /* Set edge->probability for each successor edge of BB. */
1034 guess_outgoing_edge_probabilities (basic_block bb
)
1036 bb_estimate_probability_locally (bb
);
1037 combine_predictions_for_insn (BB_END (bb
), bb
);
1040 static tree
expr_expected_value (tree
, bitmap
);
1042 /* Helper function for expr_expected_value. */
1045 expr_expected_value_1 (tree type
, tree op0
, enum tree_code code
, tree op1
, bitmap visited
)
1049 if (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
)
1051 if (TREE_CONSTANT (op0
))
1054 if (code
!= SSA_NAME
)
1057 def
= SSA_NAME_DEF_STMT (op0
);
1059 /* If we were already here, break the infinite cycle. */
1060 if (bitmap_bit_p (visited
, SSA_NAME_VERSION (op0
)))
1062 bitmap_set_bit (visited
, SSA_NAME_VERSION (op0
));
1064 if (gimple_code (def
) == GIMPLE_PHI
)
1066 /* All the arguments of the PHI node must have the same constant
1068 int i
, n
= gimple_phi_num_args (def
);
1069 tree val
= NULL
, new_val
;
1071 for (i
= 0; i
< n
; i
++)
1073 tree arg
= PHI_ARG_DEF (def
, i
);
	  /* If this PHI has itself as an argument, we cannot
	     determine the expected value of this argument.  However,
	     if we can find an expected constant value for the other
	     PHI args then we can still be sure that this is
	     likely a constant.  So be optimistic and just
	     continue with the next argument.  */
1081 if (arg
== PHI_RESULT (def
))
1084 new_val
= expr_expected_value (arg
, visited
);
1089 else if (!operand_equal_p (val
, new_val
, false))
1094 if (is_gimple_assign (def
))
1096 if (gimple_assign_lhs (def
) != op0
)
1099 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def
)),
1100 gimple_assign_rhs1 (def
),
1101 gimple_assign_rhs_code (def
),
1102 gimple_assign_rhs2 (def
),
1106 if (is_gimple_call (def
))
1108 tree decl
= gimple_call_fndecl (def
);
1111 if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
1112 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_EXPECT
)
1116 if (gimple_call_num_args (def
) != 2)
1118 val
= gimple_call_arg (def
, 0);
1119 if (TREE_CONSTANT (val
))
1121 return gimple_call_arg (def
, 1);
1128 if (get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
)
1131 op0
= expr_expected_value (op0
, visited
);
1134 op1
= expr_expected_value (op1
, visited
);
1137 res
= fold_build2 (code
, type
, op0
, op1
);
1138 if (TREE_CONSTANT (res
))
1142 if (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
)
1145 op0
= expr_expected_value (op0
, visited
);
1148 res
= fold_build1 (code
, type
, op0
);
1149 if (TREE_CONSTANT (res
))
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement a more involved value guess (such as value range
   propagation based prediction), but such tricks belong in a separate
   pass.  */
static tree
expr_expected_value (tree expr, bitmap visited)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    return expr;

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
				op0, code, op1, visited);
}
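
/* As a usage sketch: for source such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   the __builtin_expect call is found through the SSA def chains walked
   above and its second argument (1) is returned as the expected value of
   the condition; tree_predict_by_opcode below then turns that into a
   PRED_BUILTIN_EXPECT hint on the corresponding edge.  */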
1179 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
1180 we no longer need. */
1182 strip_predict_hints (void)
1190 gimple_stmt_iterator bi
;
1191 for (bi
= gsi_start_bb (bb
); !gsi_end_p (bi
);)
1193 gimple stmt
= gsi_stmt (bi
);
1195 if (gimple_code (stmt
) == GIMPLE_PREDICT
)
1197 gsi_remove (&bi
, true);
1200 else if (gimple_code (stmt
) == GIMPLE_CALL
)
1202 tree fndecl
= gimple_call_fndecl (stmt
);
1205 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1206 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
1207 && gimple_call_num_args (stmt
) == 2)
1209 var
= gimple_call_lhs (stmt
);
1210 ass_stmt
= gimple_build_assign (var
, gimple_call_arg (stmt
, 0));
1212 gsi_replace (&bi
, ass_stmt
, true);
1221 /* Predict using opcode of the last statement in basic block. */
1223 tree_predict_by_opcode (basic_block bb
)
1225 gimple stmt
= last_stmt (bb
);
1234 if (!stmt
|| gimple_code (stmt
) != GIMPLE_COND
)
1236 FOR_EACH_EDGE (then_edge
, ei
, bb
->succs
)
1237 if (then_edge
->flags
& EDGE_TRUE_VALUE
)
1239 op0
= gimple_cond_lhs (stmt
);
1240 op1
= gimple_cond_rhs (stmt
);
1241 cmp
= gimple_cond_code (stmt
);
1242 type
= TREE_TYPE (op0
);
1243 visited
= BITMAP_ALLOC (NULL
);
1244 val
= expr_expected_value_1 (boolean_type_node
, op0
, cmp
, op1
, visited
);
1245 BITMAP_FREE (visited
);
1248 if (integer_zerop (val
))
1249 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, NOT_TAKEN
);
1251 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, TAKEN
);
1254 /* Try "pointer heuristic."
1255 A comparison ptr == 0 is predicted as false.
1256 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1257 if (POINTER_TYPE_P (type
))
1260 predict_edge_def (then_edge
, PRED_TREE_POINTER
, NOT_TAKEN
);
1261 else if (cmp
== NE_EXPR
)
1262 predict_edge_def (then_edge
, PRED_TREE_POINTER
, TAKEN
);
1266 /* Try "opcode heuristic."
1267 EQ tests are usually false and NE tests are usually true. Also,
1268 most quantities are positive, so we can make the appropriate guesses
1269 about signed comparisons against zero. */
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
1277 if (FLOAT_TYPE_P (type
))
1279 /* Comparisons with 0 are often used for booleans and there is
1280 nothing useful to predict about them. */
1281 else if (integer_zerop (op0
) || integer_zerop (op1
))
1284 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, NOT_TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
1292 if (FLOAT_TYPE_P (type
))
1294 /* Comparisons with 0 are often used for booleans and there is
1295 nothing useful to predict about them. */
1296 else if (integer_zerop (op0
)
1297 || integer_zerop (op1
))
1300 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, TAKEN
);
1304 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, TAKEN
);
1307 case UNORDERED_EXPR
:
1308 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, NOT_TAKEN
);
1313 if (integer_zerop (op1
)
1314 || integer_onep (op1
)
1315 || integer_all_onesp (op1
)
1318 || real_minus_onep (op1
))
1319 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, NOT_TAKEN
);
1324 if (integer_zerop (op1
)
1325 || integer_onep (op1
)
1326 || integer_all_onesp (op1
)
1329 || real_minus_onep (op1
))
1330 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, TAKEN
);
/* Try to guess whether the return value means an error code.  */
1340 static enum br_predictor
1341 return_prediction (tree val
, enum prediction
*prediction
)
1345 return PRED_NO_PREDICTION
;
1346 /* Different heuristics for pointers and scalars. */
1347 if (POINTER_TYPE_P (TREE_TYPE (val
)))
1349 /* NULL is usually not returned. */
1350 if (integer_zerop (val
))
1352 *prediction
= NOT_TAKEN
;
1353 return PRED_NULL_RETURN
;
1356 else if (INTEGRAL_TYPE_P (TREE_TYPE (val
)))
      /* Negative return values are often used to indicate
	 errors.  */
1360 if (TREE_CODE (val
) == INTEGER_CST
1361 && tree_int_cst_sgn (val
) < 0)
1363 *prediction
= NOT_TAKEN
;
1364 return PRED_NEGATIVE_RETURN
;
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
1369 if (TREE_CONSTANT (val
)
1370 && (!integer_zerop (val
) && !integer_onep (val
)))
1372 *prediction
= TAKEN
;
1373 return PRED_CONST_RETURN
;
1376 return PRED_NO_PREDICTION
;
/* Find the basic block with the return expression and look for a possible
   return value, trying to apply the RETURN_PREDICTION heuristics.  */
1382 apply_return_prediction (void)
1384 gimple return_stmt
= NULL
;
1388 int phi_num_args
, i
;
1389 enum br_predictor pred
;
1390 enum prediction direction
;
1393 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
1395 return_stmt
= last_stmt (e
->src
);
1397 && gimple_code (return_stmt
) == GIMPLE_RETURN
)
1402 return_val
= gimple_return_retval (return_stmt
);
1405 if (TREE_CODE (return_val
) != SSA_NAME
1406 || !SSA_NAME_DEF_STMT (return_val
)
1407 || gimple_code (SSA_NAME_DEF_STMT (return_val
)) != GIMPLE_PHI
)
1409 phi
= SSA_NAME_DEF_STMT (return_val
);
1410 phi_num_args
= gimple_phi_num_args (phi
);
1411 pred
= return_prediction (PHI_ARG_DEF (phi
, 0), &direction
);
  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
1416 for (i
= 1; i
< phi_num_args
; i
++)
1417 if (pred
!= return_prediction (PHI_ARG_DEF (phi
, i
), &direction
))
1419 if (i
!= phi_num_args
)
1420 for (i
= 0; i
< phi_num_args
; i
++)
1422 pred
= return_prediction (PHI_ARG_DEF (phi
, i
), &direction
);
1423 if (pred
!= PRED_NO_PREDICTION
)
1424 predict_paths_leading_to (gimple_phi_arg_edge (phi
, i
)->src
, pred
,
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */
1434 tree_bb_level_predictions (void)
1438 apply_return_prediction ();
1442 gimple_stmt_iterator gsi
;
1444 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1446 gimple stmt
= gsi_stmt (gsi
);
1449 if (is_gimple_call (stmt
))
1451 if (gimple_call_flags (stmt
) & ECF_NORETURN
)
1452 predict_paths_leading_to (bb
, PRED_NORETURN
,
1454 decl
= gimple_call_fndecl (stmt
);
1456 && lookup_attribute ("cold",
1457 DECL_ATTRIBUTES (decl
)))
1458 predict_paths_leading_to (bb
, PRED_COLD_FUNCTION
,
1461 else if (gimple_code (stmt
) == GIMPLE_PREDICT
)
1463 predict_paths_leading_to (bb
, gimple_predict_predictor (stmt
),
1464 gimple_predict_outcome (stmt
));
1465 /* Keep GIMPLE_PREDICT around so early inlining will propagate
1466 hints to callers. */
1472 #ifdef ENABLE_CHECKING
/* Callback for pointer_map_traverse; asserts that the pointer map is
   empty.  */
1478 assert_is_empty (const void *key ATTRIBUTE_UNUSED
, void **value
,
1479 void *data ATTRIBUTE_UNUSED
)
1481 gcc_assert (!*value
);
1486 /* Predict branch probabilities and estimate profile of the tree CFG. */
1488 tree_estimate_probability (void)
1492 loop_optimizer_init (0);
1493 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1494 flow_loops_dump (dump_file
, NULL
, 0);
1496 add_noreturn_fake_exit_edges ();
1497 connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
1500 create_preheaders (CP_SIMPLE_PREHEADERS
);
1501 calculate_dominance_info (CDI_POST_DOMINATORS
);
1503 bb_predictions
= pointer_map_create ();
1504 tree_bb_level_predictions ();
1506 mark_irreducible_loops ();
1507 record_loop_exits ();
1508 if (number_of_loops () > 1)
1516 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
      /* Predict early returns to be probable, as we've already taken
	 care of error returns, and other cases are often used for
	 fast paths through the function.
1522 Since we've already removed the return statements, we are
1523 looking for CFG like:
1533 if (e
->dest
!= bb
->next_bb
1534 && e
->dest
!= EXIT_BLOCK_PTR
1535 && single_succ_p (e
->dest
)
1536 && single_succ_edge (e
->dest
)->dest
== EXIT_BLOCK_PTR
1537 && gimple_code (last_stmt (e
->dest
)) == GIMPLE_RETURN
)
1542 if (single_succ_p (bb
))
1544 FOR_EACH_EDGE (e1
, ei1
, bb
->preds
)
1545 if (!predicted_by_p (e1
->src
, PRED_NULL_RETURN
)
1546 && !predicted_by_p (e1
->src
, PRED_CONST_RETURN
)
1547 && !predicted_by_p (e1
->src
, PRED_NEGATIVE_RETURN
))
1548 predict_edge_def (e1
, PRED_TREE_EARLY_RETURN
, NOT_TAKEN
);
1551 if (!predicted_by_p (e
->src
, PRED_NULL_RETURN
)
1552 && !predicted_by_p (e
->src
, PRED_CONST_RETURN
)
1553 && !predicted_by_p (e
->src
, PRED_NEGATIVE_RETURN
))
1554 predict_edge_def (e
, PRED_TREE_EARLY_RETURN
, NOT_TAKEN
);
	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
1559 if (e
->dest
!= EXIT_BLOCK_PTR
&& e
->dest
!= bb
1560 && dominated_by_p (CDI_DOMINATORS
, e
->dest
, e
->src
)
1561 && !dominated_by_p (CDI_POST_DOMINATORS
, e
->src
, e
->dest
))
1563 gimple_stmt_iterator bi
;
	    /* The call heuristic claims that a guarded function call
	       is improbable.  This is because such calls are often used
	       to signal exceptional situations such as printing error
	       messages.  */
1569 for (bi
= gsi_start_bb (e
->dest
); !gsi_end_p (bi
);
1572 gimple stmt
= gsi_stmt (bi
);
1573 if (is_gimple_call (stmt
)
		    /* Constant and pure calls are hardly used to signal
		       something exceptional.  */
1576 && gimple_has_side_effects (stmt
))
1578 predict_edge_def (e
, PRED_CALL
, NOT_TAKEN
);
1584 tree_predict_by_opcode (bb
);
1587 combine_predictions_for_bb (bb
);
1589 #ifdef ENABLE_CHECKING
1590 pointer_map_traverse (bb_predictions
, assert_is_empty
, NULL
);
1592 pointer_map_destroy (bb_predictions
);
1593 bb_predictions
= NULL
;
1595 estimate_bb_frequencies ();
1596 free_dominance_info (CDI_POST_DOMINATORS
);
1597 remove_fake_exit_edges ();
1598 loop_optimizer_finalize ();
1599 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1600 gimple_dump_cfg (dump_file
, dump_flags
);
1601 if (profile_status
== PROFILE_ABSENT
)
1602 profile_status
= PROFILE_GUESSED
;
1606 /* Predict edges to successors of CUR whose sources are not postdominated by
1607 BB by PRED and recurse to all postdominators. */
1610 predict_paths_for_bb (basic_block cur
, basic_block bb
,
1611 enum br_predictor pred
,
1612 enum prediction taken
)
  /* We are looking for all edges forming an edge cut induced by
     the set of all blocks postdominated by BB.  */
1620 FOR_EACH_EDGE (e
, ei
, cur
->preds
)
1621 if (e
->src
->index
>= NUM_FIXED_BLOCKS
1622 && !dominated_by_p (CDI_POST_DOMINATORS
, e
->src
, bb
))
1624 gcc_assert (bb
== cur
|| dominated_by_p (CDI_POST_DOMINATORS
, cur
, bb
));
1625 predict_edge_def (e
, pred
, taken
);
1627 for (son
= first_dom_son (CDI_POST_DOMINATORS
, cur
);
1629 son
= next_dom_son (CDI_POST_DOMINATORS
, son
))
1630 predict_paths_for_bb (son
, bb
, pred
, taken
);
1633 /* Sets branch probabilities according to PREDiction and
1637 predict_paths_leading_to (basic_block bb
, enum br_predictor pred
,
1638 enum prediction taken
)
1640 predict_paths_for_bb (bb
, bb
, pred
, taken
);
1643 /* This is used to carry information about basic blocks. It is
1644 attached to the AUX field of the standard CFG block. */
1646 typedef struct block_info_def
1648 /* Estimated frequency of execution of basic_block. */
1651 /* To keep queue of basic blocks to process. */
1654 /* Number of predecessors we need to visit first. */
1658 /* Similar information for edges. */
1659 typedef struct edge_info_def
  /* In case the edge is a loopback edge, the probability that the edge will
     be reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
1664 sreal back_edge_prob
;
1665 /* True if the edge is a loopback edge in the natural loop. */
1666 unsigned int back_edge
:1;
1669 #define BLOCK_INFO(B) ((block_info) (B)->aux)
1670 #define EDGE_INFO(E) ((edge_info) (E)->aux)
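
/* E.g. if the loopback edge of a loop is estimated to be followed with
   probability 0.9, the expected number of iterations comes out as
   1 / (1 - 0.9) == 10; propagate_freq below uses this to scale the
   frequencies of the blocks inside the loop.  */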
1672 /* Helper function for estimate_bb_frequencies.
1673 Propagate the frequencies in blocks marked in
1674 TOVISIT, starting in HEAD. */
1677 propagate_freq (basic_block head
, bitmap tovisit
)
  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
1688 EXECUTE_IF_SET_IN_BITMAP (tovisit
, 0, i
, bi
)
1693 /* The outermost "loop" includes the exit block, which we can not
1694 look up via BASIC_BLOCK. Detect this and use EXIT_BLOCK_PTR
1695 directly. Do the same for the entry block. */
1696 bb
= BASIC_BLOCK (i
);
1698 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1700 bool visit
= bitmap_bit_p (tovisit
, e
->src
->index
);
1702 if (visit
&& !(e
->flags
& EDGE_DFS_BACK
))
1704 else if (visit
&& dump_file
&& !EDGE_INFO (e
)->back_edge
)
1706 "Irreducible region hit, ignoring edge to %i->%i\n",
1707 e
->src
->index
, bb
->index
);
1709 BLOCK_INFO (bb
)->npredecessors
= count
;
1712 memcpy (&BLOCK_INFO (head
)->frequency
, &real_one
, sizeof (real_one
));
1714 for (bb
= head
; bb
; bb
= nextbb
)
1717 sreal cyclic_probability
, frequency
;
1719 memcpy (&cyclic_probability
, &real_zero
, sizeof (real_zero
));
1720 memcpy (&frequency
, &real_zero
, sizeof (real_zero
));
1722 nextbb
= BLOCK_INFO (bb
)->next
;
1723 BLOCK_INFO (bb
)->next
= NULL
;
1725 /* Compute frequency of basic block. */
1728 #ifdef ENABLE_CHECKING
1729 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1730 gcc_assert (!bitmap_bit_p (tovisit
, e
->src
->index
)
1731 || (e
->flags
& EDGE_DFS_BACK
));
1734 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1735 if (EDGE_INFO (e
)->back_edge
)
1737 sreal_add (&cyclic_probability
, &cyclic_probability
,
1738 &EDGE_INFO (e
)->back_edge_prob
);
1740 else if (!(e
->flags
& EDGE_DFS_BACK
))
1744 /* frequency += (e->probability
1745 * BLOCK_INFO (e->src)->frequency /
1746 REG_BR_PROB_BASE); */
1748 sreal_init (&tmp
, e
->probability
, 0);
1749 sreal_mul (&tmp
, &tmp
, &BLOCK_INFO (e
->src
)->frequency
);
1750 sreal_mul (&tmp
, &tmp
, &real_inv_br_prob_base
);
1751 sreal_add (&frequency
, &frequency
, &tmp
);
1754 if (sreal_compare (&cyclic_probability
, &real_zero
) == 0)
1756 memcpy (&BLOCK_INFO (bb
)->frequency
, &frequency
,
1757 sizeof (frequency
));
1761 if (sreal_compare (&cyclic_probability
, &real_almost_one
) > 0)
1763 memcpy (&cyclic_probability
, &real_almost_one
,
1764 sizeof (real_almost_one
));
1767 /* BLOCK_INFO (bb)->frequency = frequency
1768 / (1 - cyclic_probability) */
1770 sreal_sub (&cyclic_probability
, &real_one
, &cyclic_probability
);
1771 sreal_div (&BLOCK_INFO (bb
)->frequency
,
1772 &frequency
, &cyclic_probability
);
1776 bitmap_clear_bit (tovisit
, bb
->index
);
1778 e
= find_edge (bb
, head
);
1783 /* EDGE_INFO (e)->back_edge_prob
1784 = ((e->probability * BLOCK_INFO (bb)->frequency)
1785 / REG_BR_PROB_BASE); */
1787 sreal_init (&tmp
, e
->probability
, 0);
1788 sreal_mul (&tmp
, &tmp
, &BLOCK_INFO (bb
)->frequency
);
1789 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
1790 &tmp
, &real_inv_br_prob_base
);
1793 /* Propagate to successor blocks. */
1794 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1795 if (!(e
->flags
& EDGE_DFS_BACK
)
1796 && BLOCK_INFO (e
->dest
)->npredecessors
)
1798 BLOCK_INFO (e
->dest
)->npredecessors
--;
1799 if (!BLOCK_INFO (e
->dest
)->npredecessors
)
1804 BLOCK_INFO (last
)->next
= e
->dest
;
1812 /* Estimate probabilities of loopback edges in loops at same nest level. */
1815 estimate_loops_at_level (struct loop
*first_loop
)
1819 for (loop
= first_loop
; loop
; loop
= loop
->next
)
1824 bitmap tovisit
= BITMAP_ALLOC (NULL
);
1826 estimate_loops_at_level (loop
->inner
);
1828 /* Find current loop back edge and mark it. */
1829 e
= loop_latch_edge (loop
);
1830 EDGE_INFO (e
)->back_edge
= 1;
1832 bbs
= get_loop_body (loop
);
1833 for (i
= 0; i
< loop
->num_nodes
; i
++)
1834 bitmap_set_bit (tovisit
, bbs
[i
]->index
);
1836 propagate_freq (loop
->header
, tovisit
);
1837 BITMAP_FREE (tovisit
);
1841 /* Propagates frequencies through structure of loops. */
1844 estimate_loops (void)
1846 bitmap tovisit
= BITMAP_ALLOC (NULL
);
1849 /* Start by estimating the frequencies in the loops. */
1850 if (number_of_loops () > 1)
1851 estimate_loops_at_level (current_loops
->tree_root
->inner
);
1853 /* Now propagate the frequencies through all the blocks. */
1856 bitmap_set_bit (tovisit
, bb
->index
);
1858 propagate_freq (ENTRY_BLOCK_PTR
, tovisit
);
1859 BITMAP_FREE (tovisit
);
1862 /* Convert counts measured by profile driven feedback to frequencies.
1863 Return nonzero iff there was any nonzero execution count. */
1866 counts_to_freqs (void)
1868 gcov_type count_max
, true_count_max
= 0;
1872 true_count_max
= MAX (bb
->count
, true_count_max
);
1874 count_max
= MAX (true_count_max
, 1);
1875 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1876 bb
->frequency
= (bb
->count
* BB_FREQ_MAX
+ count_max
/ 2) / count_max
;
1878 return true_count_max
;
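
/* E.g. a block executed 500 times in a function whose most frequent block
   ran 2000 times gets frequency (500 * BB_FREQ_MAX + 1000) / 2000, i.e.
   about a quarter of BB_FREQ_MAX, so frequencies are always normalized to
   the 0 .. BB_FREQ_MAX range regardless of how often the profiled program
   was run.  */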
/* Return true if the function is likely to be expensive, so there is no point
   in optimizing the performance of the prologue, epilogue or doing inlining
   at the expense of code size growth.  THRESHOLD is the limit on the number
   of instructions the function can execute on average and still be
   considered not expensive.  */
1887 expensive_function_p (int threshold
)
1889 unsigned int sum
= 0;
  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
1895 gcc_assert (threshold
<= BB_FREQ_MAX
);
  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or that
     profile feedback is available and the function has not been executed
     at all.  */
1900 if (ENTRY_BLOCK_PTR
->frequency
== 0)
1903 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
1904 limit
= ENTRY_BLOCK_PTR
->frequency
* threshold
;
1909 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
1910 insn
= NEXT_INSN (insn
))
1911 if (active_insn_p (insn
))
1913 sum
+= bb
->frequency
;
/* Estimate basic block frequencies from the given branch probabilities.  */
1925 estimate_bb_frequencies (void)
1930 if (!flag_branch_probabilities
|| !counts_to_freqs ())
1932 static int real_values_initialized
= 0;
1934 if (!real_values_initialized
)
1936 real_values_initialized
= 1;
1937 sreal_init (&real_zero
, 0, 0);
1938 sreal_init (&real_one
, 1, 0);
1939 sreal_init (&real_br_prob_base
, REG_BR_PROB_BASE
, 0);
1940 sreal_init (&real_bb_freq_max
, BB_FREQ_MAX
, 0);
1941 sreal_init (&real_one_half
, 1, -1);
1942 sreal_div (&real_inv_br_prob_base
, &real_one
, &real_br_prob_base
);
1943 sreal_sub (&real_almost_one
, &real_one
, &real_inv_br_prob_base
);
1946 mark_dfs_back_edges ();
1948 single_succ_edge (ENTRY_BLOCK_PTR
)->probability
= REG_BR_PROB_BASE
;
1950 /* Set up block info for each basic block. */
1951 alloc_aux_for_blocks (sizeof (struct block_info_def
));
1952 alloc_aux_for_edges (sizeof (struct edge_info_def
));
1953 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1958 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1960 sreal_init (&EDGE_INFO (e
)->back_edge_prob
, e
->probability
, 0);
1961 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
1962 &EDGE_INFO (e
)->back_edge_prob
,
1963 &real_inv_br_prob_base
);
1967 /* First compute probabilities locally for each loop from innermost
1968 to outermost to examine probabilities for back edges. */
1971 memcpy (&freq_max
, &real_zero
, sizeof (real_zero
));
1973 if (sreal_compare (&freq_max
, &BLOCK_INFO (bb
)->frequency
) < 0)
1974 memcpy (&freq_max
, &BLOCK_INFO (bb
)->frequency
, sizeof (freq_max
));
1976 sreal_div (&freq_max
, &real_bb_freq_max
, &freq_max
);
1977 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1981 sreal_mul (&tmp
, &BLOCK_INFO (bb
)->frequency
, &freq_max
);
1982 sreal_add (&tmp
, &tmp
, &real_one_half
);
1983 bb
->frequency
= sreal_to_int (&tmp
);
1986 free_aux_for_blocks ();
1987 free_aux_for_edges ();
1989 compute_function_frequency ();
1990 if (flag_reorder_functions
)
1991 choose_function_section ();
1994 /* Decide whether function is hot, cold or unlikely executed. */
1996 compute_function_frequency (void)
2000 if (!profile_info
|| !flag_branch_probabilities
)
2002 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl
))
2004 cfun
->function_frequency
= FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
;
2005 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl
))
2007 cfun
->function_frequency
= FUNCTION_FREQUENCY_HOT
;
2010 cfun
->function_frequency
= FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
;
2013 if (maybe_hot_bb_p (bb
))
2015 cfun
->function_frequency
= FUNCTION_FREQUENCY_HOT
;
2018 if (!probably_never_executed_bb_p (bb
))
2019 cfun
->function_frequency
= FUNCTION_FREQUENCY_NORMAL
;
2023 /* Choose appropriate section for the function. */
2025 choose_function_section (void)
2027 if (DECL_SECTION_NAME (current_function_decl
)
2028 || !targetm
.have_named_sections
2029 /* Theoretically we can split the gnu.linkonce text section too,
2030 but this requires more work as the frequency needs to match
2031 for all generated objects so we need to merge the frequency
2032 of all instances. For now just never set frequency for these. */
2033 || DECL_ONE_ONLY (current_function_decl
))
2036 /* If we are doing the partitioning optimization, let the optimization
2037 choose the correct section into which to put things. */
2039 if (flag_reorder_blocks_and_partition
)
2042 if (cfun
->function_frequency
== FUNCTION_FREQUENCY_HOT
)
2043 DECL_SECTION_NAME (current_function_decl
) =
2044 build_string (strlen (HOT_TEXT_SECTION_NAME
), HOT_TEXT_SECTION_NAME
);
2045 if (cfun
->function_frequency
== FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
)
2046 DECL_SECTION_NAME (current_function_decl
) =
2047 build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME
),
2048 UNLIKELY_EXECUTED_TEXT_SECTION_NAME
);
2052 gate_estimate_probability (void)
2054 return flag_guess_branch_prob
;
2057 /* Build PREDICT_EXPR. */
2059 build_predict_expr (enum br_predictor predictor
, enum prediction taken
)
2061 tree t
= build1 (PREDICT_EXPR
, void_type_node
,
2062 build_int_cst (NULL
, predictor
));
2063 PREDICT_EXPR_OUTCOME (t
) = taken
;
2068 predictor_name (enum br_predictor predictor
)
2070 return predictor_info
[predictor
].name
;
2073 struct gimple_opt_pass pass_profile
=
2077 "profile", /* name */
2078 gate_estimate_probability
, /* gate */
2079 tree_estimate_probability
, /* execute */
2082 0, /* static_pass_number */
2083 TV_BRANCH_PROB
, /* tv_id */
2084 PROP_cfg
, /* properties_required */
2085 0, /* properties_provided */
2086 0, /* properties_destroyed */
2087 0, /* todo_flags_start */
2088 TODO_ggc_collect
| TODO_verify_ssa
/* todo_flags_finish */
2092 struct gimple_opt_pass pass_strip_predict_hints
=
2098 strip_predict_hints
, /* execute */
2101 0, /* static_pass_number */
2102 TV_BRANCH_PROB
, /* tv_id */
2103 PROP_cfg
, /* properties_required */
2104 0, /* properties_provided */
2105 0, /* properties_destroyed */
2106 0, /* todo_flags_start */
2107 TODO_ggc_collect
| TODO_verify_ssa
/* todo_flags_finish */