/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
33 #include "coretypes.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
55 #include "tree-flow.h"
57 #include "tree-dump.h"
58 #include "tree-pass.h"
60 #include "tree-scalar-evolution.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
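
/* Illustrative values (an assumption for exposition, not part of this file):
   with REG_BR_PROB_BASE at its usual value of 10000, the macros above work
   out to PROB_VERY_UNLIKELY = 999 (just under 10%), PROB_EVEN = 5000 (50%),
   PROB_VERY_LIKELY = 9001 (just over 90%) and PROB_ALWAYS = 10000 (100%).  */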
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
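
/* Worked example (hypothetical entry, not taken from predict.def): a line
   such as

     DEF_PREDICTOR (PRED_EXAMPLE, "example heuristic", HITRATE (88), 0)

   would expand to the initializer {"example heuristic", 8800, 0}, because
   HITRATE (88) = (88 * REG_BR_PROB_BASE + 50) / 100 = 8800 when
   REG_BR_PROB_BASE is 10000.  */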
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
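
/* Worked example (illustrative numbers only): with profile_info->runs == 10,
   the rounding above classifies a block as never executed only when
   bb->count <= 4, since (4 + 5) / 10 == 0 while (5 + 5) / 10 == 1.  */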
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i = bb_ann (bb)->predictions;
  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_insn (insn, predictor, probability);
}
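
/* Illustrative example (values assumed for exposition): for a predictor
   whose hitrate is 8000 out of REG_BR_PROB_BASE == 10000, passing TAKEN
   attaches probability 8000 to the branch, while NOT_TAKEN attaches
   REG_BR_PROB_BASE - 8000 = 2000.  */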
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
284 /* Dump information about the branch prediction to the output file. */
287 dump_prediction (FILE *file
, enum br_predictor predictor
, int probability
,
288 basic_block bb
, int used
)
296 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
297 if (! (e
->flags
& EDGE_FALLTHRU
))
300 fprintf (file
, " %s heuristics%s: %.1f%%",
301 predictor_info
[predictor
].name
,
302 used
? "" : " (ignored)", probability
* 100.0 / REG_BR_PROB_BASE
);
306 fprintf (file
, " exec ");
307 fprintf (file
, HOST_WIDEST_INT_PRINT_DEC
, bb
->count
);
310 fprintf (file
, " hit ");
311 fprintf (file
, HOST_WIDEST_INT_PRINT_DEC
, e
->count
);
312 fprintf (file
, " (%.1f%%)", e
->count
* 100.0 / bb
->count
);
316 fprintf (file
, "\n");
319 /* We can not predict the probabilities of outgoing edges of bb. Set them
320 evenly and hope for the best. */
322 set_even_probabilities (basic_block bb
)
328 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
329 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
331 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
332 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
333 e
->probability
= (REG_BR_PROB_BASE
+ nedges
/ 2) / nedges
;
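	/* Worked example (illustrative): with REG_BR_PROB_BASE == 10000 and
	   three successor edges that are neither EH nor FAKE, each edge gets
	   probability (10000 + 1) / 3 = 3333, i.e. roughly one third.  */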
338 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
339 note if not already present. Remove now useless REG_BR_PRED notes. */
342 combine_predictions_for_insn (rtx insn
, basic_block bb
)
347 int best_probability
= PROB_EVEN
;
348 int best_predictor
= END_PREDICTORS
;
349 int combined_probability
= REG_BR_PROB_BASE
/ 2;
351 bool first_match
= false;
354 if (!can_predict_insn_p (insn
))
356 set_even_probabilities (bb
);
360 prob_note
= find_reg_note (insn
, REG_BR_PROB
, 0);
361 pnote
= ®_NOTES (insn
);
363 fprintf (dump_file
, "Predictions for insn %i bb %i\n", INSN_UID (insn
),
366 /* We implement "first match" heuristics and use probability guessed
367 by predictor with smallest index. */
368 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
369 if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
371 int predictor
= INTVAL (XEXP (XEXP (note
, 0), 0));
372 int probability
= INTVAL (XEXP (XEXP (note
, 0), 1));
375 if (best_predictor
> predictor
)
376 best_probability
= probability
, best_predictor
= predictor
;
378 d
= (combined_probability
* probability
379 + (REG_BR_PROB_BASE
- combined_probability
)
380 * (REG_BR_PROB_BASE
- probability
));
382 /* Use FP math to avoid overflows of 32bit integers. */
384 /* If one probability is 0% and one 100%, avoid division by zero. */
385 combined_probability
= REG_BR_PROB_BASE
/ 2;
387 combined_probability
= (((double) combined_probability
) * probability
388 * REG_BR_PROB_BASE
/ d
+ 0.5);
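	  /* Worked example of the combination above (illustrative numbers):
	     starting from the neutral 5000, folding in a prediction of 8000
	     gives 8000; folding in a further agreeing prediction of 7000
	     gives 8000 * 7000 * 10000 / (8000 * 7000 + 2000 * 3000) + 0.5,
	     i.e. about 9032, so two agreeing heuristics reinforce each
	     other (assuming REG_BR_PROB_BASE == 10000).  */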
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */
395 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
399 dump_prediction (dump_file
, PRED_NO_PREDICTION
,
400 combined_probability
, bb
, true);
403 dump_prediction (dump_file
, PRED_DS_THEORY
, combined_probability
,
405 dump_prediction (dump_file
, PRED_FIRST_MATCH
, best_probability
,
410 combined_probability
= best_probability
;
411 dump_prediction (dump_file
, PRED_COMBINED
, combined_probability
, bb
, true);
415 if (REG_NOTE_KIND (*pnote
) == REG_BR_PRED
)
417 int predictor
= INTVAL (XEXP (XEXP (*pnote
, 0), 0));
418 int probability
= INTVAL (XEXP (XEXP (*pnote
, 0), 1));
420 dump_prediction (dump_file
, predictor
, probability
, bb
,
421 !first_match
|| best_predictor
== predictor
);
422 *pnote
= XEXP (*pnote
, 1);
425 pnote
= &XEXP (*pnote
, 1);
431 = gen_rtx_EXPR_LIST (REG_BR_PROB
,
432 GEN_INT (combined_probability
), REG_NOTES (insn
));
  /* Save the prediction into CFG in case we are seeing non-degenerated
     conditional jump.  */
436 if (EDGE_COUNT (bb
->succs
) > 1)
438 BRANCH_EDGE (bb
)->probability
= combined_probability
;
439 FALLTHRU_EDGE (bb
)->probability
440 = REG_BR_PROB_BASE
- combined_probability
;
443 else if (EDGE_COUNT (bb
->succs
) > 1)
445 int prob
= INTVAL (XEXP (prob_note
, 0));
447 BRANCH_EDGE (bb
)->probability
= prob
;
448 FALLTHRU_EDGE (bb
)->probability
= REG_BR_PROB_BASE
- prob
;
451 EDGE_SUCC (bb
, 0)->probability
= REG_BR_PROB_BASE
;
454 /* Combine predictions into single probability and store them into CFG.
455 Remove now useless prediction entries. */
458 combine_predictions_for_bb (FILE *file
, basic_block bb
)
460 int best_probability
= PROB_EVEN
;
461 int best_predictor
= END_PREDICTORS
;
462 int combined_probability
= REG_BR_PROB_BASE
/ 2;
464 bool first_match
= false;
466 struct edge_prediction
*pred
;
468 edge e
, first
= NULL
, second
= NULL
;
471 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
472 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
475 if (first
&& !second
)
  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
490 set_even_probabilities (bb
);
491 bb_ann (bb
)->predictions
= NULL
;
493 fprintf (file
, "%i edges in bb %i predicted to even probabilities\n",
499 fprintf (file
, "Predictions for bb %i\n", bb
->index
);
501 /* We implement "first match" heuristics and use probability guessed
502 by predictor with smallest index. */
503 for (pred
= bb_ann (bb
)->predictions
; pred
; pred
= pred
->next
)
505 int predictor
= pred
->predictor
;
506 int probability
= pred
->probability
;
508 if (pred
->edge
!= first
)
509 probability
= REG_BR_PROB_BASE
- probability
;
512 if (best_predictor
> predictor
)
513 best_probability
= probability
, best_predictor
= predictor
;
515 d
= (combined_probability
* probability
516 + (REG_BR_PROB_BASE
- combined_probability
)
517 * (REG_BR_PROB_BASE
- probability
));
519 /* Use FP math to avoid overflows of 32bit integers. */
521 /* If one probability is 0% and one 100%, avoid division by zero. */
522 combined_probability
= REG_BR_PROB_BASE
/ 2;
524 combined_probability
= (((double) combined_probability
) * probability
525 * REG_BR_PROB_BASE
/ d
+ 0.5);
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */
532 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
536 dump_prediction (file
, PRED_NO_PREDICTION
, combined_probability
, bb
, true);
539 dump_prediction (file
, PRED_DS_THEORY
, combined_probability
, bb
,
541 dump_prediction (file
, PRED_FIRST_MATCH
, best_probability
, bb
,
546 combined_probability
= best_probability
;
547 dump_prediction (file
, PRED_COMBINED
, combined_probability
, bb
, true);
549 for (pred
= bb_ann (bb
)->predictions
; pred
; pred
= pred
->next
)
551 int predictor
= pred
->predictor
;
552 int probability
= pred
->probability
;
554 if (pred
->edge
!= EDGE_SUCC (bb
, 0))
555 probability
= REG_BR_PROB_BASE
- probability
;
556 dump_prediction (file
, predictor
, probability
, bb
,
557 !first_match
|| best_predictor
== predictor
);
559 bb_ann (bb
)->predictions
= NULL
;
563 first
->probability
= combined_probability
;
564 second
->probability
= REG_BR_PROB_BASE
- combined_probability
;
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
   analyzing the RTL; otherwise use a tree-based approach.  */
572 predict_loops (struct loops
*loops_info
, bool rtlsimpleloops
)
577 scev_initialize (loops_info
);
579 /* Try to predict out blocks in a loop that are not part of a
581 for (i
= 1; i
< loops_info
->num
; i
++)
583 basic_block bb
, *bbs
;
586 struct loop
*loop
= loops_info
->parray
[i
];
587 struct niter_desc desc
;
588 unsigned HOST_WIDE_INT niter
;
590 flow_loop_scan (loop
, LOOP_EXIT_EDGES
);
591 exits
= loop
->num_exits
;
595 iv_analysis_loop_init (loop
);
596 find_simple_exit (loop
, &desc
);
598 if (desc
.simple_p
&& desc
.const_iter
)
601 niter
= desc
.niter
+ 1;
602 if (niter
== 0) /* We might overflow here. */
605 prob
= (REG_BR_PROB_BASE
606 - (REG_BR_PROB_BASE
+ niter
/2) / niter
);
	  /* The branch prediction algorithm gives 0 frequency for
	     everything after the end of the loop when the loop has
	     0 probability of finishing.  */
609 if (prob
== REG_BR_PROB_BASE
)
610 prob
= REG_BR_PROB_BASE
- 1;
611 predict_edge (desc
.in_edge
, PRED_LOOP_ITERATIONS
,
619 struct tree_niter_desc niter_desc
;
621 exits
= get_loop_exit_edges (loop
, &n_exits
);
622 for (j
= 0; j
< n_exits
; j
++)
626 if (number_of_iterations_exit (loop
, exits
[j
], &niter_desc
))
627 niter
= niter_desc
.niter
;
628 if (!niter
|| TREE_CODE (niter_desc
.niter
) != INTEGER_CST
)
629 niter
= loop_niter_by_eval (loop
, exits
[j
]);
631 if (TREE_CODE (niter
) == INTEGER_CST
)
634 if (host_integerp (niter
, 1)
635 && tree_int_cst_lt (niter
,
636 build_int_cstu (NULL_TREE
,
637 REG_BR_PROB_BASE
- 1)))
639 HOST_WIDE_INT nitercst
= tree_low_cst (niter
, 1) + 1;
640 probability
= (REG_BR_PROB_BASE
+ nitercst
/ 2) / nitercst
;
645 predict_edge (exits
[j
], PRED_LOOP_ITERATIONS
, probability
);
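	      /* Worked example (illustrative numbers): for a loop whose
		 constant iteration count gives nitercst == 100, the exit
		 edge is predicted with probability (10000 + 50) / 100 = 100,
		 i.e. 1% per execution of the exit test, matching an expected
		 100 iterations (assuming REG_BR_PROB_BASE == 10000).  */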
652 bbs
= get_loop_body (loop
);
654 for (j
= 0; j
< loop
->num_nodes
; j
++)
656 int header_found
= 0;
	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
666 if ((rtlsimpleloops
&& !can_predict_insn_p (BB_END (bb
)))
667 || predicted_by_p (bb
, PRED_CONTINUE
))
670 /* Loop branch heuristics - predict an edge back to a
671 loop's head as taken. */
672 if (bb
== loop
->latch
)
674 e
= find_edge (loop
->latch
, loop
->header
);
678 predict_edge_def (e
, PRED_LOOP_BRANCH
, TAKEN
);
682 /* Loop exit heuristics - predict an edge exiting the loop if the
683 conditional has no loop header successors as not taken. */
685 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
686 if (e
->dest
->index
< 0
687 || !flow_bb_inside_loop_p (loop
, e
->dest
))
691 - predictor_info
[(int) PRED_LOOP_EXIT
].hitrate
)
695 /* Free basic blocks from get_loop_body. */
703 /* Attempt to predict probabilities of BB outgoing edges using local
706 bb_estimate_probability_locally (basic_block bb
)
708 rtx last_insn
= BB_END (bb
);
711 if (! can_predict_insn_p (last_insn
))
713 cond
= get_condition (last_insn
, NULL
, false, false);
717 /* Try "pointer heuristic."
718 A comparison ptr == 0 is predicted as false.
719 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
720 if (COMPARISON_P (cond
)
721 && ((REG_P (XEXP (cond
, 0)) && REG_POINTER (XEXP (cond
, 0)))
722 || (REG_P (XEXP (cond
, 1)) && REG_POINTER (XEXP (cond
, 1)))))
724 if (GET_CODE (cond
) == EQ
)
725 predict_insn_def (last_insn
, PRED_POINTER
, NOT_TAKEN
);
726 else if (GET_CODE (cond
) == NE
)
727 predict_insn_def (last_insn
, PRED_POINTER
, TAKEN
);
731 /* Try "opcode heuristic."
732 EQ tests are usually false and NE tests are usually true. Also,
733 most quantities are positive, so we can make the appropriate guesses
734 about signed comparisons against zero. */
735 switch (GET_CODE (cond
))
738 /* Unconditional branch. */
739 predict_insn_def (last_insn
, PRED_UNCONDITIONAL
,
740 cond
== const0_rtx
? NOT_TAKEN
: TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
748 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
750 /* Comparisons with 0 are often used for booleans and there is
751 nothing useful to predict about them. */
752 else if (XEXP (cond
, 1) == const0_rtx
753 || XEXP (cond
, 0) == const0_rtx
)
756 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, NOT_TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
764 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
766 /* Comparisons with 0 are often used for booleans and there is
767 nothing useful to predict about them. */
768 else if (XEXP (cond
, 1) == const0_rtx
769 || XEXP (cond
, 0) == const0_rtx
)
772 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, TAKEN
);
776 predict_insn_def (last_insn
, PRED_FPOPCODE
, TAKEN
);
780 predict_insn_def (last_insn
, PRED_FPOPCODE
, NOT_TAKEN
);
785 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
786 || XEXP (cond
, 1) == constm1_rtx
)
787 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, NOT_TAKEN
);
792 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
793 || XEXP (cond
, 1) == constm1_rtx
)
794 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, TAKEN
);
/* Statically estimate the probability that a branch will be taken and
   produce estimated profile.  When profile feedback is present, only the
   portions of the function that were never executed get estimated.  */
807 estimate_probability (struct loops
*loops_info
)
811 connect_infinite_loops_to_exit ();
812 calculate_dominance_info (CDI_DOMINATORS
);
813 calculate_dominance_info (CDI_POST_DOMINATORS
);
815 predict_loops (loops_info
, true);
819 /* Attempt to predict conditional jumps using a number of heuristics. */
822 rtx last_insn
= BB_END (bb
);
826 if (! can_predict_insn_p (last_insn
))
829 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
      /* Predict early returns to be probable, as we've already taken
	 care of error returns and others are often used for fast paths
	 through the function.  */
834 if ((e
->dest
== EXIT_BLOCK_PTR
835 || (EDGE_COUNT (e
->dest
->succs
) == 1
836 && EDGE_SUCC (e
->dest
, 0)->dest
== EXIT_BLOCK_PTR
))
837 && !predicted_by_p (bb
, PRED_NULL_RETURN
)
838 && !predicted_by_p (bb
, PRED_CONST_RETURN
)
839 && !predicted_by_p (bb
, PRED_NEGATIVE_RETURN
)
840 && !last_basic_block_p (e
->dest
))
841 predict_edge_def (e
, PRED_EARLY_RETURN
, TAKEN
);
843 /* Look for block we are guarding (i.e. we dominate it,
844 but it doesn't postdominate us). */
845 if (e
->dest
!= EXIT_BLOCK_PTR
&& e
->dest
!= bb
846 && dominated_by_p (CDI_DOMINATORS
, e
->dest
, e
->src
)
847 && !dominated_by_p (CDI_POST_DOMINATORS
, e
->src
, e
->dest
))
	  /* The call heuristic claims that a guarded function call
	     is improbable.  This is because such calls are often used
	     to signal exceptional situations such as printing error
	     messages.  */
855 for (insn
= BB_HEAD (e
->dest
); insn
!= NEXT_INSN (BB_END (e
->dest
));
856 insn
= NEXT_INSN (insn
))
		/* Constant and pure calls are hardly used to signal
		   something exceptional.  */
860 && ! CONST_OR_PURE_CALL_P (insn
))
862 predict_edge_def (e
, PRED_CALL
, NOT_TAKEN
);
867 bb_estimate_probability_locally (bb
);
870 /* Attach the combined probability to each conditional jump. */
872 combine_predictions_for_insn (BB_END (bb
), bb
);
874 remove_fake_edges ();
875 estimate_bb_frequencies (loops_info
);
876 free_dominance_info (CDI_POST_DOMINATORS
);
877 if (profile_status
== PROFILE_ABSENT
)
878 profile_status
= PROFILE_GUESSED
;
881 /* Set edge->probability for each successor edge of BB. */
883 guess_outgoing_edge_probabilities (basic_block bb
)
885 bb_estimate_probability_locally (bb
);
886 combine_predictions_for_insn (BB_END (bb
), bb
);
889 /* Return constant EXPR will likely have at execution time, NULL if unknown.
890 The function is used by builtin_expect branch predictor so the evidence
891 must come from this construct and additional possible constant folding.
893 We may want to implement more involved value guess (such as value range
894 propagation based prediction), but such tricks shall go to new
898 expr_expected_value (tree expr
, bitmap visited
)
900 if (TREE_CONSTANT (expr
))
902 else if (TREE_CODE (expr
) == SSA_NAME
)
904 tree def
= SSA_NAME_DEF_STMT (expr
);
906 /* If we were already here, break the infinite cycle. */
907 if (bitmap_bit_p (visited
, SSA_NAME_VERSION (expr
)))
909 bitmap_set_bit (visited
, SSA_NAME_VERSION (expr
));
911 if (TREE_CODE (def
) == PHI_NODE
)
913 /* All the arguments of the PHI node must have the same constant
916 tree val
= NULL
, new_val
;
918 for (i
= 0; i
< PHI_NUM_ARGS (def
); i
++)
920 tree arg
= PHI_ARG_DEF (def
, i
);
922 /* If this PHI has itself as an argument, we cannot
923 determine the string length of this argument. However,
924 if we can find an expected constant value for the other
925 PHI args then we can still be sure that this is
926 likely a constant. So be optimistic and just
927 continue with the next argument. */
928 if (arg
== PHI_RESULT (def
))
931 new_val
= expr_expected_value (arg
, visited
);
936 else if (!operand_equal_p (val
, new_val
, false))
941 if (TREE_CODE (def
) != MODIFY_EXPR
|| TREE_OPERAND (def
, 0) != expr
)
943 return expr_expected_value (TREE_OPERAND (def
, 1), visited
);
945 else if (TREE_CODE (expr
) == CALL_EXPR
)
947 tree decl
= get_callee_fndecl (expr
);
950 if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
951 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_EXPECT
)
953 tree arglist
= TREE_OPERAND (expr
, 1);
956 if (arglist
== NULL_TREE
957 || TREE_CHAIN (arglist
) == NULL_TREE
)
959 val
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr
, 1)));
960 if (TREE_CONSTANT (val
))
962 return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr
, 1)));
965 if (BINARY_CLASS_P (expr
) || COMPARISON_CLASS_P (expr
))
968 op0
= expr_expected_value (TREE_OPERAND (expr
, 0), visited
);
971 op1
= expr_expected_value (TREE_OPERAND (expr
, 1), visited
);
974 res
= fold (build (TREE_CODE (expr
), TREE_TYPE (expr
), op0
, op1
));
975 if (TREE_CONSTANT (res
))
979 if (UNARY_CLASS_P (expr
))
982 op0
= expr_expected_value (TREE_OPERAND (expr
, 0), visited
);
985 res
= fold (build1 (TREE_CODE (expr
), TREE_TYPE (expr
), op0
));
986 if (TREE_CONSTANT (res
))
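
/* Illustrative use (an example written for exposition, not taken from the
   GCC sources): given source code like

     if (__builtin_expect (ptr != NULL, 1))
       use (ptr);

   expr_expected_value folds the comparison against the expected value 1, so
   it returns a nonzero constant for the condition and the then-edge is later
   predicted as taken via PRED_BUILTIN_EXPECT.  */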
993 /* Get rid of all builtin_expect calls we no longer need. */
995 strip_builtin_expect (void)
1000 block_stmt_iterator bi
;
1001 for (bi
= bsi_start (bb
); !bsi_end_p (bi
); bsi_next (&bi
))
1003 tree stmt
= bsi_stmt (bi
);
1007 if (TREE_CODE (stmt
) == MODIFY_EXPR
1008 && TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
1009 && (fndecl
= get_callee_fndecl (TREE_OPERAND (stmt
, 1)))
1010 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1011 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
1012 && (arglist
= TREE_OPERAND (TREE_OPERAND (stmt
, 1), 1))
1013 && TREE_CHAIN (arglist
))
1015 TREE_OPERAND (stmt
, 1) = TREE_VALUE (arglist
);
1022 /* Predict using opcode of the last statement in basic block. */
1024 tree_predict_by_opcode (basic_block bb
)
1026 tree stmt
= last_stmt (bb
);
1035 if (!stmt
|| TREE_CODE (stmt
) != COND_EXPR
)
1037 FOR_EACH_EDGE (then_edge
, ei
, bb
->succs
)
1038 if (then_edge
->flags
& EDGE_TRUE_VALUE
)
1040 cond
= TREE_OPERAND (stmt
, 0);
1041 if (!COMPARISON_CLASS_P (cond
))
1043 op0
= TREE_OPERAND (cond
, 0);
1044 type
= TREE_TYPE (op0
);
1045 visited
= BITMAP_XMALLOC ();
1046 val
= expr_expected_value (cond
, visited
);
1047 BITMAP_XFREE (visited
);
1050 if (integer_zerop (val
))
1051 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, NOT_TAKEN
);
1053 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, TAKEN
);
1056 /* Try "pointer heuristic."
1057 A comparison ptr == 0 is predicted as false.
1058 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1059 if (POINTER_TYPE_P (type
))
1061 if (TREE_CODE (cond
) == EQ_EXPR
)
1062 predict_edge_def (then_edge
, PRED_TREE_POINTER
, NOT_TAKEN
);
1063 else if (TREE_CODE (cond
) == NE_EXPR
)
1064 predict_edge_def (then_edge
, PRED_TREE_POINTER
, TAKEN
);
1068 /* Try "opcode heuristic."
1069 EQ tests are usually false and NE tests are usually true. Also,
1070 most quantities are positive, so we can make the appropriate guesses
1071 about signed comparisons against zero. */
1072 switch (TREE_CODE (cond
))
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
1079 if (FLOAT_TYPE_P (type
))
1081 /* Comparisons with 0 are often used for booleans and there is
1082 nothing useful to predict about them. */
1083 else if (integer_zerop (op0
)
1084 || integer_zerop (TREE_OPERAND (cond
, 1)))
1087 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, NOT_TAKEN
);
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
1095 if (FLOAT_TYPE_P (type
))
1097 /* Comparisons with 0 are often used for booleans and there is
1098 nothing useful to predict about them. */
1099 else if (integer_zerop (op0
)
1100 || integer_zerop (TREE_OPERAND (cond
, 1)))
1103 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, TAKEN
);
1107 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, TAKEN
);
1110 case UNORDERED_EXPR
:
1111 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, NOT_TAKEN
);
1116 if (integer_zerop (TREE_OPERAND (cond
, 1))
1117 || integer_onep (TREE_OPERAND (cond
, 1))
1118 || integer_all_onesp (TREE_OPERAND (cond
, 1))
1119 || real_zerop (TREE_OPERAND (cond
, 1))
1120 || real_onep (TREE_OPERAND (cond
, 1))
1121 || real_minus_onep (TREE_OPERAND (cond
, 1)))
1122 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, NOT_TAKEN
);
1127 if (integer_zerop (TREE_OPERAND (cond
, 1))
1128 || integer_onep (TREE_OPERAND (cond
, 1))
1129 || integer_all_onesp (TREE_OPERAND (cond
, 1))
1130 || real_zerop (TREE_OPERAND (cond
, 1))
1131 || real_onep (TREE_OPERAND (cond
, 1))
1132 || real_minus_onep (TREE_OPERAND (cond
, 1)))
1133 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, TAKEN
);
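
/* Source-level illustrations of the heuristics above (examples written for
   exposition, not taken from the sources): "if (n > 0)" on a signed integer
   is predicted as taken, "if (p == q)" on two pointers is predicted as not
   taken, and equality tests between floating point values are deliberately
   left unpredicted.  */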
/* Try to guess whether the value of a return statement means an error
   code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
1147 return PRED_NO_PREDICTION
;
1148 /* Different heuristics for pointers and scalars. */
1149 if (POINTER_TYPE_P (TREE_TYPE (val
)))
1151 /* NULL is usually not returned. */
1152 if (integer_zerop (val
))
1154 *prediction
= NOT_TAKEN
;
1155 return PRED_NULL_RETURN
;
1158 else if (INTEGRAL_TYPE_P (TREE_TYPE (val
)))
1160 /* Negative return values are often used to indicate
1162 if (TREE_CODE (val
) == INTEGER_CST
1163 && tree_int_cst_sgn (val
) < 0)
1165 *prediction
= NOT_TAKEN
;
1166 return PRED_NEGATIVE_RETURN
;
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
1171 if (TREE_CONSTANT (val
)
1172 && (!integer_zerop (val
) && !integer_onep (val
)))
1174 *prediction
= TAKEN
;
1175 return PRED_NEGATIVE_RETURN
;
1178 return PRED_NO_PREDICTION
;
1181 /* Find the basic block with return expression and look up for possible
1182 return value trying to apply RETURN_PREDICTION heuristics. */
1184 apply_return_prediction (int *heads
)
1190 int phi_num_args
, i
;
1191 enum br_predictor pred
;
1192 enum prediction direction
;
1195 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
1197 return_stmt
= last_stmt (e
->src
);
1198 if (TREE_CODE (return_stmt
) == RETURN_EXPR
)
1203 return_val
= TREE_OPERAND (return_stmt
, 0);
1206 if (TREE_CODE (return_val
) == MODIFY_EXPR
)
1207 return_val
= TREE_OPERAND (return_val
, 1);
1208 if (TREE_CODE (return_val
) != SSA_NAME
1209 || !SSA_NAME_DEF_STMT (return_val
)
1210 || TREE_CODE (SSA_NAME_DEF_STMT (return_val
)) != PHI_NODE
)
1212 phi
= SSA_NAME_DEF_STMT (return_val
);
1215 tree next
= PHI_CHAIN (phi
);
1216 if (PHI_RESULT (phi
) == return_val
)
1222 phi_num_args
= PHI_NUM_ARGS (phi
);
1223 pred
= return_prediction (PHI_ARG_DEF (phi
, 0), &direction
);
  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
1228 for (i
= 1; i
< phi_num_args
; i
++)
1229 if (pred
!= return_prediction (PHI_ARG_DEF (phi
, i
), &direction
))
1231 if (i
!= phi_num_args
)
1232 for (i
= 0; i
< phi_num_args
; i
++)
1234 pred
= return_prediction (PHI_ARG_DEF (phi
, i
), &direction
);
1235 if (pred
!= PRED_NO_PREDICTION
)
1236 predict_paths_leading_to (PHI_ARG_EDGE (phi
, i
)->src
, heads
, pred
,
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */
1246 tree_bb_level_predictions (void)
1251 heads
= xmalloc (sizeof (int) * last_basic_block
);
1252 memset (heads
, -1, sizeof (int) * last_basic_block
);
1253 heads
[ENTRY_BLOCK_PTR
->next_bb
->index
] = last_basic_block
;
1255 apply_return_prediction (heads
);
1259 block_stmt_iterator bsi
= bsi_last (bb
);
1261 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1263 tree stmt
= bsi_stmt (bsi
);
1264 switch (TREE_CODE (stmt
))
1267 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
1269 stmt
= TREE_OPERAND (stmt
, 1);
1275 if (call_expr_flags (stmt
) & ECF_NORETURN
)
1276 predict_paths_leading_to (bb
, heads
, PRED_NORETURN
,
1288 /* Predict branch probabilities and estimate profile of the tree CFG. */
1290 tree_estimate_probability (void)
1293 struct loops loops_info
;
1295 flow_loops_find (&loops_info
, LOOP_TREE
);
1296 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1297 flow_loops_dump (&loops_info
, dump_file
, NULL
, 0);
1299 add_noreturn_fake_exit_edges ();
1300 connect_infinite_loops_to_exit ();
1301 calculate_dominance_info (CDI_DOMINATORS
);
1302 calculate_dominance_info (CDI_POST_DOMINATORS
);
1304 tree_bb_level_predictions ();
1306 mark_irreducible_loops (&loops_info
);
1307 predict_loops (&loops_info
, false);
1314 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
	/* Predict early returns to be probable, as we've already taken
	   care of error returns and other cases are often used for
	   fast paths through the function.  */
1319 if (e
->dest
== EXIT_BLOCK_PTR
1320 && TREE_CODE (last_stmt (bb
)) == RETURN_EXPR
1321 && EDGE_COUNT (bb
->preds
) > 1)
1326 FOR_EACH_EDGE (e1
, ei1
, bb
->preds
)
1327 if (!predicted_by_p (e1
->src
, PRED_NULL_RETURN
)
1328 && !predicted_by_p (e1
->src
, PRED_CONST_RETURN
)
1329 && !predicted_by_p (e1
->src
, PRED_NEGATIVE_RETURN
)
1330 && !last_basic_block_p (e1
->src
))
1331 predict_edge_def (e1
, PRED_TREE_EARLY_RETURN
, NOT_TAKEN
);
1334 /* Look for block we are guarding (ie we dominate it,
1335 but it doesn't postdominate us). */
1336 if (e
->dest
!= EXIT_BLOCK_PTR
&& e
->dest
!= bb
1337 && dominated_by_p (CDI_DOMINATORS
, e
->dest
, e
->src
)
1338 && !dominated_by_p (CDI_POST_DOMINATORS
, e
->src
, e
->dest
))
1340 block_stmt_iterator bi
;
	    /* The call heuristic claims that a guarded function call
	       is improbable.  This is because such calls are often used
	       to signal exceptional situations such as printing error
	       messages.  */
1346 for (bi
= bsi_start (e
->dest
); !bsi_end_p (bi
);
1349 tree stmt
= bsi_stmt (bi
);
1350 if ((TREE_CODE (stmt
) == CALL_EXPR
1351 || (TREE_CODE (stmt
) == MODIFY_EXPR
1352 && TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
))
		/* Constant and pure calls are hardly used to signal
		   something exceptional.  */
1355 && TREE_SIDE_EFFECTS (stmt
))
1357 predict_edge_def (e
, PRED_CALL
, NOT_TAKEN
);
1363 tree_predict_by_opcode (bb
);
1366 combine_predictions_for_bb (dump_file
, bb
);
1368 if (0) /* FIXME: Enable once we are pass down the profile to RTL level. */
1369 strip_builtin_expect ();
1370 estimate_bb_frequencies (&loops_info
);
1371 free_dominance_info (CDI_POST_DOMINATORS
);
1372 remove_fake_exit_edges ();
1373 flow_loops_free (&loops_info
);
1374 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1375 dump_tree_cfg (dump_file
, dump_flags
);
1376 if (profile_status
== PROFILE_ABSENT
)
1377 profile_status
= PROFILE_GUESSED
;
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */
1385 expected_value_to_br_prob (void)
1387 rtx insn
, cond
, ev
= NULL_RTX
, ev_reg
= NULL_RTX
;
1389 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1391 switch (GET_CODE (insn
))
1394 /* Look for expected value notes. */
1395 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EXPECTED_VALUE
)
1397 ev
= NOTE_EXPECTED_VALUE (insn
);
1398 ev_reg
= XEXP (ev
, 0);
1404 /* Never propagate across labels. */
1409 /* Look for simple conditional branches. If we haven't got an
1410 expected value yet, no point going further. */
1411 if (!JUMP_P (insn
) || ev
== NULL_RTX
1412 || ! any_condjump_p (insn
))
1417 /* Look for insns that clobber the EV register. */
1418 if (ev
&& reg_set_p (ev_reg
, insn
))
1423 /* Collect the branch condition, hopefully relative to EV_REG. */
1424 /* ??? At present we'll miss things like
1425 (expected_value (eq r70 0))
1427 (set r80 (lt r70 r71))
1428 (set pc (if_then_else (ne r80 0) ...))
1429 as canonicalize_condition will render this to us as
1431 Could use cselib to try and reduce this further. */
1432 cond
= XEXP (SET_SRC (pc_set (insn
)), 0);
1433 cond
= canonicalize_condition (insn
, cond
, 0, NULL
, ev_reg
,
1435 if (! cond
|| XEXP (cond
, 0) != ev_reg
1436 || GET_CODE (XEXP (cond
, 1)) != CONST_INT
)
1439 /* Substitute and simplify. Given that the expression we're
1440 building involves two constants, we should wind up with either
1442 cond
= gen_rtx_fmt_ee (GET_CODE (cond
), VOIDmode
,
1443 XEXP (ev
, 1), XEXP (cond
, 1));
1444 cond
= simplify_rtx (cond
);
1446 /* Turn the condition into a scaled branch probability. */
1447 if (cond
!= const_true_rtx
&& cond
!= const0_rtx
)
1449 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
,
1450 cond
== const_true_rtx
? TAKEN
: NOT_TAKEN
);
1454 /* Check whether this is the last basic block of function. Commonly
1455 there is one extra common cleanup block. */
1457 last_basic_block_p (basic_block bb
)
1459 if (bb
== EXIT_BLOCK_PTR
)
1462 return (bb
->next_bb
== EXIT_BLOCK_PTR
1463 || (bb
->next_bb
->next_bb
== EXIT_BLOCK_PTR
1464 && EDGE_COUNT (bb
->succs
) == 1
1465 && EDGE_SUCC (bb
, 0)->dest
->next_bb
== EXIT_BLOCK_PTR
));
1468 /* Sets branch probabilities according to PREDiction and
1469 FLAGS. HEADS[bb->index] should be index of basic block in that we
1470 need to alter branch predictions (i.e. the first of our dominators
1471 such that we do not post-dominate it) (but we fill this information
1472 on demand, so -1 may be there in case this was not needed yet). */
1475 predict_paths_leading_to (basic_block bb
, int *heads
, enum br_predictor pred
,
1476 enum prediction taken
)
1482 if (heads
[bb
->index
] < 0)
1484 /* This is first time we need this field in heads array; so
1485 find first dominator that we do not post-dominate (we are
1486 using already known members of heads array). */
1487 basic_block ai
= bb
;
1488 basic_block next_ai
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1491 while (heads
[next_ai
->index
] < 0)
1493 if (!dominated_by_p (CDI_POST_DOMINATORS
, next_ai
, bb
))
1495 heads
[next_ai
->index
] = ai
->index
;
1497 next_ai
= get_immediate_dominator (CDI_DOMINATORS
, next_ai
);
1499 if (!dominated_by_p (CDI_POST_DOMINATORS
, next_ai
, bb
))
1500 head
= next_ai
->index
;
1502 head
= heads
[next_ai
->index
];
1503 while (next_ai
!= bb
)
1506 if (heads
[ai
->index
] == ENTRY_BLOCK
)
1507 ai
= ENTRY_BLOCK_PTR
;
1509 ai
= BASIC_BLOCK (heads
[ai
->index
]);
1510 heads
[next_ai
->index
] = head
;
1513 y
= heads
[bb
->index
];
  /* Now find the edge that leads to our branch and apply the prediction.  */
1517 if (y
== last_basic_block
)
1519 FOR_EACH_EDGE (e
, ei
, BASIC_BLOCK (y
)->succs
)
1520 if (e
->dest
->index
>= 0
1521 && dominated_by_p (CDI_POST_DOMINATORS
, e
->dest
, bb
))
1522 predict_edge_def (e
, pred
, taken
);
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge will
     be reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;

  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
1551 #define BLOCK_INFO(B) ((block_info) (B)->aux)
1552 #define EDGE_INFO(E) ((edge_info) (E)->aux)
1554 /* Helper function for estimate_bb_frequencies.
1555 Propagate the frequencies for LOOP. */
1558 propagate_freq (struct loop
*loop
, bitmap tovisit
)
1560 basic_block head
= loop
->header
;
  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
1570 EXECUTE_IF_SET_IN_BITMAP (tovisit
, 0, i
, bi
)
1575 /* The outermost "loop" includes the exit block, which we can not
1576 look up via BASIC_BLOCK. Detect this and use EXIT_BLOCK_PTR
1577 directly. Do the same for the entry block. */
1578 if (i
== (unsigned)ENTRY_BLOCK
)
1579 bb
= ENTRY_BLOCK_PTR
;
1580 else if (i
== (unsigned)EXIT_BLOCK
)
1581 bb
= EXIT_BLOCK_PTR
;
1583 bb
= BASIC_BLOCK (i
);
1585 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1587 bool visit
= bitmap_bit_p (tovisit
, e
->src
->index
);
1589 if (visit
&& !(e
->flags
& EDGE_DFS_BACK
))
1591 else if (visit
&& dump_file
&& !EDGE_INFO (e
)->back_edge
)
1593 "Irreducible region hit, ignoring edge to %i->%i\n",
1594 e
->src
->index
, bb
->index
);
1596 BLOCK_INFO (bb
)->npredecessors
= count
;
1599 memcpy (&BLOCK_INFO (head
)->frequency
, &real_one
, sizeof (real_one
));
1601 for (bb
= head
; bb
; bb
= nextbb
)
1604 sreal cyclic_probability
, frequency
;
1606 memcpy (&cyclic_probability
, &real_zero
, sizeof (real_zero
));
1607 memcpy (&frequency
, &real_zero
, sizeof (real_zero
));
1609 nextbb
= BLOCK_INFO (bb
)->next
;
1610 BLOCK_INFO (bb
)->next
= NULL
;
1612 /* Compute frequency of basic block. */
1615 #ifdef ENABLE_CHECKING
1616 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1617 if (bitmap_bit_p (tovisit
, e
->src
->index
)
1618 && !(e
->flags
& EDGE_DFS_BACK
))
1622 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1623 if (EDGE_INFO (e
)->back_edge
)
1625 sreal_add (&cyclic_probability
, &cyclic_probability
,
1626 &EDGE_INFO (e
)->back_edge_prob
);
1628 else if (!(e
->flags
& EDGE_DFS_BACK
))
1632 /* frequency += (e->probability
1633 * BLOCK_INFO (e->src)->frequency /
1634 REG_BR_PROB_BASE); */
1636 sreal_init (&tmp
, e
->probability
, 0);
1637 sreal_mul (&tmp
, &tmp
, &BLOCK_INFO (e
->src
)->frequency
);
1638 sreal_mul (&tmp
, &tmp
, &real_inv_br_prob_base
);
1639 sreal_add (&frequency
, &frequency
, &tmp
);
1642 if (sreal_compare (&cyclic_probability
, &real_zero
) == 0)
1644 memcpy (&BLOCK_INFO (bb
)->frequency
, &frequency
,
1645 sizeof (frequency
));
1649 if (sreal_compare (&cyclic_probability
, &real_almost_one
) > 0)
1651 memcpy (&cyclic_probability
, &real_almost_one
,
1652 sizeof (real_almost_one
));
1655 /* BLOCK_INFO (bb)->frequency = frequency
1656 / (1 - cyclic_probability) */
1658 sreal_sub (&cyclic_probability
, &real_one
, &cyclic_probability
);
1659 sreal_div (&BLOCK_INFO (bb
)->frequency
,
1660 &frequency
, &cyclic_probability
);
1664 bitmap_clear_bit (tovisit
, bb
->index
);
1666 e
= find_edge (bb
, head
);
1671 /* EDGE_INFO (e)->back_edge_prob
1672 = ((e->probability * BLOCK_INFO (bb)->frequency)
1673 / REG_BR_PROB_BASE); */
1675 sreal_init (&tmp
, e
->probability
, 0);
1676 sreal_mul (&tmp
, &tmp
, &BLOCK_INFO (bb
)->frequency
);
1677 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
1678 &tmp
, &real_inv_br_prob_base
);
1681 /* Propagate to successor blocks. */
1682 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1683 if (!(e
->flags
& EDGE_DFS_BACK
)
1684 && BLOCK_INFO (e
->dest
)->npredecessors
)
1686 BLOCK_INFO (e
->dest
)->npredecessors
--;
1687 if (!BLOCK_INFO (e
->dest
)->npredecessors
)
1692 BLOCK_INFO (last
)->next
= e
->dest
;
1700 /* Estimate probabilities of loopback edges in loops at same nest level. */
1703 estimate_loops_at_level (struct loop
*first_loop
, bitmap tovisit
)
1707 for (loop
= first_loop
; loop
; loop
= loop
->next
)
1713 estimate_loops_at_level (loop
->inner
, tovisit
);
1715 /* Do not do this for dummy function loop. */
1716 if (EDGE_COUNT (loop
->latch
->succs
) > 0)
1718 /* Find current loop back edge and mark it. */
1719 e
= loop_latch_edge (loop
);
1720 EDGE_INFO (e
)->back_edge
= 1;
1723 bbs
= get_loop_body (loop
);
1724 for (i
= 0; i
< loop
->num_nodes
; i
++)
1725 bitmap_set_bit (tovisit
, bbs
[i
]->index
);
1727 propagate_freq (loop
, tovisit
);
1731 /* Convert counts measured by profile driven feedback to frequencies.
1732 Return nonzero iff there was any nonzero execution count. */
1735 counts_to_freqs (void)
1737 gcov_type count_max
, true_count_max
= 0;
1741 true_count_max
= MAX (bb
->count
, true_count_max
);
1743 count_max
= MAX (true_count_max
, 1);
1744 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1745 bb
->frequency
= (bb
->count
* BB_FREQ_MAX
+ count_max
/ 2) / count_max
;
1746 return true_count_max
;
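
/* Worked example (illustrative numbers, and assuming BB_FREQ_MAX == 10000):
   with count_max == 1000, a block whose count is 250 gets frequency
   (250 * 10000 + 500) / 1000 = 2500, i.e. a quarter of the maximum.  */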
/* Return true if the function is likely to be expensive, so there is no
   point in optimizing the performance of the prologue, epilogue or doing
   inlining at the expense of code size growth.  THRESHOLD is the limit on
   the number of instructions the function can execute on average and still
   be considered not expensive.  */
1755 expensive_function_p (int threshold
)
1757 unsigned int sum
= 0;
  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
1763 if (threshold
> BB_FREQ_MAX
)
1766 /* Frequencies are out of range. This either means that function contains
1767 internal loop executing more than BB_FREQ_MAX times or profile feedback
1768 is available and function has not been executed at all. */
1769 if (ENTRY_BLOCK_PTR
->frequency
== 0)
1772 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
1773 limit
= ENTRY_BLOCK_PTR
->frequency
* threshold
;
1778 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
1779 insn
= NEXT_INSN (insn
))
1780 if (active_insn_p (insn
))
1782 sum
+= bb
->frequency
;
1791 /* Estimate basic blocks frequency by given branch probabilities. */
1794 estimate_bb_frequencies (struct loops
*loops
)
1799 if (!flag_branch_probabilities
|| !counts_to_freqs ())
1801 static int real_values_initialized
= 0;
1804 if (!real_values_initialized
)
1806 real_values_initialized
= 1;
1807 sreal_init (&real_zero
, 0, 0);
1808 sreal_init (&real_one
, 1, 0);
1809 sreal_init (&real_br_prob_base
, REG_BR_PROB_BASE
, 0);
1810 sreal_init (&real_bb_freq_max
, BB_FREQ_MAX
, 0);
1811 sreal_init (&real_one_half
, 1, -1);
1812 sreal_div (&real_inv_br_prob_base
, &real_one
, &real_br_prob_base
);
1813 sreal_sub (&real_almost_one
, &real_one
, &real_inv_br_prob_base
);
1816 mark_dfs_back_edges ();
1818 EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->probability
= REG_BR_PROB_BASE
;
1820 /* Set up block info for each basic block. */
1821 tovisit
= BITMAP_XMALLOC ();
1822 alloc_aux_for_blocks (sizeof (struct block_info_def
));
1823 alloc_aux_for_edges (sizeof (struct edge_info_def
));
1824 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1829 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1831 sreal_init (&EDGE_INFO (e
)->back_edge_prob
, e
->probability
, 0);
1832 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
1833 &EDGE_INFO (e
)->back_edge_prob
,
1834 &real_inv_br_prob_base
);
1838 /* First compute probabilities locally for each loop from innermost
1839 to outermost to examine probabilities for back edges. */
1840 estimate_loops_at_level (loops
->tree_root
, tovisit
);
1842 memcpy (&freq_max
, &real_zero
, sizeof (real_zero
));
1844 if (sreal_compare (&freq_max
, &BLOCK_INFO (bb
)->frequency
) < 0)
1845 memcpy (&freq_max
, &BLOCK_INFO (bb
)->frequency
, sizeof (freq_max
));
1847 sreal_div (&freq_max
, &real_bb_freq_max
, &freq_max
);
1848 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
1852 sreal_mul (&tmp
, &BLOCK_INFO (bb
)->frequency
, &freq_max
);
1853 sreal_add (&tmp
, &tmp
, &real_one_half
);
1854 bb
->frequency
= sreal_to_int (&tmp
);
1857 free_aux_for_blocks ();
1858 free_aux_for_edges ();
1859 BITMAP_XFREE (tovisit
);
1861 compute_function_frequency ();
1862 if (flag_reorder_functions
)
1863 choose_function_section ();
1866 /* Decide whether function is hot, cold or unlikely executed. */
1868 compute_function_frequency (void)
1872 if (!profile_info
|| !flag_branch_probabilities
)
1874 cfun
->function_frequency
= FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
;
1877 if (maybe_hot_bb_p (bb
))
1879 cfun
->function_frequency
= FUNCTION_FREQUENCY_HOT
;
1882 if (!probably_never_executed_bb_p (bb
))
1883 cfun
->function_frequency
= FUNCTION_FREQUENCY_NORMAL
;
1887 /* Choose appropriate section for the function. */
1889 choose_function_section (void)
1891 if (DECL_SECTION_NAME (current_function_decl
)
1892 || !targetm
.have_named_sections
1893 /* Theoretically we can split the gnu.linkonce text section too,
1894 but this requires more work as the frequency needs to match
1895 for all generated objects so we need to merge the frequency
1896 of all instances. For now just never set frequency for these. */
1897 || DECL_ONE_ONLY (current_function_decl
))
1900 /* If we are doing the partitioning optimization, let the optimization
1901 choose the correct section into which to put things. */
1903 if (flag_reorder_blocks_and_partition
)
1906 if (cfun
->function_frequency
== FUNCTION_FREQUENCY_HOT
)
1907 DECL_SECTION_NAME (current_function_decl
) =
1908 build_string (strlen (HOT_TEXT_SECTION_NAME
), HOT_TEXT_SECTION_NAME
);
1909 if (cfun
->function_frequency
== FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
)
1910 DECL_SECTION_NAME (current_function_decl
) =
1911 build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME
),
1912 UNLIKELY_EXECUTED_TEXT_SECTION_NAME
);
1916 struct tree_opt_pass pass_profile
=
1918 "profile", /* name */
1920 tree_estimate_probability
, /* execute */
1923 0, /* static_pass_number */
1924 TV_BRANCH_PROB
, /* tv_id */
1925 PROP_cfg
, /* properties_required */
1926 0, /* properties_provided */
1927 0, /* properties_destroyed */
1928 0, /* todo_flags_start */
1929 TODO_ggc_collect
| TODO_verify_ssa
, /* todo_flags_finish */