/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Rough probability guesses, given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled in using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use the given predictor without Dempster-Shafer theory if it matches
   using the first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
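
/* A worked example, assuming REG_BR_PROB_BASE == 10000 (its usual value):
   HITRATE (75) == (75 * 10000 + 50) / 100 == 7500, i.e. a predictor that
   is right 75% of the time maps to a probability of 7500/10000.  The +50
   term rounds to the nearest unit when VAL is not a whole percent.  */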

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
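
/* For illustration only (the authoritative entries live in predict.def):
   an entry there has roughly the shape

     DEF_PREDICTOR (PRED_NORETURN, "noreturn call", HITRATE (99),
		    PRED_FLAG_FIRST_MATCH)

   where the exact names, hitrates and flags are whatever predict.def
   defines; each such entry expands into one predictor_info above.  */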

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (!profile_info || !flag_branch_probabilities)
    {
      if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
	return true;
    }
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
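
/* A worked example, assuming a --param hot-bb-frequency-fraction of 1000
   (see params.def for the actual default): a block whose estimated
   frequency is below BB_FREQ_MAX / 1000, i.e. less than 0.1% of the
   hottest block in the function, is not considered hot.  */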

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  return false;
}

/* Return true if one of the outgoing edges of BB is already predicted
   by PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of the outgoing edges of BB is already predicted
   by PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which is predicted right slightly over 75% of the
   time.  It is, however, notoriously poor at predicting the probability
   itself.  In general the guessed profile appears a lot flatter (with
   probabilities closer to 50%) than reality, so it is a bad idea to use
   it to drive optimizations such as those disabling dynamic branch
   prediction for well predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number-of-iterations heuristics are predicted well.
   This predicate should be able to distinguish those, but at the moment
   it simply checks for the noreturn heuristic, which is the only one
   giving a probability over 99% or below 1%.  In the future we might
   want to propagate reliability information across the CFG if we find
   it useful in multiple places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
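
/* Concretely, with REG_BR_PROB_BASE == 10000, HITRATE (1) == 100 and
   HITRATE (99) == 9900, so a guessed probability counts as reliable only
   when it lies at or below 1% or at or above 99%.  */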

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}

static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store the probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on the given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    *prediction = (*prediction)->ep_next;
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}

/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of the outgoing edges of BB.  Set
   them evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove the now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
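  /* The loop below combines two predictions p and q Dempster-Shafer style:
     combined = p*q / (p*q + (1-p)*(1-q)), scaled by REG_BR_PROB_BASE.
     A worked example, assuming REG_BR_PROB_BASE == 10000: combining
     p == 7000 (70%) and q == 6000 (60%) gives
     d = 7000*6000 + 3000*4000 = 54000000, so
     combined = 7000*6000*10000 / 54000000 + 0.5 ~= 7778; two agreeing
     70% and 60% predictors reinforce each other to roughly 78%.  */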
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  If we did not match anything, use the
     no_prediction heuristic; if we did match, use either first match or
     Dempster-Shafer theory, depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
	 non-degenerate conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into a single probability and store it into the CFG.
   Remove the now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
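  /* The combination below follows the same Dempster-Shafer scheme as in
     combine_predictions_for_insn above; see the worked example there.  */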
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  If we did not match anything, use the
     no_prediction heuristic; if we did match, use either first match or
     Dempster-Shafer theory, depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  scev_initialize ();

  /* Walk the natural loops and try to predict their exit edges.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      for (j = 0; VEC_iterate (edge, exits, j, ex); j++)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (host_integerp (niter, 1)
		  && compare_tree_int (niter, max - 1) == -1)
		nitercst = tree_low_cst (niter, 1) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_loop_iterations_int (loop, false);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

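	  /* The exit is taken roughly once per nitercst iterations.  For
	     example, assuming REG_BR_PROB_BASE == 10000 and nitercst == 10,
	     the exit edge gets probability (10000 + 5) / 10 == 1000,
	     i.e. about 10%, with the loopback side keeping the rest.  */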
	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
	}
      VEC_free (edge, heap, exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop as not
	     taken if the conditional has no loop header successor.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For loops with many exits we don't want to predict all exits
		 with a pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide the probability by the
		 number of exits is very rough.  It should compute the number
		 of exits taken in each path through the function (not the
		 overall number of exits, which might be a lot higher for
		 loops with wide switch statements in them) and compute the
		 n-th root of it.

		 We limit the minimal probability to 2% to prevent
		 EDGE_PROBABILITY_RELIABLE from trusting the branch
		 prediction, as this was causing a regression in the perl
		 benchmark containing such a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  scev_finalize ();
}

/* Attempt to predict the probabilities of BB's outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
\f
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch
   predictor, so the evidence must come from this construct and additional
   possible constant folding.

   We may want to implement a more involved value guess (such as value
   range propagation based prediction), but such tricks should go into a
   new implementation.  */
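
/* For example, in user code like

     if (__builtin_expect (ptr != NULL, 1))
       use (ptr);

   the second argument of __builtin_expect states the expected value of
   the first, and this walk propagates that expectation back to the
   condition guarding the branch.  */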

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     expected value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (GIMPLE_STMT_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree val;

	  if (call_expr_nargs (expr) != 2)
	    return NULL;
	  val = CALL_EXPR_ARG (expr, 0);
	  if (TREE_CONSTANT (val))
	    return val;
	  return CALL_EXPR_ARG (expr, 1);
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
\f
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree call;

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && (call = GIMPLE_STMT_OPERAND (stmt, 1))
	      && TREE_CODE (call) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (call))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (call) == 2)
	    {
	      GIMPLE_STMT_OPERAND (stmt, 1) = CALL_EXPR_ARG (call, 0);
	      update_stmt (stmt);
	    }
	}
    }
}
\f
/* Predict using the opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Try to guess whether the return value VAL indicates an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans, so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
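
/* For example, in code shaped like

     if (something_failed)
       return -1;
     return compute ();

   the "return -1" path is classified as PRED_NEGATIVE_RETURN and hence
   predicted as unlikely, matching the common error-handling idiom.  */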

/* Find the basic block with the return expression, look up its return
   value, and try to apply the return_prediction heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == GIMPLE_MODIFY_STMT)
    return_val = GIMPLE_STMT_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     so that we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to the execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XCNEWVEC (int, last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  tree decl;
	  switch (TREE_CODE (stmt))
	    {
	    case GIMPLE_MODIFY_STMT:
	      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = GIMPLE_STMT_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = get_callee_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, heads, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}

/* Predict branch probabilities and estimate the profile of the tree CFG.  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;

  loop_optimizer_init (0);
  if (current_loops && dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops ();
  record_loop_exits ();
  if (current_loops)
    predict_loops ();

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.

	     Since we've already removed the return statements, we are
	     looking for a CFG like:

	       if (conditional)
		 {
		   ..
		   goto return_block
		 }
	       some other blocks
	     return_block:
	       return_stmt.  */
	  if (e->dest != bb->next_bb
	      && e->dest != EXIT_BLOCK_PTR
	      && single_succ_p (e->dest)
	      && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (e->dest)) == RETURN_EXPR)
	    {
	      edge e1;
	      edge_iterator ei1;

	      if (single_succ_p (bb))
		{
		  FOR_EACH_EDGE (e1, ei1, bb->preds)
		    if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
			&& !predicted_by_p (e1->src, PRED_CONST_RETURN)
			&& !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		      predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
		}
	      else
		if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it does not post-dominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
			   && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1))
			      == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies ();
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
\f
/* Set branch probabilities on the paths leading to BB according to PRED
   and TAKEN.  HEADS[bb->index] should be the index of the basic block in
   which we need to alter the branch predictions (i.e. the first of our
   dominators such that we do not post-dominate it); we fill this
   information in on demand, so ENTRY_BLOCK may be there in case it has
   not been computed yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is the first time we need this field in the heads array, so
	 find the first dominator that we do not post-dominate (we are
	 using already known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
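
/* A quick sanity check of the formula above: a loop whose loopback edge
   is reached with probability 0.9 is expected to iterate about
   1 / (1 - 0.9) == 10 times per entry.  */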

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at the same nest
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find the current loop's back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}

/* Propagates frequencies through the structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (current_loops)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB (bb)
    {
      bitmap_set_bit (tovisit, bb->index);
    }
  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
  BITMAP_FREE (tovisit);
}

/* Convert counts measured by profile-driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
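
/* A worked example of the scaling above: if the hottest block ran
   count_max == 1000 times and BB ran 250 times, BB gets frequency
   (250 * BB_FREQ_MAX + 500) / 1000, i.e. a quarter of BB_FREQ_MAX,
   rounded to the nearest unit.  */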

/* Return true if the function is likely to be expensive, so there is no
   point in optimizing the performance of the prologue and epilogue, or in
   doing inlining, at the expense of code size growth.  THRESHOLD is the
   limit on the number of instructions the function can execute on average
   to still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute this accurately for large thresholds due to the
     scaled frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times, or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2, so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
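
/* Intuitively, the loop above accumulates the frequency of every active
   insn's block, i.e. the average executed instruction count scaled by
   ENTRY_BLOCK_PTR->frequency.  Comparing that sum against
   ENTRY_BLOCK_PTR->frequency * THRESHOLD therefore compares the average
   instruction count against THRESHOLD without needing a division.  */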

/* Estimate basic block frequencies from the given branch probabilities.  */

void
estimate_bb_frequencies (void)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold, or unlikely to be executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    {
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
      return;
    }
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose the appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects, so we need to merge the frequency
	 of all instances.  For now just never set the frequency for
	 these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}

static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};