1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* References:
23
24 [1] "Branch Prediction for Free"
25 Ball and Larus; PLDI '93.
26 [2] "Static Branch Frequency and Program Profile Analysis"
27 Wu and Larus; MICRO-27.
28 [3] "Corpus-based Static Branch Prediction"
29 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
30
31
32 #include "config.h"
33 #include "system.h"
34 #include "coretypes.h"
35 #include "tm.h"
36 #include "tree.h"
37 #include "rtl.h"
38 #include "tm_p.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "insn-config.h"
42 #include "regs.h"
43 #include "flags.h"
44 #include "output.h"
45 #include "function.h"
46 #include "except.h"
47 #include "toplev.h"
48 #include "recog.h"
49 #include "expr.h"
50 #include "predict.h"
51 #include "coverage.h"
52 #include "sreal.h"
53 #include "params.h"
54 #include "target.h"
55 #include "cfgloop.h"
56 #include "tree-flow.h"
57 #include "ggc.h"
58 #include "tree-dump.h"
59 #include "tree-pass.h"
60 #include "timevar.h"
61 #include "tree-scalar-evolution.h"
62 #include "cfgloop.h"
63
64 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
65 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
66 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
67 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
68
69 /* Random guesstimation given names. */
70 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 100 - 1)
71 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
72 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
73 #define PROB_ALWAYS (REG_BR_PROB_BASE)
74
75 static void combine_predictions_for_insn (rtx, basic_block);
76 static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
77 static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
78 static void compute_function_frequency (void);
79 static void choose_function_section (void);
80 static bool can_predict_insn_p (rtx);
81
82 /* Information we hold about each branch predictor.
83 Filled using information from predict.def. */
84
85 struct predictor_info
86 {
87 const char *const name; /* Name used in the debugging dumps. */
88 const int hitrate; /* Expected hitrate used by
89 predict_insn_def call. */
90 const int flags;
91 };
92
 93 /* Use given predictor without Dempster-Shafer theory if it matches
 94    using first_match heuristics. */
95 #define PRED_FLAG_FIRST_MATCH 1
96
 97 /* Convert a hit rate given in percent to our REG_BR_PROB_BASE representation. */
98
99 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
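/* For example, assuming the usual REG_BR_PROB_BASE of 10000,
   HITRATE (75) yields (75 * 10000 + 50) / 100 == 7500, i.e. a 75%
   hit rate expressed in REG_BR_PROB_BASE units.  */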
100
101 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
102 static const struct predictor_info predictor_info[]= {
103 #include "predict.def"
104
105 /* Upper bound on predictors. */
106 {NULL, 0, 0}
107 };
108 #undef DEF_PREDICTOR
109
 110 /* Return true if BB can be CPU-intensive and should be optimized
 111    for maximal performance. */
112
113 bool
114 maybe_hot_bb_p (basic_block bb)
115 {
116 if (profile_info && flag_branch_probabilities
117 && (bb->count
118 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
119 return false;
120 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
121 return false;
122 return true;
123 }
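/* For instance, with --param hot-bb-frequency-fraction=1000 a block must
   reach a frequency of at least BB_FREQ_MAX / 1000 (0.1% of the maximum
   possible frequency) before maybe_hot_bb_p above considers it hot.  */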
124
 125 /* Return true if BB is cold and should be optimized for size. */
126
127 bool
128 probably_cold_bb_p (basic_block bb)
129 {
130 if (profile_info && flag_branch_probabilities
131 && (bb->count
132 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
133 return true;
134 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
135 return true;
136 return false;
137 }
138
 139 /* Return true if BB is probably never executed. */
140 bool
141 probably_never_executed_bb_p (basic_block bb)
142 {
143 if (profile_info && flag_branch_probabilities)
144 return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
145 return false;
146 }
147
 148 /* Return true if one of the outgoing edges is already predicted by
 149    PREDICTOR. */
150
151 bool
152 rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
153 {
154 rtx note;
155 if (!INSN_P (BB_END (bb)))
156 return false;
157 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
158 if (REG_NOTE_KIND (note) == REG_BR_PRED
159 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
160 return true;
161 return false;
162 }
163
 164 /* Return true if one of the outgoing edges is already predicted by
 165    PREDICTOR. */
166
167 bool
168 tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
169 {
170 struct edge_prediction *i;
171 for (i = bb->predictions; i; i = i->ep_next)
172 if (i->ep_predictor == predictor)
173 return true;
174 return false;
175 }
176
 177 /* Return true when the probability of an edge is reliable.
 178
 179    The profile guessing code is good at predicting branch outcome (i.e.
 180    taken/not taken), which it gets right slightly over 75% of the time.
 181    It is however notoriously poor at predicting the probability itself.
 182    In general the guessed profile appears a lot flatter (with probabilities
 183    closer to 50%) than reality, so it is a bad idea to use it to drive
 184    optimizations such as those disabling dynamic branch prediction
 185    for well-predictable branches.
 186
 187    There are two exceptions - edges leading to noreturn calls and edges
 188    predicted by the number-of-iterations heuristics are predicted well.
 189    This predicate should distinguish those, but at the moment it simply
 190    checks for the noreturn heuristic, which is the only one giving a
 191    probability over 99% or below 1%.  In the future we might want to
 192    propagate reliability information across the CFG if found useful. */
193 static bool
194 probability_reliable_p (int prob)
195 {
196 return (profile_status == PROFILE_READ
197 || (profile_status == PROFILE_GUESSED
198 && (prob <= HITRATE (1) || prob >= HITRATE (99))));
199 }
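/* Under the usual REG_BR_PROB_BASE of 10000, a guessed probability is
   therefore treated as reliable only when it is at most HITRATE (1) == 100
   or at least HITRATE (99) == 9900.  */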
200
201 /* Same predicate as above, working on edges. */
202 bool
203 edge_probability_reliable_p (edge e)
204 {
205 return probability_reliable_p (e->probability);
206 }
207
208 /* Same predicate as edge_probability_reliable_p, working on notes. */
209 bool
210 br_prob_note_reliable_p (rtx note)
211 {
212 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
213 return probability_reliable_p (INTVAL (XEXP (note, 0)));
214 }
215
216 static void
217 predict_insn (rtx insn, enum br_predictor predictor, int probability)
218 {
219 gcc_assert (any_condjump_p (insn));
220 if (!flag_guess_branch_prob)
221 return;
222
223 REG_NOTES (insn)
224 = gen_rtx_EXPR_LIST (REG_BR_PRED,
225 gen_rtx_CONCAT (VOIDmode,
226 GEN_INT ((int) predictor),
227 GEN_INT ((int) probability)),
228 REG_NOTES (insn));
229 }
230
231 /* Predict insn by given predictor. */
232
233 void
234 predict_insn_def (rtx insn, enum br_predictor predictor,
235 enum prediction taken)
236 {
237 int probability = predictor_info[(int) predictor].hitrate;
238
239 if (taken != TAKEN)
240 probability = REG_BR_PROB_BASE - probability;
241
242 predict_insn (insn, predictor, probability);
243 }
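/* As a worked example: a predictor whose predict.def hitrate is
   HITRATE (75) == 7500 stores probability 7500 when TAKEN is requested,
   and REG_BR_PROB_BASE - 7500 == 2500 when NOT_TAKEN is requested.  */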
244
245 /* Predict edge E with given probability if possible. */
246
247 void
248 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
249 {
250 rtx last_insn;
251 last_insn = BB_END (e->src);
252
253 /* We can store the branch prediction information only about
254 conditional jumps. */
255 if (!any_condjump_p (last_insn))
256 return;
257
258 /* We always store probability of branching. */
259 if (e->flags & EDGE_FALLTHRU)
260 probability = REG_BR_PROB_BASE - probability;
261
262 predict_insn (last_insn, predictor, probability);
263 }
264
265 /* Predict edge E with the given PROBABILITY. */
266 void
267 tree_predict_edge (edge e, enum br_predictor predictor, int probability)
268 {
269 gcc_assert (profile_status != PROFILE_GUESSED);
270 if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
271 && flag_guess_branch_prob && optimize)
272 {
273 struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));
274
275 i->ep_next = e->src->predictions;
276 e->src->predictions = i;
277 i->ep_probability = probability;
278 i->ep_predictor = predictor;
279 i->ep_edge = e;
280 }
281 }
282
283 /* Remove all predictions on given basic block that are attached
284 to edge E. */
285 void
286 remove_predictions_associated_with_edge (edge e)
287 {
288 if (e->src->predictions)
289 {
290 struct edge_prediction **prediction = &e->src->predictions;
291 while (*prediction)
292 {
293 if ((*prediction)->ep_edge == e)
294 *prediction = (*prediction)->ep_next;
295 else
296 prediction = &((*prediction)->ep_next);
297 }
298 }
299 }
300
 301 /* Return true when we can store a prediction on insn INSN.
 302    At the moment we represent predictions only on conditional
 303    jumps, not on computed jumps or other complicated cases. */
304 static bool
305 can_predict_insn_p (rtx insn)
306 {
307 return (JUMP_P (insn)
308 && any_condjump_p (insn)
309 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
310 }
311
312 /* Predict edge E by given predictor if possible. */
313
314 void
315 predict_edge_def (edge e, enum br_predictor predictor,
316 enum prediction taken)
317 {
318 int probability = predictor_info[(int) predictor].hitrate;
319
320 if (taken != TAKEN)
321 probability = REG_BR_PROB_BASE - probability;
322
323 predict_edge (e, predictor, probability);
324 }
325
326 /* Invert all branch predictions or probability notes in the INSN. This needs
327 to be done each time we invert the condition used by the jump. */
328
329 void
330 invert_br_probabilities (rtx insn)
331 {
332 rtx note;
333
334 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
335 if (REG_NOTE_KIND (note) == REG_BR_PROB)
336 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
337 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
338 XEXP (XEXP (note, 0), 1)
339 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
340 }
341
342 /* Dump information about the branch prediction to the output file. */
343
344 static void
345 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
346 basic_block bb, int used)
347 {
348 edge e;
349 edge_iterator ei;
350
351 if (!file)
352 return;
353
354 FOR_EACH_EDGE (e, ei, bb->succs)
355 if (! (e->flags & EDGE_FALLTHRU))
356 break;
357
358 fprintf (file, " %s heuristics%s: %.1f%%",
359 predictor_info[predictor].name,
360 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
361
362 if (bb->count)
363 {
364 fprintf (file, " exec ");
365 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
366 if (e)
367 {
368 fprintf (file, " hit ");
369 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
370 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
371 }
372 }
373
374 fprintf (file, "\n");
375 }
376
 377 /* We cannot predict the probabilities of the outgoing edges of BB.  Set them
 378    evenly and hope for the best. */
379 static void
380 set_even_probabilities (basic_block bb)
381 {
382 int nedges = 0;
383 edge e;
384 edge_iterator ei;
385
386 FOR_EACH_EDGE (e, ei, bb->succs)
387 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
388 nedges ++;
389 FOR_EACH_EDGE (e, ei, bb->succs)
390 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
391 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
392 else
393 e->probability = 0;
394 }
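/* E.g. with the usual REG_BR_PROB_BASE of 10000 and three eligible
   successor edges, each edge above receives (10000 + 1) / 3 == 3333,
   the rounding term keeping the total close to REG_BR_PROB_BASE.  */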
395
396 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
397 note if not already present. Remove now useless REG_BR_PRED notes. */
398
399 static void
400 combine_predictions_for_insn (rtx insn, basic_block bb)
401 {
402 rtx prob_note;
403 rtx *pnote;
404 rtx note;
405 int best_probability = PROB_EVEN;
406 int best_predictor = END_PREDICTORS;
407 int combined_probability = REG_BR_PROB_BASE / 2;
408 int d;
409 bool first_match = false;
410 bool found = false;
411
412 if (!can_predict_insn_p (insn))
413 {
414 set_even_probabilities (bb);
415 return;
416 }
417
418 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
419 pnote = &REG_NOTES (insn);
420 if (dump_file)
421 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
422 bb->index);
423
 424 /* We implement "first match" heuristics and use the probability guessed
 425    by the predictor with the smallest index. */
426 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
427 if (REG_NOTE_KIND (note) == REG_BR_PRED)
428 {
429 int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
430 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
431
432 found = true;
433 if (best_predictor > predictor)
434 best_probability = probability, best_predictor = predictor;
435
436 d = (combined_probability * probability
437 + (REG_BR_PROB_BASE - combined_probability)
438 * (REG_BR_PROB_BASE - probability));
439
 440 /* Use FP math to avoid overflows of 32-bit integers. */
441 if (d == 0)
442 /* If one probability is 0% and one 100%, avoid division by zero. */
443 combined_probability = REG_BR_PROB_BASE / 2;
444 else
445 combined_probability = (((double) combined_probability) * probability
446 * REG_BR_PROB_BASE / d + 0.5);
447 }
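  /* A worked example of the Dempster-Shafer combination above, assuming
     the usual REG_BR_PROB_BASE of 10000: combining two predictors that
     both claim 80% (8000) gives d = 8000*8000 + 2000*2000 == 68000000
     and a combined probability of 8000*8000*10000/d ~= 9412, i.e. two
     agreeing predictors reinforce each other.  */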
448
 449 /* Decide which heuristic to use.  If we didn't match anything,
 450    use the no_prediction heuristic; if we did match, use either
 451    first match or Dempster-Shafer theory depending on the flags. */
452
453 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
454 first_match = true;
455
456 if (!found)
457 dump_prediction (dump_file, PRED_NO_PREDICTION,
458 combined_probability, bb, true);
459 else
460 {
461 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
462 bb, !first_match);
463 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
464 bb, first_match);
465 }
466
467 if (first_match)
468 combined_probability = best_probability;
469 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
470
471 while (*pnote)
472 {
473 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
474 {
475 int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
476 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
477
478 dump_prediction (dump_file, predictor, probability, bb,
479 !first_match || best_predictor == predictor);
480 *pnote = XEXP (*pnote, 1);
481 }
482 else
483 pnote = &XEXP (*pnote, 1);
484 }
485
486 if (!prob_note)
487 {
488 REG_NOTES (insn)
489 = gen_rtx_EXPR_LIST (REG_BR_PROB,
490 GEN_INT (combined_probability), REG_NOTES (insn));
491
 492 /* Save the prediction into the CFG if we are looking at a non-degenerate
 493    conditional jump. */
494 if (!single_succ_p (bb))
495 {
496 BRANCH_EDGE (bb)->probability = combined_probability;
497 FALLTHRU_EDGE (bb)->probability
498 = REG_BR_PROB_BASE - combined_probability;
499 }
500 }
501 else if (!single_succ_p (bb))
502 {
503 int prob = INTVAL (XEXP (prob_note, 0));
504
505 BRANCH_EDGE (bb)->probability = prob;
506 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
507 }
508 else
509 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
510 }
511
512 /* Combine predictions into single probability and store them into CFG.
513 Remove now useless prediction entries. */
514
515 static void
516 combine_predictions_for_bb (basic_block bb)
517 {
518 int best_probability = PROB_EVEN;
519 int best_predictor = END_PREDICTORS;
520 int combined_probability = REG_BR_PROB_BASE / 2;
521 int d;
522 bool first_match = false;
523 bool found = false;
524 struct edge_prediction *pred;
525 int nedges = 0;
526 edge e, first = NULL, second = NULL;
527 edge_iterator ei;
528
529 FOR_EACH_EDGE (e, ei, bb->succs)
530 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
531 {
532 nedges ++;
533 if (first && !second)
534 second = e;
535 if (!first)
536 first = e;
537 }
538
 539 /* When there is no successor or only one choice, prediction is easy.
 540
 541    We are lazy for now and predict only basic blocks with two outgoing
 542    edges.  It is possible to predict the generic case too, but we would
 543    have to ignore the first-match heuristics and do more involved
 544    combining.  Implement this later. */
545 if (nedges != 2)
546 {
547 if (!bb->count)
548 set_even_probabilities (bb);
549 bb->predictions = NULL;
550 if (dump_file)
551 fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
552 nedges, bb->index);
553 return;
554 }
555
556 if (dump_file)
557 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
558
 559 /* We implement "first match" heuristics and use the probability guessed
 560    by the predictor with the smallest index. */
561 for (pred = bb->predictions; pred; pred = pred->ep_next)
562 {
563 int predictor = pred->ep_predictor;
564 int probability = pred->ep_probability;
565
566 if (pred->ep_edge != first)
567 probability = REG_BR_PROB_BASE - probability;
568
569 found = true;
570 if (best_predictor > predictor)
571 best_probability = probability, best_predictor = predictor;
572
573 d = (combined_probability * probability
574 + (REG_BR_PROB_BASE - combined_probability)
575 * (REG_BR_PROB_BASE - probability));
576
 577 /* Use FP math to avoid overflows of 32-bit integers. */
578 if (d == 0)
579 /* If one probability is 0% and one 100%, avoid division by zero. */
580 combined_probability = REG_BR_PROB_BASE / 2;
581 else
582 combined_probability = (((double) combined_probability) * probability
583 * REG_BR_PROB_BASE / d + 0.5);
584 }
585
 586 /* Decide which heuristic to use.  If we didn't match anything,
 587    use the no_prediction heuristic; if we did match, use either
 588    first match or Dempster-Shafer theory depending on the flags. */
589
590 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
591 first_match = true;
592
593 if (!found)
594 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
595 else
596 {
597 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
598 !first_match);
599 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
600 first_match);
601 }
602
603 if (first_match)
604 combined_probability = best_probability;
605 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
606
607 for (pred = bb->predictions; pred; pred = pred->ep_next)
608 {
609 int predictor = pred->ep_predictor;
610 int probability = pred->ep_probability;
611
612 if (pred->ep_edge != EDGE_SUCC (bb, 0))
613 probability = REG_BR_PROB_BASE - probability;
614 dump_prediction (dump_file, predictor, probability, bb,
615 !first_match || best_predictor == predictor);
616 }
617 bb->predictions = NULL;
618
619 if (!bb->count)
620 {
621 first->probability = combined_probability;
622 second->probability = REG_BR_PROB_BASE - combined_probability;
623 }
624 }
625
626 /* Predict edge probabilities by exploiting loop structure. */
627
628 static void
629 predict_loops (void)
630 {
631 loop_iterator li;
632 struct loop *loop;
633
634 scev_initialize ();
635
 636 /* Predict loop exit edges and edges back to the loop header for each
 637    natural loop. */
638 FOR_EACH_LOOP (li, loop, 0)
639 {
640 basic_block bb, *bbs;
641 unsigned j, n_exits;
642 VEC (edge, heap) *exits;
643 struct tree_niter_desc niter_desc;
644 edge ex;
645
646 exits = get_loop_exit_edges (loop);
647 n_exits = VEC_length (edge, exits);
648
649 for (j = 0; VEC_iterate (edge, exits, j, ex); j++)
650 {
651 tree niter = NULL;
652 HOST_WIDE_INT nitercst;
653 int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
654 int probability;
655 enum br_predictor predictor;
656
657 if (number_of_iterations_exit (loop, ex, &niter_desc, false))
658 niter = niter_desc.niter;
659 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
660 niter = loop_niter_by_eval (loop, ex);
661
662 if (TREE_CODE (niter) == INTEGER_CST)
663 {
664 if (host_integerp (niter, 1)
665 && compare_tree_int (niter, max-1) == -1)
666 nitercst = tree_low_cst (niter, 1) + 1;
667 else
668 nitercst = max;
669 predictor = PRED_LOOP_ITERATIONS;
670 }
671 /* If we have just one exit and we can derive some information about
672 the number of iterations of the loop from the statements inside
673 the loop, use it to predict this exit. */
674 else if (n_exits == 1)
675 {
676 nitercst = estimated_loop_iterations_int (loop, false);
677 if (nitercst < 0)
678 continue;
679 if (nitercst > max)
680 nitercst = max;
681
682 predictor = PRED_LOOP_ITERATIONS_GUESSED;
683 }
684 else
685 continue;
686
687 probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
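  /* E.g. a loop predicted to iterate 100 times gives, with the usual
     REG_BR_PROB_BASE of 10000, an exit probability of
     (10000 + 50) / 100 == 100, i.e. roughly 1% per iteration.  */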
688 predict_edge (ex, predictor, probability);
689 }
690 VEC_free (edge, heap, exits);
691
692 bbs = get_loop_body (loop);
693
694 for (j = 0; j < loop->num_nodes; j++)
695 {
696 int header_found = 0;
697 edge e;
698 edge_iterator ei;
699
700 bb = bbs[j];
701
 702 /* Bypass loop heuristics on continue statements.  These
 703    statements construct loops via "non-loop" constructs
 704    in the source language and are better handled
 705    separately. */
706 if (predicted_by_p (bb, PRED_CONTINUE))
707 continue;
708
709 /* Loop branch heuristics - predict an edge back to a
710 loop's head as taken. */
711 if (bb == loop->latch)
712 {
713 e = find_edge (loop->latch, loop->header);
714 if (e)
715 {
716 header_found = 1;
717 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
718 }
719 }
720
 721 /* Loop exit heuristics - predict as not taken an edge exiting the loop
 722    when the conditional has no successor in the loop header. */
723 if (!header_found
724 /* If we already used more reliable loop exit predictors, do not
725 bother with PRED_LOOP_EXIT. */
726 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
727 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
728 {
 729 /* For a loop with many exits we don't want to predict every exit
 730    with a fairly large probability, because if all exits are
 731    considered in a row, the loop would be predicted to iterate
 732    almost never.  The code dividing the probability by the number
 733    of exits is very rough.  It should compute the number of exits
 734    taken on each path through the function (not the overall number
 735    of exits, which may be a lot higher for loops with wide switch
 736    statements in them) and compute the n-th root.
 737
 738    We limit the minimum probability to 2% to keep
 739    EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
 740    as this was causing a regression in the perl benchmark containing
 741    such a wide loop. */
742
743 int probability = ((REG_BR_PROB_BASE
744 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
745 / n_exits);
746 if (probability < HITRATE (2))
747 probability = HITRATE (2);
748 FOR_EACH_EDGE (e, ei, bb->succs)
749 if (e->dest->index < NUM_FIXED_BLOCKS
750 || !flow_bb_inside_loop_p (loop, e->dest))
751 predict_edge (e, PRED_LOOP_EXIT, probability);
752 }
753 }
754
755 /* Free basic blocks from get_loop_body. */
756 free (bbs);
757 }
758
759 scev_finalize ();
760 }
761
762 /* Attempt to predict probabilities of BB outgoing edges using local
763 properties. */
764 static void
765 bb_estimate_probability_locally (basic_block bb)
766 {
767 rtx last_insn = BB_END (bb);
768 rtx cond;
769
770 if (! can_predict_insn_p (last_insn))
771 return;
772 cond = get_condition (last_insn, NULL, false, false);
773 if (! cond)
774 return;
775
776 /* Try "pointer heuristic."
777 A comparison ptr == 0 is predicted as false.
778 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
779 if (COMPARISON_P (cond)
780 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
781 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
782 {
783 if (GET_CODE (cond) == EQ)
784 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
785 else if (GET_CODE (cond) == NE)
786 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
787 }
788 else
789
790 /* Try "opcode heuristic."
791 EQ tests are usually false and NE tests are usually true. Also,
792 most quantities are positive, so we can make the appropriate guesses
793 about signed comparisons against zero. */
794 switch (GET_CODE (cond))
795 {
796 case CONST_INT:
797 /* Unconditional branch. */
798 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
799 cond == const0_rtx ? NOT_TAKEN : TAKEN);
800 break;
801
802 case EQ:
803 case UNEQ:
 804 /* Floating point comparisons appear to behave in a very
 805    unpredictable way because of the special role of equality
 806    tests in FP code. */
807 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
808 ;
809 /* Comparisons with 0 are often used for booleans and there is
810 nothing useful to predict about them. */
811 else if (XEXP (cond, 1) == const0_rtx
812 || XEXP (cond, 0) == const0_rtx)
813 ;
814 else
815 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
816 break;
817
818 case NE:
819 case LTGT:
 820 /* Floating point comparisons appear to behave in a very
 821    unpredictable way because of the special role of equality
 822    tests in FP code. */
823 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
824 ;
825 /* Comparisons with 0 are often used for booleans and there is
826 nothing useful to predict about them. */
827 else if (XEXP (cond, 1) == const0_rtx
828 || XEXP (cond, 0) == const0_rtx)
829 ;
830 else
831 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
832 break;
833
834 case ORDERED:
835 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
836 break;
837
838 case UNORDERED:
839 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
840 break;
841
842 case LE:
843 case LT:
844 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
845 || XEXP (cond, 1) == constm1_rtx)
846 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
847 break;
848
849 case GE:
850 case GT:
851 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
852 || XEXP (cond, 1) == constm1_rtx)
853 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
854 break;
855
856 default:
857 break;
858 }
859 }
860
861 /* Set edge->probability for each successor edge of BB. */
862 void
863 guess_outgoing_edge_probabilities (basic_block bb)
864 {
865 bb_estimate_probability_locally (bb);
866 combine_predictions_for_insn (BB_END (bb), bb);
867 }
868 \f
 869 /* Return the constant EXPR will likely have at execution time, or NULL
 870    if unknown.  The function is used by the builtin_expect branch predictor,
 871    so the evidence must come from this construct and possible additional
 872    constant folding.
 873
 874    We may want to implement a more involved value guess (such as value range
 875    propagation based prediction), but such tricks should go into a new implementation. */
876
877 static tree
878 expr_expected_value (tree expr, bitmap visited)
879 {
880 if (TREE_CONSTANT (expr))
881 return expr;
882 else if (TREE_CODE (expr) == SSA_NAME)
883 {
884 tree def = SSA_NAME_DEF_STMT (expr);
885
886 /* If we were already here, break the infinite cycle. */
887 if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
888 return NULL;
889 bitmap_set_bit (visited, SSA_NAME_VERSION (expr));
890
891 if (TREE_CODE (def) == PHI_NODE)
892 {
 893 /* All the arguments of the PHI node must have the same expected
 894    constant value. */
895 int i;
896 tree val = NULL, new_val;
897
898 for (i = 0; i < PHI_NUM_ARGS (def); i++)
899 {
900 tree arg = PHI_ARG_DEF (def, i);
901
 902 /* If this PHI has itself as an argument, we cannot
 903    determine the expected value of this argument.  However,
 904    if we can find an expected constant value for the other
 905    PHI args then we can still be sure that this is
 906    likely a constant.  So be optimistic and just
 907    continue with the next argument. */
908 if (arg == PHI_RESULT (def))
909 continue;
910
911 new_val = expr_expected_value (arg, visited);
912 if (!new_val)
913 return NULL;
914 if (!val)
915 val = new_val;
916 else if (!operand_equal_p (val, new_val, false))
917 return NULL;
918 }
919 return val;
920 }
921 if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
922 || GIMPLE_STMT_OPERAND (def, 0) != expr)
923 return NULL;
924 return expr_expected_value (GIMPLE_STMT_OPERAND (def, 1), visited);
925 }
926 else if (TREE_CODE (expr) == CALL_EXPR)
927 {
928 tree decl = get_callee_fndecl (expr);
929 if (!decl)
930 return NULL;
931 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
932 && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
933 {
934 tree val;
935
936 if (call_expr_nargs (expr) != 2)
937 return NULL;
938 val = CALL_EXPR_ARG (expr, 0);
939 if (TREE_CONSTANT (val))
940 return val;
941 return CALL_EXPR_ARG (expr, 1);
942 }
943 }
944 if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
945 {
946 tree op0, op1, res;
947 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
948 if (!op0)
949 return NULL;
950 op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
951 if (!op1)
952 return NULL;
953 res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
954 if (TREE_CONSTANT (res))
955 return res;
956 return NULL;
957 }
958 if (UNARY_CLASS_P (expr))
959 {
960 tree op0, res;
961 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
962 if (!op0)
963 return NULL;
964 res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
965 if (TREE_CONSTANT (res))
966 return res;
967 return NULL;
968 }
969 return NULL;
970 }
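/* As a usage sketch (not part of this file's API): for source such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   expr_expected_value resolves the expected value of the comparison to
   the second argument, 1, which tree_predict_by_opcode below turns into
   a PRED_BUILTIN_EXPECT prediction on the branch.  */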
971 \f
972 /* Get rid of all builtin_expect calls we no longer need. */
973 static void
974 strip_builtin_expect (void)
975 {
976 basic_block bb;
977 FOR_EACH_BB (bb)
978 {
979 block_stmt_iterator bi;
980 for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
981 {
982 tree stmt = bsi_stmt (bi);
983 tree fndecl;
984 tree call;
985
986 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
987 && (call = GIMPLE_STMT_OPERAND (stmt, 1))
988 && TREE_CODE (call) == CALL_EXPR
989 && (fndecl = get_callee_fndecl (call))
990 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
991 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
992 && call_expr_nargs (call) == 2)
993 {
994 GIMPLE_STMT_OPERAND (stmt, 1) = CALL_EXPR_ARG (call, 0);
995 update_stmt (stmt);
996 }
997 }
998 }
999 }
1000 \f
1001 /* Predict using opcode of the last statement in basic block. */
1002 static void
1003 tree_predict_by_opcode (basic_block bb)
1004 {
1005 tree stmt = last_stmt (bb);
1006 edge then_edge;
1007 tree cond;
1008 tree op0;
1009 tree type;
1010 tree val;
1011 bitmap visited;
1012 edge_iterator ei;
1013
1014 if (!stmt || TREE_CODE (stmt) != COND_EXPR)
1015 return;
1016 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1017 if (then_edge->flags & EDGE_TRUE_VALUE)
1018 break;
1019 cond = TREE_OPERAND (stmt, 0);
1020 if (!COMPARISON_CLASS_P (cond))
1021 return;
1022 op0 = TREE_OPERAND (cond, 0);
1023 type = TREE_TYPE (op0);
1024 visited = BITMAP_ALLOC (NULL);
1025 val = expr_expected_value (cond, visited);
1026 BITMAP_FREE (visited);
1027 if (val)
1028 {
1029 if (integer_zerop (val))
1030 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
1031 else
1032 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
1033 return;
1034 }
1035 /* Try "pointer heuristic."
1036 A comparison ptr == 0 is predicted as false.
1037 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1038 if (POINTER_TYPE_P (type))
1039 {
1040 if (TREE_CODE (cond) == EQ_EXPR)
1041 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
1042 else if (TREE_CODE (cond) == NE_EXPR)
1043 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1044 }
1045 else
1046
1047 /* Try "opcode heuristic."
1048 EQ tests are usually false and NE tests are usually true. Also,
1049 most quantities are positive, so we can make the appropriate guesses
1050 about signed comparisons against zero. */
1051 switch (TREE_CODE (cond))
1052 {
1053 case EQ_EXPR:
1054 case UNEQ_EXPR:
 1055 /* Floating point comparisons appear to behave in a very
 1056    unpredictable way because of the special role of equality
 1057    tests in FP code. */
1058 if (FLOAT_TYPE_P (type))
1059 ;
1060 /* Comparisons with 0 are often used for booleans and there is
1061 nothing useful to predict about them. */
1062 else if (integer_zerop (op0)
1063 || integer_zerop (TREE_OPERAND (cond, 1)))
1064 ;
1065 else
1066 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
1067 break;
1068
1069 case NE_EXPR:
1070 case LTGT_EXPR:
 1071 /* Floating point comparisons appear to behave in a very
 1072    unpredictable way because of the special role of equality
 1073    tests in FP code. */
1074 if (FLOAT_TYPE_P (type))
1075 ;
1076 /* Comparisons with 0 are often used for booleans and there is
1077 nothing useful to predict about them. */
1078 else if (integer_zerop (op0)
1079 || integer_zerop (TREE_OPERAND (cond, 1)))
1080 ;
1081 else
1082 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
1083 break;
1084
1085 case ORDERED_EXPR:
1086 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
1087 break;
1088
1089 case UNORDERED_EXPR:
1090 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
1091 break;
1092
1093 case LE_EXPR:
1094 case LT_EXPR:
1095 if (integer_zerop (TREE_OPERAND (cond, 1))
1096 || integer_onep (TREE_OPERAND (cond, 1))
1097 || integer_all_onesp (TREE_OPERAND (cond, 1))
1098 || real_zerop (TREE_OPERAND (cond, 1))
1099 || real_onep (TREE_OPERAND (cond, 1))
1100 || real_minus_onep (TREE_OPERAND (cond, 1)))
1101 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
1102 break;
1103
1104 case GE_EXPR:
1105 case GT_EXPR:
1106 if (integer_zerop (TREE_OPERAND (cond, 1))
1107 || integer_onep (TREE_OPERAND (cond, 1))
1108 || integer_all_onesp (TREE_OPERAND (cond, 1))
1109 || real_zerop (TREE_OPERAND (cond, 1))
1110 || real_onep (TREE_OPERAND (cond, 1))
1111 || real_minus_onep (TREE_OPERAND (cond, 1)))
1112 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
1113 break;
1114
1115 default:
1116 break;
1117 }
1118 }
1119
 1120 /* Try to guess whether a return value indicates an error code. */
1121 static enum br_predictor
1122 return_prediction (tree val, enum prediction *prediction)
1123 {
1124 /* VOID. */
1125 if (!val)
1126 return PRED_NO_PREDICTION;
1127 /* Different heuristics for pointers and scalars. */
1128 if (POINTER_TYPE_P (TREE_TYPE (val)))
1129 {
1130 /* NULL is usually not returned. */
1131 if (integer_zerop (val))
1132 {
1133 *prediction = NOT_TAKEN;
1134 return PRED_NULL_RETURN;
1135 }
1136 }
1137 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
1138 {
1139 /* Negative return values are often used to indicate
1140 errors. */
1141 if (TREE_CODE (val) == INTEGER_CST
1142 && tree_int_cst_sgn (val) < 0)
1143 {
1144 *prediction = NOT_TAKEN;
1145 return PRED_NEGATIVE_RETURN;
1146 }
 1147 /* Constant return values seem to be commonly taken.
 1148    Zero/one often represent booleans, so exclude them from the
 1149    heuristics. */
1150 if (TREE_CONSTANT (val)
1151 && (!integer_zerop (val) && !integer_onep (val)))
1152 {
1153 *prediction = TAKEN;
1154 return PRED_CONST_RETURN;
1155 }
1156 }
1157 return PRED_NO_PREDICTION;
1158 }
1159
 1160 /* Find the basic block with the return expression and look for a possible
 1161    return value, trying to apply the RETURN_PREDICTION heuristics. */
1162 static void
1163 apply_return_prediction (int *heads)
1164 {
1165 tree return_stmt = NULL;
1166 tree return_val;
1167 edge e;
1168 tree phi;
1169 int phi_num_args, i;
1170 enum br_predictor pred;
1171 enum prediction direction;
1172 edge_iterator ei;
1173
1174 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1175 {
1176 return_stmt = last_stmt (e->src);
1177 if (TREE_CODE (return_stmt) == RETURN_EXPR)
1178 break;
1179 }
1180 if (!e)
1181 return;
1182 return_val = TREE_OPERAND (return_stmt, 0);
1183 if (!return_val)
1184 return;
1185 if (TREE_CODE (return_val) == GIMPLE_MODIFY_STMT)
1186 return_val = GIMPLE_STMT_OPERAND (return_val, 1);
1187 if (TREE_CODE (return_val) != SSA_NAME
1188 || !SSA_NAME_DEF_STMT (return_val)
1189 || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
1190 return;
1191 for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
1192 if (PHI_RESULT (phi) == return_val)
1193 break;
1194 if (!phi)
1195 return;
1196 phi_num_args = PHI_NUM_ARGS (phi);
1197 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
1198
 1199 /* Avoid the degenerate case where all return values from the function
 1200    belong to the same category (i.e. they are all positive constants),
 1201    so we can hardly say anything about them. */
1202 for (i = 1; i < phi_num_args; i++)
1203 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
1204 break;
1205 if (i != phi_num_args)
1206 for (i = 0; i < phi_num_args; i++)
1207 {
1208 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
1209 if (pred != PRED_NO_PREDICTION)
1210 predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
1211 direction);
1212 }
1213 }
1214
 1215 /* Look for basic blocks that contain unlikely-to-happen events
 1216    (such as noreturn calls) and mark all paths leading to the execution
 1217    of these basic blocks as unlikely. */
1218
1219 static void
1220 tree_bb_level_predictions (void)
1221 {
1222 basic_block bb;
1223 int *heads;
1224
1225 heads = XCNEWVEC (int, last_basic_block);
1226 heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
1227
1228 apply_return_prediction (heads);
1229
1230 FOR_EACH_BB (bb)
1231 {
1232 block_stmt_iterator bsi = bsi_last (bb);
1233
1234 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1235 {
1236 tree stmt = bsi_stmt (bsi);
1237 switch (TREE_CODE (stmt))
1238 {
1239 case GIMPLE_MODIFY_STMT:
1240 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
1241 {
1242 stmt = GIMPLE_STMT_OPERAND (stmt, 1);
1243 goto call_expr;
1244 }
1245 break;
1246 case CALL_EXPR:
1247 call_expr:;
1248 if (call_expr_flags (stmt) & ECF_NORETURN)
1249 predict_paths_leading_to (bb, heads, PRED_NORETURN,
1250 NOT_TAKEN);
1251 break;
1252 default:
1253 break;
1254 }
1255 }
1256 }
1257
1258 free (heads);
1259 }
1260
1261 /* Predict branch probabilities and estimate profile of the tree CFG. */
1262 static unsigned int
1263 tree_estimate_probability (void)
1264 {
1265 basic_block bb;
1266
1267 loop_optimizer_init (0);
1268 if (current_loops && dump_file && (dump_flags & TDF_DETAILS))
1269 flow_loops_dump (dump_file, NULL, 0);
1270
1271 add_noreturn_fake_exit_edges ();
1272 connect_infinite_loops_to_exit ();
1273 calculate_dominance_info (CDI_DOMINATORS);
1274 calculate_dominance_info (CDI_POST_DOMINATORS);
1275
1276 tree_bb_level_predictions ();
1277
1278 mark_irreducible_loops ();
1279 record_loop_exits ();
1280 if (current_loops)
1281 predict_loops ();
1282
1283 FOR_EACH_BB (bb)
1284 {
1285 edge e;
1286 edge_iterator ei;
1287
1288 FOR_EACH_EDGE (e, ei, bb->succs)
1289 {
 1290 /* Predict early returns to be probable, as we've already taken
 1291    care of error returns and other cases are often used for
 1292    fast paths through the function.
 1293
 1294    Since we've already removed the return statements, we are
 1295    looking for a CFG like:
 1296
 1297    if (conditional)
 1298      {
 1299        ..
 1300        goto return_block
 1301      }
 1302    some other blocks
 1303    return_block:
 1304    return_stmt. */
1305 if (e->dest != bb->next_bb
1306 && e->dest != EXIT_BLOCK_PTR
1307 && single_succ_p (e->dest)
1308 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
1309 && TREE_CODE (last_stmt (e->dest)) == RETURN_EXPR)
1310 {
1311 edge e1;
1312 edge_iterator ei1;
1313
1314 if (single_succ_p (bb))
1315 {
1316 FOR_EACH_EDGE (e1, ei1, bb->preds)
1317 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
1318 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
1319 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
1320 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
1321 }
1322 else
1323 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
1324 && !predicted_by_p (e->src, PRED_CONST_RETURN)
1325 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
1326 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
1327 }
1328
 1329 /* Look for a block we are guarding (i.e. we dominate it,
 1330    but it doesn't postdominate us). */
1331 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
1332 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
1333 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
1334 {
1335 block_stmt_iterator bi;
1336
1337 /* The call heuristic claims that a guarded function call
1338 is improbable. This is because such calls are often used
1339 to signal exceptional situations such as printing error
1340 messages. */
1341 for (bi = bsi_start (e->dest); !bsi_end_p (bi);
1342 bsi_next (&bi))
1343 {
1344 tree stmt = bsi_stmt (bi);
1345 if ((TREE_CODE (stmt) == CALL_EXPR
1346 || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1347 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1))
1348 == CALL_EXPR))
 1349 /* Const and pure calls are hardly ever used to signal
 1350    something exceptional. */
1351 && TREE_SIDE_EFFECTS (stmt))
1352 {
1353 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
1354 break;
1355 }
1356 }
1357 }
1358 }
1359 tree_predict_by_opcode (bb);
1360 }
1361 FOR_EACH_BB (bb)
1362 combine_predictions_for_bb (bb);
1363
1364 strip_builtin_expect ();
1365 estimate_bb_frequencies ();
1366 free_dominance_info (CDI_POST_DOMINATORS);
1367 remove_fake_exit_edges ();
1368 loop_optimizer_finalize ();
1369 if (dump_file && (dump_flags & TDF_DETAILS))
1370 dump_tree_cfg (dump_file, dump_flags);
1371 if (profile_status == PROFILE_ABSENT)
1372 profile_status = PROFILE_GUESSED;
1373 return 0;
1374 }
1375 \f
 1376 /* Set branch probabilities according to PRED and TAKEN.
 1377    HEADS[bb->index] should be the index of the basic block in which we
 1378    need to alter the branch predictions (i.e. the first of our dominators
 1379    such that we do not post-dominate it); we fill this information in on
 1380    demand, so ENTRY_BLOCK may be there when it has not been computed yet. */
1381
1382 static void
1383 predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
1384 enum prediction taken)
1385 {
1386 edge e;
1387 edge_iterator ei;
1388 int y;
1389
1390 if (heads[bb->index] == ENTRY_BLOCK)
1391 {
 1392 /* This is the first time we need this field in the heads array, so
 1393    find the first dominator that we do not post-dominate (using
 1394    already-known members of the heads array). */
1395 basic_block ai = bb;
1396 basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
1397 int head;
1398
1399 while (heads[next_ai->index] == ENTRY_BLOCK)
1400 {
1401 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1402 break;
1403 heads[next_ai->index] = ai->index;
1404 ai = next_ai;
1405 next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
1406 }
1407 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1408 head = next_ai->index;
1409 else
1410 head = heads[next_ai->index];
1411 while (next_ai != bb)
1412 {
1413 next_ai = ai;
1414 ai = BASIC_BLOCK (heads[ai->index]);
1415 heads[next_ai->index] = head;
1416 }
1417 }
1418 y = heads[bb->index];
1419
 1420 /* Now find the edge that leads to our branch and apply the prediction. */
1421
1422 if (y == last_basic_block)
1423 return;
1424 FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
1425 if (e->dest->index >= NUM_FIXED_BLOCKS
1426 && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
1427 predict_edge_def (e, pred, taken);
1428 }
1429 \f
1430 /* This is used to carry information about basic blocks. It is
1431 attached to the AUX field of the standard CFG block. */
1432
1433 typedef struct block_info_def
1434 {
1435 /* Estimated frequency of execution of basic_block. */
1436 sreal frequency;
1437
1438 /* To keep queue of basic blocks to process. */
1439 basic_block next;
1440
1441 /* Number of predecessors we need to visit first. */
1442 int npredecessors;
1443 } *block_info;
1444
1445 /* Similar information for edges. */
1446 typedef struct edge_info_def
1447 {
 1448 /* If the edge is a loopback edge, the probability that the edge will be
 1449    reached provided that the header is.  The estimated number of iterations
 1450    of the loop can then be computed as 1 / (1 - back_edge_prob). */
1451 sreal back_edge_prob;
1452 /* True if the edge is a loopback edge in the natural loop. */
1453 unsigned int back_edge:1;
1454 } *edge_info;
1455
1456 #define BLOCK_INFO(B) ((block_info) (B)->aux)
1457 #define EDGE_INFO(E) ((edge_info) (E)->aux)
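/* For example, a back edge taken with probability 0.9 implies an
   estimated 1 / (1 - 0.9) == 10 iterations of its loop.  */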
1458
1459 /* Helper function for estimate_bb_frequencies.
1460 Propagate the frequencies in blocks marked in
1461 TOVISIT, starting in HEAD. */
1462
1463 static void
1464 propagate_freq (basic_block head, bitmap tovisit)
1465 {
1466 basic_block bb;
1467 basic_block last;
1468 unsigned i;
1469 edge e;
1470 basic_block nextbb;
1471 bitmap_iterator bi;
1472
 1473 /* For each basic block we need to visit, count the number of its
 1474    predecessors that we need to visit first. */
1475 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
1476 {
1477 edge_iterator ei;
1478 int count = 0;
1479
 1480 /* The outermost "loop" includes the exit block, which we cannot
 1481    look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
 1482    directly.  Do the same for the entry block. */
1483 bb = BASIC_BLOCK (i);
1484
1485 FOR_EACH_EDGE (e, ei, bb->preds)
1486 {
1487 bool visit = bitmap_bit_p (tovisit, e->src->index);
1488
1489 if (visit && !(e->flags & EDGE_DFS_BACK))
1490 count++;
1491 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
1492 fprintf (dump_file,
1493 "Irreducible region hit, ignoring edge to %i->%i\n",
1494 e->src->index, bb->index);
1495 }
1496 BLOCK_INFO (bb)->npredecessors = count;
1497 }
1498
1499 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
1500 last = head;
1501 for (bb = head; bb; bb = nextbb)
1502 {
1503 edge_iterator ei;
1504 sreal cyclic_probability, frequency;
1505
1506 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
1507 memcpy (&frequency, &real_zero, sizeof (real_zero));
1508
1509 nextbb = BLOCK_INFO (bb)->next;
1510 BLOCK_INFO (bb)->next = NULL;
1511
1512 /* Compute frequency of basic block. */
1513 if (bb != head)
1514 {
1515 #ifdef ENABLE_CHECKING
1516 FOR_EACH_EDGE (e, ei, bb->preds)
1517 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
1518 || (e->flags & EDGE_DFS_BACK));
1519 #endif
1520
1521 FOR_EACH_EDGE (e, ei, bb->preds)
1522 if (EDGE_INFO (e)->back_edge)
1523 {
1524 sreal_add (&cyclic_probability, &cyclic_probability,
1525 &EDGE_INFO (e)->back_edge_prob);
1526 }
1527 else if (!(e->flags & EDGE_DFS_BACK))
1528 {
1529 sreal tmp;
1530
1531 /* frequency += (e->probability
1532 * BLOCK_INFO (e->src)->frequency /
1533 REG_BR_PROB_BASE); */
1534
1535 sreal_init (&tmp, e->probability, 0);
1536 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
1537 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
1538 sreal_add (&frequency, &frequency, &tmp);
1539 }
1540
1541 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
1542 {
1543 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
1544 sizeof (frequency));
1545 }
1546 else
1547 {
1548 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
1549 {
1550 memcpy (&cyclic_probability, &real_almost_one,
1551 sizeof (real_almost_one));
1552 }
1553
1554 /* BLOCK_INFO (bb)->frequency = frequency
1555 / (1 - cyclic_probability) */
1556
1557 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
1558 sreal_div (&BLOCK_INFO (bb)->frequency,
1559 &frequency, &cyclic_probability);
1560 }
1561 }
1562
1563 bitmap_clear_bit (tovisit, bb->index);
1564
1565 e = find_edge (bb, head);
1566 if (e)
1567 {
1568 sreal tmp;
1569
1570 /* EDGE_INFO (e)->back_edge_prob
1571 = ((e->probability * BLOCK_INFO (bb)->frequency)
1572 / REG_BR_PROB_BASE); */
1573
1574 sreal_init (&tmp, e->probability, 0);
1575 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
1576 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1577 &tmp, &real_inv_br_prob_base);
1578 }
1579
1580 /* Propagate to successor blocks. */
1581 FOR_EACH_EDGE (e, ei, bb->succs)
1582 if (!(e->flags & EDGE_DFS_BACK)
1583 && BLOCK_INFO (e->dest)->npredecessors)
1584 {
1585 BLOCK_INFO (e->dest)->npredecessors--;
1586 if (!BLOCK_INFO (e->dest)->npredecessors)
1587 {
1588 if (!nextbb)
1589 nextbb = e->dest;
1590 else
1591 BLOCK_INFO (last)->next = e->dest;
1592
1593 last = e->dest;
1594 }
1595 }
1596 }
1597 }
1598
 1599 /* Estimate probabilities of loopback edges in loops at the same nesting level. */
1600
1601 static void
1602 estimate_loops_at_level (struct loop *first_loop)
1603 {
1604 struct loop *loop;
1605
1606 for (loop = first_loop; loop; loop = loop->next)
1607 {
1608 edge e;
1609 basic_block *bbs;
1610 unsigned i;
1611 bitmap tovisit = BITMAP_ALLOC (NULL);
1612
1613 estimate_loops_at_level (loop->inner);
1614
1615 /* Find current loop back edge and mark it. */
1616 e = loop_latch_edge (loop);
1617 EDGE_INFO (e)->back_edge = 1;
1618
1619 bbs = get_loop_body (loop);
1620 for (i = 0; i < loop->num_nodes; i++)
1621 bitmap_set_bit (tovisit, bbs[i]->index);
1622 free (bbs);
1623 propagate_freq (loop->header, tovisit);
1624 BITMAP_FREE (tovisit);
1625 }
1626 }
1627
1628 /* Propagates frequencies through structure of loops. */
1629
1630 static void
1631 estimate_loops (void)
1632 {
1633 bitmap tovisit = BITMAP_ALLOC (NULL);
1634 basic_block bb;
1635
1636 /* Start by estimating the frequencies in the loops. */
1637 if (current_loops)
1638 estimate_loops_at_level (current_loops->tree_root->inner);
1639
1640 /* Now propagate the frequencies through all the blocks. */
1641 FOR_ALL_BB (bb)
1642 {
1643 bitmap_set_bit (tovisit, bb->index);
1644 }
1645 propagate_freq (ENTRY_BLOCK_PTR, tovisit);
1646 BITMAP_FREE (tovisit);
1647 }
1648
 1649 /* Convert counts measured by profile-driven feedback to frequencies.
 1650    Return nonzero iff there was any nonzero execution count. */
1651
1652 int
1653 counts_to_freqs (void)
1654 {
1655 gcov_type count_max, true_count_max = 0;
1656 basic_block bb;
1657
1658 FOR_EACH_BB (bb)
1659 true_count_max = MAX (bb->count, true_count_max);
1660
1661 count_max = MAX (true_count_max, 1);
1662 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1663 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
1664
1665 return true_count_max;
1666 }
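/* For instance, with BB_FREQ_MAX of 10000 (its usual value), a block
   executed 250 times in a function whose most frequent block ran 1000
   times is assigned frequency (250 * 10000 + 500) / 1000 == 2500.  */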
1667
 1668 /* Return true if the function is likely to be expensive, so there is
 1669    no point in optimizing the performance of the prologue or epilogue or
 1670    doing inlining at the expense of code size growth.  THRESHOLD is the
 1671    limit on the number of instructions the function can execute on average and still be considered inexpensive. */
1672
1673 bool
1674 expensive_function_p (int threshold)
1675 {
1676 unsigned int sum = 0;
1677 basic_block bb;
1678 unsigned int limit;
1679
 1680 /* We cannot compute this accurately for large thresholds due to scaled
 1681    frequencies. */
1682 gcc_assert (threshold <= BB_FREQ_MAX);
1683
 1684 /* Frequencies are out of range.  This either means that the function
 1685    contains an internal loop executing more than BB_FREQ_MAX times, or
 1686    that profile feedback is available and the function has not been executed at all. */
1687 if (ENTRY_BLOCK_PTR->frequency == 0)
1688 return true;
1689
 1690 /* At most BB_FREQ_MAX^2, so overflow won't happen. */
1691 limit = ENTRY_BLOCK_PTR->frequency * threshold;
1692 FOR_EACH_BB (bb)
1693 {
1694 rtx insn;
1695
1696 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
1697 insn = NEXT_INSN (insn))
1698 if (active_insn_p (insn))
1699 {
1700 sum += bb->frequency;
1701 if (sum > limit)
1702 return true;
1703 }
1704 }
1705
1706 return false;
1707 }
1708
 1709 /* Estimate basic block frequencies from the given branch probabilities. */
1710
1711 void
1712 estimate_bb_frequencies (void)
1713 {
1714 basic_block bb;
1715 sreal freq_max;
1716
1717 if (!flag_branch_probabilities || !counts_to_freqs ())
1718 {
1719 static int real_values_initialized = 0;
1720
1721 if (!real_values_initialized)
1722 {
1723 real_values_initialized = 1;
1724 sreal_init (&real_zero, 0, 0);
1725 sreal_init (&real_one, 1, 0);
1726 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
1727 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
1728 sreal_init (&real_one_half, 1, -1);
1729 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
1730 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
1731 }
1732
1733 mark_dfs_back_edges ();
1734
1735 single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
1736
1737 /* Set up block info for each basic block. */
1738 alloc_aux_for_blocks (sizeof (struct block_info_def));
1739 alloc_aux_for_edges (sizeof (struct edge_info_def));
1740 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1741 {
1742 edge e;
1743 edge_iterator ei;
1744
1745 FOR_EACH_EDGE (e, ei, bb->succs)
1746 {
1747 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
1748 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1749 &EDGE_INFO (e)->back_edge_prob,
1750 &real_inv_br_prob_base);
1751 }
1752 }
1753
1754 /* First compute probabilities locally for each loop from innermost
1755 to outermost to examine probabilities for back edges. */
1756 estimate_loops ();
1757
1758 memcpy (&freq_max, &real_zero, sizeof (real_zero));
1759 FOR_EACH_BB (bb)
1760 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
1761 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
1762
1763 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
1764 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1765 {
1766 sreal tmp;
1767
1768 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
1769 sreal_add (&tmp, &tmp, &real_one_half);
1770 bb->frequency = sreal_to_int (&tmp);
1771 }
1772
1773 free_aux_for_blocks ();
1774 free_aux_for_edges ();
1775 }
1776 compute_function_frequency ();
1777 if (flag_reorder_functions)
1778 choose_function_section ();
1779 }
1780
1781 /* Decide whether function is hot, cold or unlikely executed. */
1782 static void
1783 compute_function_frequency (void)
1784 {
1785 basic_block bb;
1786
1787 if (!profile_info || !flag_branch_probabilities)
1788 return;
1789 cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
1790 FOR_EACH_BB (bb)
1791 {
1792 if (maybe_hot_bb_p (bb))
1793 {
1794 cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
1795 return;
1796 }
1797 if (!probably_never_executed_bb_p (bb))
1798 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
1799 }
1800 }
1801
1802 /* Choose appropriate section for the function. */
1803 static void
1804 choose_function_section (void)
1805 {
1806 if (DECL_SECTION_NAME (current_function_decl)
1807 || !targetm.have_named_sections
 1808 /* Theoretically we can split the gnu.linkonce text section too,
 1809    but this requires more work, as the frequency needs to match
 1810    for all generated objects, so we would need to merge the
 1811    frequencies of all instances.  For now just never set the frequency for these. */
1812 || DECL_ONE_ONLY (current_function_decl))
1813 return;
1814
1815 /* If we are doing the partitioning optimization, let the optimization
1816 choose the correct section into which to put things. */
1817
1818 if (flag_reorder_blocks_and_partition)
1819 return;
1820
1821 if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
1822 DECL_SECTION_NAME (current_function_decl) =
1823 build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
1824 if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
1825 DECL_SECTION_NAME (current_function_decl) =
1826 build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
1827 UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
1828 }
1829
1830 static bool
1831 gate_estimate_probability (void)
1832 {
1833 return flag_guess_branch_prob;
1834 }
1835
1836 struct tree_opt_pass pass_profile =
1837 {
1838 "profile", /* name */
1839 gate_estimate_probability, /* gate */
1840 tree_estimate_probability, /* execute */
1841 NULL, /* sub */
1842 NULL, /* next */
1843 0, /* static_pass_number */
1844 TV_BRANCH_PROB, /* tv_id */
1845 PROP_cfg, /* properties_required */
1846 0, /* properties_provided */
1847 0, /* properties_destroyed */
1848 0, /* todo_flags_start */
1849 TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1850 0 /* letter */
1851 };