/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *loop);
static void propagate_freq (struct loop *);
static void estimate_bb_frequencies (struct loops *);
static int counts_to_freqs (void);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use the given predictor without Dempster-Shafer theory if it matches
   using the first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate expressed in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

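/* A worked example, assuming the usual REG_BR_PROB_BASE of 10000:
   HITRATE (79) yields (79 * 10000 + 50) / 100 == 7900, i.e. a 79%
   hitrate maps to 7900/10000 on the fixed-point probability scale,
   and PROB_VERY_UNLIKELY above is 10000 / 10 - 1 == 999, just under
   10%.  The "+ 50" term rounds to nearest instead of truncating.  */
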
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}

void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict INSN with the given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with the given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store the probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}

/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by the given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e = bb->succ;

  if (!file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of the outgoing edges of BB.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
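
/* A worked example, again assuming REG_BR_PROB_BASE is 10000: a block
   with three ordinary successors gets (10000 + 1) / 3 == 3333 on each
   edge.  The "+ nedges / 2" term rounds the division to nearest, so
   the per-edge probabilities sum to roughly, though not always
   exactly, REG_BR_PROB_BASE.  */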

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove the now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
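
  /* The update above is the Dempster-Shafer combination rule for two
     binary hypotheses.  Writing p for the running combined probability
     and q for the new predictor's probability, both scaled so that
     REG_BR_PROB_BASE represents 1.0, it computes

       p' = p*q / (p*q + (1-p)*(1-q)).

     For example, combining two independent 60% predictions (p = q =
     6000 out of 10000) gives 0.36 / (0.36 + 0.16), roughly 6923, i.e.
     about 69% combined confidence.  */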

  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are looking at a
	 non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (bb->succ->succ_next)
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    bb->succ->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into a single probability and store it in the CFG.
   Remove the now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;

  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb_ann (bb)->predictions = NULL;
      if (file)
	fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != bb->succ)
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb_ann (bb)->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations
   by analyzing the RTL; otherwise use the tree-based approach.  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Predict the number of iterations and the exit edges of each
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (rtlsimpleloops)
	{
	  iv_analysis_loop_init (loop);
	  find_simple_exit (loop, &desc);

	  if (desc.simple_p && desc.const_iter)
	    {
	      int prob;
	      niter = desc.niter + 1;
	      if (niter == 0)	/* We might overflow here.  */
		niter = desc.niter;

	      prob = (REG_BR_PROB_BASE
		      - (REG_BR_PROB_BASE + niter / 2) / niter);
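	      /* A worked example, assuming REG_BR_PROB_BASE of 10000:
		 a simple loop with a constant trip count of 100 gives
		 niter == 101 and prob == 10000 - (10000 + 50) / 101,
		 about 9901, i.e. each execution of the exit test is
		 predicted to stay in the loop with ~99% probability.  */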
	      /* The branch prediction algorithm gives 0 frequency to
		 everything after the end of a loop that has probability 0
		 of finishing.  */
	      if (prob == REG_BR_PROB_BASE)
		prob = REG_BR_PROB_BASE - 1;
	      predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			    prob);
	    }
	}
      else
	{
	  edge *exits;
	  unsigned j, n_exits;
	  struct tree_niter_desc niter_desc;

	  exits = get_loop_exit_edges (loop, &n_exits);
	  for (j = 0; j < n_exits; j++)
	    {
	      tree niter = NULL;

	      if (number_of_iterations_exit (loop, exits[j], &niter_desc))
		niter = niter_desc.niter;
	      if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
		niter = loop_niter_by_eval (loop, exits[j]);

	      if (TREE_CODE (niter) == INTEGER_CST)
		{
		  int probability;
		  if (host_integerp (niter, 1)
		      && tree_int_cst_lt (niter,
					  build_int_cstu (NULL_TREE,
							  REG_BR_PROB_BASE - 1)))
		    {
		      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		      probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
		    }
		  else
		    probability = 1;

		  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
		}
	    }

	  free (exits);
	}

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == loop->header
		&& e->src == loop->latch)
	      {
		header_found = 1;
		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
	      }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    scev_reset ();
}

/* Attempt to predict the probabilities of BB's outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
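
/* A source-level illustration of the heuristics above: in

     if (x > 0)
       f ();
     else
       g ();

   the signed comparison against zero makes the opcode heuristic predict
   the x > 0 branch as taken, so f () is treated as the likely path.
   Similarly, a test such as "if (ptr != NULL)" is predicted as taken by
   the pointer heuristic.  */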

/* Statically estimate the probability that a branch will be taken and
   produce an estimated profile.  When profile feedback is present, the
   never executed portions of the function still get an estimate.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  iv_analysis_done ();

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;

      if (! can_predict_insn_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and the others are often used for fast
	     paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (e->dest->succ && !e->dest->succ->succ_next
		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest);
		   insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (CALL_P (insn)
		    /* Constant and pure calls are rarely used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
\f
/* Return the constant EXPR will likely have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch
   predictor, so the evidence must come from this construct plus any
   additional constant folding.

   We may want to implement a more involved value guess (such as
   prediction based on value range propagation), but such tricks shall
   go into a new implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same expected
	     constant value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN (decl) && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  /* The expected value is the second argument of __builtin_expect,
	     whether or not it turns out to be constant.  */
	  return TREE_VALUE (TREE_CHAIN (arglist));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
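
/* For instance, given source such as

     if (__builtin_expect (ptr != NULL, 1))
       use (ptr);

   expr_expected_value resolves the condition to the constant 1, and
   tree_predict_by_opcode below then predicts the then-edge as taken
   via PRED_BUILTIN_EXPECT.  */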
\f
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    {
	      TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	      modify_stmt (stmt);
	    }
	}
    }
}
\f
/* Predict using the opcode of the last statement in the basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  for (then_edge = bb->succ; then_edge; then_edge = then_edge->succ_next)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_XMALLOC ();
  val = expr_expected_value (cond, visited);
  BITMAP_XFREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Try to guess whether the return value VAL indicates an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}

/* Find the basic block with the return expression, look up its return
   value, and try to apply the return-value heuristics above.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;

  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      if (PHI_RESULT (phi) == return_val)
	break;
      phi = next;
    }
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     since then we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	    case MODIFY_EXPR:
	      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = TREE_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}

/* Predict branch probabilities and estimate the profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info, LOOP_TREE);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && bb->pred && bb->pred->pred_next)
	    {
	      edge e1;

	      for (e1 = bb->pred; e1; e1 = e1->pred_next)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are rarely used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass the profile down to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
\f
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (!JUMP_P (insn) || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
				     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
\f
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && bb->succ && !bb->succ->succ_next
	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* Set the branch predictions on the paths leading to BB according to
   PRED and TAKEN.  HEADS[bb->index] should be the index of the basic
   block in which we need to alter the branch predictions (i.e. the
   first of our dominators such that we do not post-dominate it); this
   information is filled in on demand, so -1 means it has not been
   computed yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  int y;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array, so
	 find the first dominator that we do not post-dominate (using the
	 already known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* Queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  unsigned int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* If the edge is a loopback edge, this is the probability that the
     edge will be reached given that the header is.  The estimated
     number of iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
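
/* A quick sanity check of the iteration formula: a loop whose back edge
   is taken with probability 0.9 is expected to run 1 / (1 - 0.9) == 10
   iterations.  This is the geometric-series argument that
   propagate_freq below applies when it divides a block's incoming
   frequency by (1 - cyclic_probability).  */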

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute the frequency of the basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    sreal tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		  = ((e->probability * BLOCK_INFO (bb)->frequency)
		     / REG_BR_PROB_BASE);  */

	    sreal_init (&tmp, e->probability, 0);
	    sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	    sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		       &tmp, &real_inv_br_prob_base);
	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate the probabilities of loopback edges in loops at the same
   nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for the dummy function loop.  */
	{
	  /* Find the current loop's back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}

/* Convert counts measured by profile-driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
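
/* To illustrate the scaling in counts_to_freqs: if the hottest block in
   the function was executed 2000 times and some other block 500 times,
   the latter ends up with frequency (500 * BB_FREQ_MAX + 1000) / 2000,
   roughly a quarter of BB_FREQ_MAX, while the hottest block itself
   always maps to BB_FREQ_MAX.  */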

/* Return true if the function is likely to be expensive, so there is
   no point in optimizing the performance of the prologue and epilogue
   or doing inlining at the expense of code size growth.  THRESHOLD is
   the limit on the number of instructions the function can execute on
   average and still be considered inexpensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     that profile feedback is available and the function has not been
     executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
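
/* The sum accumulated above is, up to scaling by the entry block's
   frequency, the expected number of active insns executed per function
   invocation: an insn in a block half as frequent as the entry block
   contributes half an "instruction execution".  Comparing the running
   sum against ENTRY_BLOCK_PTR->frequency * THRESHOLD is therefore
   equivalent to asking whether the average dynamic instruction count
   exceeds THRESHOLD.  */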

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold, or unlikely to be executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose the appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects, so we need to merge the frequency
	 of all instances.  For now just never set the frequency for
	 these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}


struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  NULL,					/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};