/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY  (REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN           (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY    (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS         (REG_BR_PROB_BASE)
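
/* With a REG_BR_PROB_BASE of 10000 these work out to 99, 5000, 9901
   and 10000 respectively, i.e. roughly 1%, 50%, 99% and 100%.  */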

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;   /* Name used in the debugging dumps.  */
  const int hitrate;        /* Expected hitrate used by
                               predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

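/* For example, with a REG_BR_PROB_BASE of 10000, HITRATE (72)
   evaluates to (72 * 10000 + 50) / 100 == 7200, i.e. a 72% expected
   hitrate.  */
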
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}

static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = e->src->predictions;
  e->src->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
        {
          if ((*prediction)->edge == e)
            *prediction = (*prediction)->next;
          else
            prediction = &((*prediction)->next);
        }
    }
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
          && any_condjump_p (insn)
          && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
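
/* As a worked example, assuming a REG_BR_PROB_BASE of 10000: a block
   with three eligible successors gives each edge probability
   (10000 + 1) / 3 == 3333, about one third.  */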

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32-bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }
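
  /* As a worked example of the Dempster-Shafer combination above: two
     independent predictors each giving a 72% hitrate (7200) combine to
     7200 * 7200 * 10000 / (7200 * 7200 + 2800 * 2800) + 0.5 == 8687,
     i.e. roughly 86.9% -- agreeing evidence strengthens the guess.  */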

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are looking at a
         non-degenerate conditional jump.  */
      if (!single_succ_p (bb))
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into a single probability and store it into the CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      bb->predictions = NULL;
      if (file)
        fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
        probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
        best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
           + (REG_BR_PROB_BASE - combined_probability)
           * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
        /* If one probability is 0% and one 100%, avoid division by zero.  */
        combined_probability = REG_BR_PROB_BASE / 2;
      else
        combined_probability = (((double) combined_probability) * probability
                                * REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != EDGE_SUCC (bb, 0))
        probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
                       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations
   by analyzing the RTL; otherwise use the tree-based approach.  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;
      edge *exits;

      exits = get_loop_exit_edges (loop, &n_exits);

      if (rtlsimpleloops)
        {
          iv_analysis_loop_init (loop);
          find_simple_exit (loop, &desc);

          if (desc.simple_p && desc.const_iter)
            {
              int prob;
              niter = desc.niter + 1;
              if (niter == 0)        /* We might overflow here.  */
                niter = desc.niter;

              prob = (REG_BR_PROB_BASE
                      - (REG_BR_PROB_BASE + niter / 2) / niter);
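              /* E.g. a constant trip count of 100 gives
                 10000 - (10000 + 50) / 100 == 9900, i.e. about a 99%
                 probability of staying in the loop (assuming a
                 REG_BR_PROB_BASE of 10000).  */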
              /* Branch prediction algorithm gives 0 frequency for everything
                 after the end of the loop for a loop having 0 probability
                 of finishing.  */
              if (prob == REG_BR_PROB_BASE)
                prob = REG_BR_PROB_BASE - 1;
              predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
                            prob);
            }
        }
      else
        {
          struct tree_niter_desc niter_desc;

          for (j = 0; j < n_exits; j++)
            {
              tree niter = NULL;

              if (number_of_iterations_exit (loop, exits[j], &niter_desc))
                niter = niter_desc.niter;
              if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
                niter = loop_niter_by_eval (loop, exits[j]);

              if (TREE_CODE (niter) == INTEGER_CST)
                {
                  int probability;
                  if (host_integerp (niter, 1)
                      && tree_int_cst_lt (niter,
                                          build_int_cstu (NULL_TREE,
                                                          REG_BR_PROB_BASE - 1)))
                    {
                      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
                      probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
                    }
                  else
                    probability = 1;

                  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
                }
            }

        }
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;
          edge_iterator ei;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better handled
             separately.  */
          if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
              || predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          if (bb == loop->latch)
            {
              e = find_edge (loop->latch, loop->header);
              if (e)
                {
                  header_found = 1;
                  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
                }
            }

          /* Loop exit heuristics - predict as not taken an edge exiting
             the loop if the conditional has no loop header successor.  */
          if (!header_found)
            FOR_EACH_EDGE (e, ei, bb->succs)
              if (e->dest->index < 0
                  || !flow_bb_inside_loop_p (loop, e->dest))
                predict_edge
                  (e, PRED_LOOP_EXIT,
                   (REG_BR_PROB_BASE
                    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                   / n_exits);
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    {
      scev_finalize ();
      current_loops = NULL;
    }
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
/* Statically estimate the probability that a branch will be taken and
   produce an estimated profile.  When profile feedback is present,
   never-executed portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  iv_analysis_done ();

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;
      edge_iterator ei;

      if (! can_predict_insn_p (last_insn))
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and others are often used for fast paths
             through the function.  */
          if ((e->dest == EXIT_BLOCK_PTR
               || (single_succ_p (e->dest)
                   && single_succ (e->dest) == EXIT_BLOCK_PTR))
              && !predicted_by_p (bb, PRED_NULL_RETURN)
              && !predicted_by_p (bb, PRED_CONST_RETURN)
              && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
              && !last_basic_block_p (e->dest))
            predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

          /* Look for block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              rtx insn;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
                   insn = NEXT_INSN (insn))
                if (CALL_P (insn)
                    /* Constant and pure calls are hardly used to signal
                       something exceptional.  */
                    && ! CONST_OR_PURE_CALL_P (insn))
                  {
                    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                    break;
                  }
            }
        }
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
\f
/* Return the constant value EXPR is likely to have at execution time,
   or NULL if unknown.  The function is used by the builtin_expect branch
   predictor, so the evidence must come from this construct plus possible
   additional constant folding.

   We may want to implement a more involved value guess (such as value
   range propagation based prediction), but such tricks shall go into a
   new implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
        return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
        {
          /* All the arguments of the PHI node must have the same expected
             constant value.  */
          int i;
          tree val = NULL, new_val;

          for (i = 0; i < PHI_NUM_ARGS (def); i++)
            {
              tree arg = PHI_ARG_DEF (def, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the expected value of this argument.  However,
                 if we can find an expected constant value for the other
                 PHI args then we can still be sure that this is
                 likely a constant.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def))
                continue;

              new_val = expr_expected_value (arg, visited);
              if (!new_val)
                return NULL;
              if (!val)
                val = new_val;
              else if (!operand_equal_p (val, new_val, false))
                return NULL;
            }
          return val;
        }
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
        return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
        return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
        {
          tree arglist = TREE_OPERAND (expr, 1);

          if (arglist == NULL_TREE
              || TREE_CHAIN (arglist) == NULL_TREE)
            return NULL;
          /* The expected value is the second argument of the call,
             whether or not it is constant.  */
          return TREE_VALUE (TREE_CHAIN (arglist));
        }
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
        return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  return NULL;
}
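
/* For example, given user code such as

     int x = __builtin_expect (n, 42);
     if (x == 42)
       ...

   expr_expected_value resolves the SSA name for X through its defining
   statement to the constant 42, so the comparison folds to an expected
   value of true.  (X and N here are, of course, just illustrative
   names.)  */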
\f
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
        {
          tree stmt = bsi_stmt (bi);
          tree fndecl;
          tree arglist;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
              && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
              && TREE_CHAIN (arglist))
            {
              TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
              update_stmt (stmt);
            }
        }
    }
}
\f
/* Predict using the opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Try to guess whether the return value means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
        {
          *prediction = NOT_TAKEN;
          return PRED_NULL_RETURN;
        }
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
         errors.  */
      if (TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_sgn (val) < 0)
        {
          *prediction = NOT_TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
      /* Constant return values seem to be commonly taken.
         Zero/one often represent booleans so exclude them from the
         heuristics.  */
      if (TREE_CONSTANT (val)
          && (!integer_zerop (val) && !integer_onep (val)))
        {
          *prediction = TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
    }
  return PRED_NO_PREDICTION;
}
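
/* Thus paths leading to "return NULL;" are predicted not taken via
   PRED_NULL_RETURN, paths to "return -1;" not taken via
   PRED_NEGATIVE_RETURN, while "return 0;" yields no prediction since
   zero commonly encodes a boolean.  */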

/* Find the basic block with the return expression and look up a possible
   return value, trying to apply the return-value prediction heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
        break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
          predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
                                    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);
          switch (TREE_CODE (stmt))
            {
            case MODIFY_EXPR:
              if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
                {
                  stmt = TREE_OPERAND (stmt, 1);
                  goto call_expr;
                }
              break;
            case CALL_EXPR:
            call_expr:;
              if (call_expr_flags (stmt) & ECF_NORETURN)
                predict_paths_leading_to (bb, heads, PRED_NORETURN,
                                          NOT_TAKEN);
              break;
            default:
              break;
            }
        }
    }

  free (heads);
}

/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and other cases are often used for
             fast paths through the function.  */
          if (e->dest == EXIT_BLOCK_PTR
              && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
              && !single_pred_p (bb))
            {
              edge e1;
              edge_iterator ei1;

              FOR_EACH_EDGE (e1, ei1, bb->preds)
                if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
                    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
                    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
                    && !last_basic_block_p (e1->src))
                  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
            }

          /* Look for block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              block_stmt_iterator bi;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (bi = bsi_start (e->dest); !bsi_end_p (bi);
                   bsi_next (&bi))
                {
                  tree stmt = bsi_stmt (bi);
                  if ((TREE_CODE (stmt) == CALL_EXPR
                       || (TREE_CODE (stmt) == MODIFY_EXPR
                           && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
                      /* Constant and pure calls are hardly used to signal
                         something exceptional.  */
                      && TREE_SIDE_EFFECTS (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
                    }
                }
            }
        }
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass the profile down to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
\f
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
          /* Look for expected value notes.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
            {
              ev = NOTE_EXPECTED_VALUE (insn);
              ev_reg = XEXP (ev, 0);
              delete_insn (insn);
            }
          continue;

        case CODE_LABEL:
          /* Never propagate across labels.  */
          ev = NULL_RTX;
          continue;

        case JUMP_INSN:
          /* Look for simple conditional branches.  If we haven't got an
             expected value yet, no point going further.  */
          if (!JUMP_P (insn) || ev == NULL_RTX
              || ! any_condjump_p (insn))
            continue;
          break;

        default:
          /* Look for insns that clobber the EV register.  */
          if (ev && reg_set_p (ev_reg, insn))
            ev = NULL_RTX;
          continue;
        }

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
           (expected_value (eq r70 0))
           (set r71 -1)
           (set r80 (lt r70 r71))
           (set pc (if_then_else (ne r80 0) ...))
         as canonicalize_condition will render this to us as
           (lt r70, r71)
         Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
                                     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
          || GET_CODE (XEXP (cond, 1)) != CONST_INT)
        continue;

      /* Substitute and simplify.  Given that the expression we're
         building involves two constants, we should wind up with either
         true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
                             XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      gcc_assert (cond == const_true_rtx || cond == const0_rtx);
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
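
/* E.g. with an (expected_value (eq r70 0)) note in effect, a jump
   conditional on (ne r70 (const_int 0)) substitutes to (ne 0 0), which
   simplifies to const0_rtx, so the branch is predicted not taken.  */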
\f
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
              && single_succ_p (bb)
              && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
}

/* Sets branch probabilities according to PREDiction and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so -1 may be there in case this was not needed yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
                          enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array;
         find the first dominator that we do not post-dominate (we are
         using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
        {
          if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
            break;
          heads[next_ai->index] = ai->index;
          ai = next_ai;
          next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
        }
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
        head = next_ai->index;
      else
        head = heads[next_ai->index];
      while (next_ai != bb)
        {
          next_ai = ai;
          if (heads[ai->index] == ENTRY_BLOCK)
            ai = ENTRY_BLOCK_PTR;
          else
            ai = BASIC_BLOCK (heads[ai->index]);
          heads[next_ai->index] = head;
        }
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= 0
        && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached given that the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)  ((block_info) (B)->aux)
#define EDGE_INFO(E)   ((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
         look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
         directly.  Do the same for the entry block.  */
      if (i == (unsigned) ENTRY_BLOCK)
        bb = ENTRY_BLOCK_PTR;
      else if (i == (unsigned) EXIT_BLOCK)
        bb = EXIT_BLOCK_PTR;
      else
        bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge to %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
                        || (e->flags & EDGE_DFS_BACK));
#endif

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency
                                 / REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
        {
          sreal tmp;

          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
                / REG_BR_PROB_BASE);  */

          sreal_init (&tmp, e->probability, 0);
          sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
          sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                     &tmp, &real_inv_br_prob_base);
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}
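
/* For instance, as noted above for back_edge_prob: if a loop's single
   back edge ends up with back_edge_prob 0.9, the cyclic_probability
   correction gives the loop header a frequency of 1 / (1 - 0.9) == 10
   relative to the loop entry -- the estimated iteration count.  */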

/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for the dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
        }

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
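
/* E.g. with a BB_FREQ_MAX of 10000 and a hottest count of 200, a block
   executed 50 times is scaled to frequency
   (50 * 10000 + 100) / 200 == 2500.  */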

/* Return true if function is likely to be expensive, so there is no point
   in optimizing performance of the prologue, epilogue or doing inlining
   at the expense of code size growth.  THRESHOLD is the limit on the
   number of instructions the function can execute on average to still be
   considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* At most BB_FREQ_MAX^2, so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}
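
/* In effect the sum accumulates bb->frequency once per active insn, so
   SUM / ENTRY_BLOCK_PTR->frequency approximates the average number of
   insns executed per invocation; the limit check stops the walk as soon
   as that average would exceed THRESHOLD.  */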

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
         but this requires more work as the frequency needs to match
         for all generated objects so we need to merge the frequency
         of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}


struct tree_opt_pass pass_profile =
{
  "profile",                            /* name */
  NULL,                                 /* gate */
  tree_estimate_probability,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,   /* todo_flags_finish */
  0                                     /* letter */
};