gcc/predict.c
1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 /* References:
22
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
29
30
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "tree.h"
36 #include "rtl.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "output.h"
44 #include "function.h"
45 #include "except.h"
46 #include "toplev.h"
47 #include "recog.h"
48 #include "expr.h"
49 #include "predict.h"
50 #include "coverage.h"
51 #include "sreal.h"
52 #include "params.h"
53 #include "target.h"
54 #include "cfgloop.h"
55 #include "tree-flow.h"
56 #include "ggc.h"
57 #include "tree-dump.h"
58 #include "tree-pass.h"
59 #include "timevar.h"
60 #include "tree-scalar-evolution.h"
61 #include "cfgloop.h"
62
63 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
64 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
65 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
66 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
67
68 /* Random guesstimation given names. */
69 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 10 - 1)
70 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
71 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
72 #define PROB_ALWAYS (REG_BR_PROB_BASE)
73
74 static void combine_predictions_for_insn (rtx, basic_block);
75 static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
76 static void estimate_loops_at_level (struct loop *, bitmap);
77 static void propagate_freq (struct loop *, bitmap);
78 static void estimate_bb_frequencies (struct loops *);
79 static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
80 static bool last_basic_block_p (basic_block);
81 static void compute_function_frequency (void);
82 static void choose_function_section (void);
83 static bool can_predict_insn_p (rtx);
84
85 /* Information we hold about each branch predictor.
86 Filled using information from predict.def. */
87
88 struct predictor_info
89 {
90 const char *const name; /* Name used in the debugging dumps. */
91 const int hitrate; /* Expected hitrate used by
92 predict_insn_def call. */
93 const int flags;
94 };
95
96 /* Use the given predictor without Dempster-Shafer theory if it matches
97 using first_match heuristics. */
98 #define PRED_FLAG_FIRST_MATCH 1
99
100 /* Convert a hitrate given in percent to our fixed-point representation. */
101
102 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
103
104 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
105 static const struct predictor_info predictor_info[]= {
106 #include "predict.def"
107
108 /* Upper bound on predictors. */
109 {NULL, 0, 0}
110 };
111 #undef DEF_PREDICTOR
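
/* An illustrative aside (not part of predict.def): each DEF_PREDICTOR line
   in predict.def expands, through the macro above, into one initializer of
   predictor_info. A hypothetical entry and its expansion, assuming
   REG_BR_PROB_BASE == 10000, would look like: */
#if 0
DEF_PREDICTOR (PRED_EXAMPLE, "example", HITRATE (72), 0)
/* ...becomes, since HITRATE (72) == (72 * 10000 + 50) / 100 == 7200: */
{"example", 7200, 0},
#endif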
112
113 /* Return true if BB can be CPU intensive and should be optimized
114 for maximal performance. */
115
116 bool
117 maybe_hot_bb_p (basic_block bb)
118 {
119 if (profile_info && flag_branch_probabilities
120 && (bb->count
121 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
122 return false;
123 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
124 return false;
125 return true;
126 }
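
/* A self-contained sketch (not part of this file) of the two tests above,
   with the thresholds folded in. The numbers are illustrative assumptions,
   not GCC's actual parameter defaults: sum_max == 100000,
   HOT_BB_COUNT_FRACTION == 10000, BB_FREQ_MAX == 10000 and
   HOT_BB_FREQUENCY_FRACTION == 1000. */
#if 0
#include <stdbool.h>

static bool
maybe_hot_example (long long count, int frequency, bool have_profile)
{
  if (have_profile && count < 100000 / 10000)  /* Count threshold: 10.  */
    return false;
  if (frequency < 10000 / 1000)                /* Frequency threshold: 10.  */
    return false;
  return true;                                 /* Otherwise assume hot.  */
}
#endif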
127
128 /* Return true if BB is cold and should be optimized for size. */
129
130 bool
131 probably_cold_bb_p (basic_block bb)
132 {
133 if (profile_info && flag_branch_probabilities
134 && (bb->count
135 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
136 return true;
137 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
138 return true;
139 return false;
140 }
141
142 /* Return true if BB is probably never executed. */
143 bool
144 probably_never_executed_bb_p (basic_block bb)
145 {
146 if (profile_info && flag_branch_probabilities)
147 return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
148 return false;
149 }
150
151 /* Return true if one of the outgoing edges is already predicted by
152 PREDICTOR. */
153
154 bool
155 rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
156 {
157 rtx note;
158 if (!INSN_P (BB_END (bb)))
159 return false;
160 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
161 if (REG_NOTE_KIND (note) == REG_BR_PRED
162 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
163 return true;
164 return false;
165 }
166
167 /* Return true if one of the outgoing edges is already predicted by
168 PREDICTOR. */
169
170 bool
171 tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
172 {
173 struct edge_prediction *i;
174 for (i = bb_ann (bb)->predictions; i; i = i->next)
175 if (i->predictor == predictor)
176 return true;
177 return false;
178 }
179
180 static void
181 predict_insn (rtx insn, enum br_predictor predictor, int probability)
182 {
183 if (!any_condjump_p (insn))
184 abort ();
185 if (!flag_guess_branch_prob)
186 return;
187
188 REG_NOTES (insn)
189 = gen_rtx_EXPR_LIST (REG_BR_PRED,
190 gen_rtx_CONCAT (VOIDmode,
191 GEN_INT ((int) predictor),
192 GEN_INT ((int) probability)),
193 REG_NOTES (insn));
194 }
195
196 /* Predict insn by given predictor. */
197
198 void
199 predict_insn_def (rtx insn, enum br_predictor predictor,
200 enum prediction taken)
201 {
202 int probability = predictor_info[(int) predictor].hitrate;
203
204 if (taken != TAKEN)
205 probability = REG_BR_PROB_BASE - probability;
206
207 predict_insn (insn, predictor, probability);
208 }
209
210 /* Predict edge E with given probability if possible. */
211
212 void
213 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
214 {
215 rtx last_insn;
216 last_insn = BB_END (e->src);
217
218 /* We can store the branch prediction information only about
219 conditional jumps. */
220 if (!any_condjump_p (last_insn))
221 return;
222
223 /* We always store probability of branching. */
224 if (e->flags & EDGE_FALLTHRU)
225 probability = REG_BR_PROB_BASE - probability;
226
227 predict_insn (last_insn, predictor, probability);
228 }
229
230 /* Predict edge E with the given PROBABILITY. */
231 void
232 tree_predict_edge (edge e, enum br_predictor predictor, int probability)
233 {
234 struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));
235
236 i->next = bb_ann (e->src)->predictions;
237 bb_ann (e->src)->predictions = i;
238 i->probability = probability;
239 i->predictor = predictor;
240 i->edge = e;
241 }
242
243 /* Return true when we can store a prediction on insn INSN.
244 At the moment we represent predictions only on conditional
245 jumps, not on computed jumps or other complicated cases. */
246 static bool
247 can_predict_insn_p (rtx insn)
248 {
249 return (JUMP_P (insn)
250 && any_condjump_p (insn)
251 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
252 }
253
254 /* Predict edge E by given predictor if possible. */
255
256 void
257 predict_edge_def (edge e, enum br_predictor predictor,
258 enum prediction taken)
259 {
260 int probability = predictor_info[(int) predictor].hitrate;
261
262 if (taken != TAKEN)
263 probability = REG_BR_PROB_BASE - probability;
264
265 predict_edge (e, predictor, probability);
266 }
267
268 /* Invert all branch predictions or probability notes in the INSN. This needs
269 to be done each time we invert the condition used by the jump. */
270
271 void
272 invert_br_probabilities (rtx insn)
273 {
274 rtx note;
275
276 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
277 if (REG_NOTE_KIND (note) == REG_BR_PROB)
278 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
279 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
280 XEXP (XEXP (note, 0), 1)
281 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
282 }
283
284 /* Dump information about the branch prediction to the output file. */
285
286 static void
287 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
288 basic_block bb, int used)
289 {
290 edge e;
291 edge_iterator ei;
292
293 if (!file)
294 return;
295
296 FOR_EACH_EDGE (e, ei, bb->succs)
297 if (! (e->flags & EDGE_FALLTHRU))
298 break;
299
300 fprintf (file, " %s heuristics%s: %.1f%%",
301 predictor_info[predictor].name,
302 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
303
304 if (bb->count)
305 {
306 fprintf (file, " exec ");
307 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
308 if (e)
309 {
310 fprintf (file, " hit ");
311 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
312 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
313 }
314 }
315
316 fprintf (file, "\n");
317 }
318
319 /* We cannot predict the probabilities of the outgoing edges of BB. Set them
320 evenly and hope for the best. */
321 static void
322 set_even_probabilities (basic_block bb)
323 {
324 int nedges = 0;
325 edge e;
326 edge_iterator ei;
327
328 FOR_EACH_EDGE (e, ei, bb->succs)
329 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
330 nedges ++;
331 FOR_EACH_EDGE (e, ei, bb->succs)
332 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
333 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
334 else
335 e->probability = 0;
336 }
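
/* A worked example (not part of this file), assuming REG_BR_PROB_BASE
   == 10000: the nedges / 2 term makes the division round to nearest. */
#if 0
#include <stdio.h>

int
main (void)
{
  int nedges = 3;
  /* Three comparable successors each get (10000 + 1) / 3 == 3333,
     i.e. roughly one third of REG_BR_PROB_BASE; prints 3333.  */
  printf ("%d\n", (10000 + nedges / 2) / nedges);
  return 0;
}
#endif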
337
338 /* Combine all REG_BR_PRED notes into a single probability and attach a
339 REG_BR_PROB note if not already present. Remove the now-useless REG_BR_PRED notes. */
340
341 static void
342 combine_predictions_for_insn (rtx insn, basic_block bb)
343 {
344 rtx prob_note;
345 rtx *pnote;
346 rtx note;
347 int best_probability = PROB_EVEN;
348 int best_predictor = END_PREDICTORS;
349 int combined_probability = REG_BR_PROB_BASE / 2;
350 int d;
351 bool first_match = false;
352 bool found = false;
353
354 if (!can_predict_insn_p (insn))
355 {
356 set_even_probabilities (bb);
357 return;
358 }
359
360 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
361 pnote = &REG_NOTES (insn);
362 if (dump_file)
363 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
364 bb->index);
365
366 /* We implement "first match" heuristics and use the probability guessed
367 by the predictor with the smallest index. */
368 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
369 if (REG_NOTE_KIND (note) == REG_BR_PRED)
370 {
371 int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
372 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
373
374 found = true;
375 if (best_predictor > predictor)
376 best_probability = probability, best_predictor = predictor;
377
378 d = (combined_probability * probability
379 + (REG_BR_PROB_BASE - combined_probability)
380 * (REG_BR_PROB_BASE - probability));
381
382 /* Use FP math to avoid overflows of 32-bit integers. */
383 if (d == 0)
384 /* If one probability is 0% and one 100%, avoid division by zero. */
385 combined_probability = REG_BR_PROB_BASE / 2;
386 else
387 combined_probability = (((double) combined_probability) * probability
388 * REG_BR_PROB_BASE / d + 0.5);
389 }
390
391 /* Decide which heuristic to use. If we didn't match anything, use the
392 no_prediction heuristic; if we did match, use either first match or
393 Dempster-Shafer theory depending on the flags. */
394
395 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
396 first_match = true;
397
398 if (!found)
399 dump_prediction (dump_file, PRED_NO_PREDICTION,
400 combined_probability, bb, true);
401 else
402 {
403 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
404 bb, !first_match);
405 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
406 bb, first_match);
407 }
408
409 if (first_match)
410 combined_probability = best_probability;
411 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
412
413 while (*pnote)
414 {
415 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
416 {
417 int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
418 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
419
420 dump_prediction (dump_file, predictor, probability, bb,
421 !first_match || best_predictor == predictor);
422 *pnote = XEXP (*pnote, 1);
423 }
424 else
425 pnote = &XEXP (*pnote, 1);
426 }
427
428 if (!prob_note)
429 {
430 REG_NOTES (insn)
431 = gen_rtx_EXPR_LIST (REG_BR_PROB,
432 GEN_INT (combined_probability), REG_NOTES (insn));
433
434 /* Save the prediction into the CFG in case we are seeing a non-degenerate
435 conditional jump. */
436 if (EDGE_COUNT (bb->succs) > 1)
437 {
438 BRANCH_EDGE (bb)->probability = combined_probability;
439 FALLTHRU_EDGE (bb)->probability
440 = REG_BR_PROB_BASE - combined_probability;
441 }
442 }
443 else if (EDGE_COUNT (bb->succs) > 1)
444 {
445 int prob = INTVAL (XEXP (prob_note, 0));
446
447 BRANCH_EDGE (bb)->probability = prob;
448 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
449 }
450 else
451 EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
452 }
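
/* A self-contained sketch (not part of this file) of the combination rule
   used above: the two-hypothesis Dempster-Shafer rule
   p * q / (p * q + (1 - p) * (1 - q)). The real code works in
   REG_BR_PROB_BASE fixed point and guards against d == 0. */
#if 0
#include <stdio.h>

static double
ds_combine (double p, double q)
{
  double d = p * q + (1.0 - p) * (1.0 - q);
  return d == 0.0 ? 0.5 : p * q / d;
}

int
main (void)
{
  /* Two predictors agreeing at 70% reinforce each other to ~84.5%.  */
  printf ("%f\n", ds_combine (0.70, 0.70));
  /* A 50% predictor is neutral: combining with it changes nothing.  */
  printf ("%f\n", ds_combine (0.70, 0.50));
  return 0;
}
#endif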
453
454 /* Combine predictions into a single probability and store it in the CFG.
455 Remove the now-useless prediction entries. */
456
457 static void
458 combine_predictions_for_bb (FILE *file, basic_block bb)
459 {
460 int best_probability = PROB_EVEN;
461 int best_predictor = END_PREDICTORS;
462 int combined_probability = REG_BR_PROB_BASE / 2;
463 int d;
464 bool first_match = false;
465 bool found = false;
466 struct edge_prediction *pred;
467 int nedges = 0;
468 edge e, first = NULL, second = NULL;
469 edge_iterator ei;
470
471 FOR_EACH_EDGE (e, ei, bb->succs)
472 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
473 {
474 nedges ++;
475 if (first && !second)
476 second = e;
477 if (!first)
478 first = e;
479 }
480
481 /* When there is no successor or only one choice, prediction is easy.
482
483 We are lazy for now and predict only basic blocks with two outgoing
484 edges. It is possible to predict the generic case too, but we have to
485 ignore first-match heuristics and do more involved combining. Implement
486 this later. */
487 if (nedges != 2)
488 {
489 if (!bb->count)
490 set_even_probabilities (bb);
491 bb_ann (bb)->predictions = NULL;
492 if (file)
493 fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
494 nedges, bb->index);
495 return;
496 }
497
498 if (file)
499 fprintf (file, "Predictions for bb %i\n", bb->index);
500
501 /* We implement "first match" heuristics and use the probability guessed
502 by the predictor with the smallest index. */
503 for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
504 {
505 int predictor = pred->predictor;
506 int probability = pred->probability;
507
508 if (pred->edge != first)
509 probability = REG_BR_PROB_BASE - probability;
510
511 found = true;
512 if (best_predictor > predictor)
513 best_probability = probability, best_predictor = predictor;
514
515 d = (combined_probability * probability
516 + (REG_BR_PROB_BASE - combined_probability)
517 * (REG_BR_PROB_BASE - probability));
518
519 /* Use FP math to avoid overflows of 32-bit integers. */
520 if (d == 0)
521 /* If one probability is 0% and one 100%, avoid division by zero. */
522 combined_probability = REG_BR_PROB_BASE / 2;
523 else
524 combined_probability = (((double) combined_probability) * probability
525 * REG_BR_PROB_BASE / d + 0.5);
526 }
527
528 /* Decide which heuristic to use. If we didn't match anything, use the
529 no_prediction heuristic; if we did match, use either first match or
530 Dempster-Shafer theory depending on the flags. */
531
532 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
533 first_match = true;
534
535 if (!found)
536 dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
537 else
538 {
539 dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
540 !first_match);
541 dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
542 first_match);
543 }
544
545 if (first_match)
546 combined_probability = best_probability;
547 dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);
548
549 for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
550 {
551 int predictor = pred->predictor;
552 int probability = pred->probability;
553
554 if (pred->edge != EDGE_SUCC (bb, 0))
555 probability = REG_BR_PROB_BASE - probability;
556 dump_prediction (file, predictor, probability, bb,
557 !first_match || best_predictor == predictor);
558 }
559 bb_ann (bb)->predictions = NULL;
560
561 if (!bb->count)
562 {
563 first->probability = combined_probability;
564 second->probability = REG_BR_PROB_BASE - combined_probability;
565 }
566 }
567
568 /* Predict edge probabilities by exploiting loop structure.
569 When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
570 analyzing the RTL; otherwise use the tree-based approach. */
571 static void
572 predict_loops (struct loops *loops_info, bool rtlsimpleloops)
573 {
574 unsigned i;
575
576 if (!rtlsimpleloops)
577 scev_initialize (loops_info);
578
579 /* For each natural loop, predict its exit edges and, where an iteration
580 count can be determined, its latch edge. */
581 for (i = 1; i < loops_info->num; i++)
582 {
583 basic_block bb, *bbs;
584 unsigned j;
585 int exits;
586 struct loop *loop = loops_info->parray[i];
587 struct niter_desc desc;
588 unsigned HOST_WIDE_INT niter;
589
590 flow_loop_scan (loop, LOOP_EXIT_EDGES);
591 exits = loop->num_exits;
592
593 if (rtlsimpleloops)
594 {
595 iv_analysis_loop_init (loop);
596 find_simple_exit (loop, &desc);
597
598 if (desc.simple_p && desc.const_iter)
599 {
600 int prob;
601 niter = desc.niter + 1;
602 if (niter == 0) /* We might overflow here. */
603 niter = desc.niter;
604
605 prob = (REG_BR_PROB_BASE
606 - (REG_BR_PROB_BASE + niter /2) / niter);
607 /* The branch prediction algorithm gives 0 frequency for everything
608 after the end of a loop that has 0 probability of finishing. */
609 if (prob == REG_BR_PROB_BASE)
610 prob = REG_BR_PROB_BASE - 1;
611 predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
612 prob);
613 }
614 }
615 else
616 {
617 edge *exits;
618 unsigned j, n_exits;
619 struct tree_niter_desc niter_desc;
620
621 exits = get_loop_exit_edges (loop, &n_exits);
622 for (j = 0; j < n_exits; j++)
623 {
624 tree niter = NULL;
625
626 if (number_of_iterations_exit (loop, exits[j], &niter_desc))
627 niter = niter_desc.niter;
628 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
629 niter = loop_niter_by_eval (loop, exits[j]);
630
631 if (TREE_CODE (niter) == INTEGER_CST)
632 {
633 int probability;
634 if (host_integerp (niter, 1)
635 && tree_int_cst_lt (niter,
636 build_int_cstu (NULL_TREE,
637 REG_BR_PROB_BASE - 1)))
638 {
639 HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
640 probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
641 }
642 else
643 probability = 1;
644
645 predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
646 }
647 }
648
649 free (exits);
650 }
651
652 bbs = get_loop_body (loop);
653
654 for (j = 0; j < loop->num_nodes; j++)
655 {
656 int header_found = 0;
657 edge e;
658 edge_iterator ei;
659
660 bb = bbs[j];
661
662 /* Bypass loop heuristics on continue statements. These
663 statements construct loops via "non-loop" constructs
664 in the source language and are better handled
665 separately. */
666 if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
667 || predicted_by_p (bb, PRED_CONTINUE))
668 continue;
669
670 /* Loop branch heuristics - predict an edge back to a
671 loop's head as taken. */
672 if (bb == loop->latch)
673 {
674 e = find_edge (loop->latch, loop->header);
675 if (e)
676 {
677 header_found = 1;
678 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
679 }
680 }
681
682 /* Loop exit heuristics - predict an edge exiting the loop as not
683 taken if the conditional has no loop-header successors. */
684 if (!header_found)
685 FOR_EACH_EDGE (e, ei, bb->succs)
686 if (e->dest->index < 0
687 || !flow_bb_inside_loop_p (loop, e->dest))
688 predict_edge
689 (e, PRED_LOOP_EXIT,
690 (REG_BR_PROB_BASE
691 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
692 / exits);
693 }
694
695 /* Free basic blocks from get_loop_body. */
696 free (bbs);
697 }
698
699 if (!rtlsimpleloops)
700 scev_finalize ();
701 }
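
/* A worked example (not part of this file) of the iteration-count formula
   used above, assuming REG_BR_PROB_BASE == 10000: a loop known to run
   niter times exits on one execution out of niter, so the edge staying in
   the loop gets probability REG_BR_PROB_BASE - BASE / niter (rounded). */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long long niter = 100;
  int prob = 10000 - (int) ((10000 + niter / 2) / niter);
  printf ("%d\n", prob);   /* Prints 9900, i.e. taken 99 times in 100.  */
  return 0;
}
#endif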
702
703 /* Attempt to predict probabilities of BB outgoing edges using local
704 properties. */
705 static void
706 bb_estimate_probability_locally (basic_block bb)
707 {
708 rtx last_insn = BB_END (bb);
709 rtx cond;
710
711 if (! can_predict_insn_p (last_insn))
712 return;
713 cond = get_condition (last_insn, NULL, false, false);
714 if (! cond)
715 return;
716
717 /* Try "pointer heuristic."
718 A comparison ptr == 0 is predicted as false.
719 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
720 if (COMPARISON_P (cond)
721 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
722 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
723 {
724 if (GET_CODE (cond) == EQ)
725 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
726 else if (GET_CODE (cond) == NE)
727 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
728 }
729 else
730
731 /* Try "opcode heuristic."
732 EQ tests are usually false and NE tests are usually true. Also,
733 most quantities are positive, so we can make the appropriate guesses
734 about signed comparisons against zero. */
735 switch (GET_CODE (cond))
736 {
737 case CONST_INT:
738 /* Unconditional branch. */
739 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
740 cond == const0_rtx ? NOT_TAKEN : TAKEN);
741 break;
742
743 case EQ:
744 case UNEQ:
745 /* Floating point comparisons appear to behave in a very
746 unpredictable way because of the special role of = tests in
747 FP code. */
748 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
749 ;
750 /* Comparisons with 0 are often used for booleans and there is
751 nothing useful to predict about them. */
752 else if (XEXP (cond, 1) == const0_rtx
753 || XEXP (cond, 0) == const0_rtx)
754 ;
755 else
756 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
757 break;
758
759 case NE:
760 case LTGT:
761 /* Floating point comparisons appear to behave in a very
762 unpredictable way because of the special role of = tests in
763 FP code. */
764 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
765 ;
766 /* Comparisons with 0 are often used for booleans and there is
767 nothing useful to predict about them. */
768 else if (XEXP (cond, 1) == const0_rtx
769 || XEXP (cond, 0) == const0_rtx)
770 ;
771 else
772 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
773 break;
774
775 case ORDERED:
776 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
777 break;
778
779 case UNORDERED:
780 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
781 break;
782
783 case LE:
784 case LT:
785 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
786 || XEXP (cond, 1) == constm1_rtx)
787 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
788 break;
789
790 case GE:
791 case GT:
792 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
793 || XEXP (cond, 1) == constm1_rtx)
794 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
795 break;
796
797 default:
798 break;
799 }
800 }
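
/* Source-level idioms (illustrative, not from this file) that the pointer
   and opcode heuristics above are aimed at: */
#if 0
void
example (int *p, int n)
{
  if (p == 0)     /* Pointer heuristic: EQ on a pointer, predicted false.  */
    return;
  if (n < 0)      /* Opcode heuristic: LT against zero, predicted false,
                     since most quantities are positive.  */
    return;
  if (n != 0)     /* Opcode heuristic: NE, predicted true.  */
    *p = n;
}
#endif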
801
802 /* Statically estimate the probability that a branch will be taken and produce
803 an estimated profile. When profile feedback is present, only the never
804 executed portions of the function get estimated. */
805
806 void
807 estimate_probability (struct loops *loops_info)
808 {
809 basic_block bb;
810
811 connect_infinite_loops_to_exit ();
812 calculate_dominance_info (CDI_DOMINATORS);
813 calculate_dominance_info (CDI_POST_DOMINATORS);
814
815 predict_loops (loops_info, true);
816
817 iv_analysis_done ();
818
819 /* Attempt to predict conditional jumps using a number of heuristics. */
820 FOR_EACH_BB (bb)
821 {
822 rtx last_insn = BB_END (bb);
823 edge e;
824 edge_iterator ei;
825
826 if (! can_predict_insn_p (last_insn))
827 continue;
828
829 FOR_EACH_EDGE (e, ei, bb->succs)
830 {
831 /* Predict early returns to be probable, as we've already taken
832 care of error returns, and others are often used for fast paths
833 through the function. */
834 if ((e->dest == EXIT_BLOCK_PTR
835 || (EDGE_COUNT (e->dest->succs) == 1
836 && EDGE_SUCC (e->dest, 0)->dest == EXIT_BLOCK_PTR))
837 && !predicted_by_p (bb, PRED_NULL_RETURN)
838 && !predicted_by_p (bb, PRED_CONST_RETURN)
839 && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
840 && !last_basic_block_p (e->dest))
841 predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);
842
843 /* Look for the block we are guarding (i.e. we dominate it,
844 but it doesn't postdominate us). */
845 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
846 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
847 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
848 {
849 rtx insn;
850
851 /* The call heuristic claims that a guarded function call
852 is improbable. This is because such calls are often used
853 to signal exceptional situations such as printing error
854 messages. */
855 for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
856 insn = NEXT_INSN (insn))
857 if (CALL_P (insn)
858 /* Constant and pure calls are hardly used to signal
859 something exceptional. */
860 && ! CONST_OR_PURE_CALL_P (insn))
861 {
862 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
863 break;
864 }
865 }
866 }
867 bb_estimate_probability_locally (bb);
868 }
869
870 /* Attach the combined probability to each conditional jump. */
871 FOR_EACH_BB (bb)
872 combine_predictions_for_insn (BB_END (bb), bb);
873
874 remove_fake_edges ();
875 estimate_bb_frequencies (loops_info);
876 free_dominance_info (CDI_POST_DOMINATORS);
877 if (profile_status == PROFILE_ABSENT)
878 profile_status = PROFILE_GUESSED;
879 }
880
881 /* Set edge->probability for each successor edge of BB. */
882 void
883 guess_outgoing_edge_probabilities (basic_block bb)
884 {
885 bb_estimate_probability_locally (bb);
886 combine_predictions_for_insn (BB_END (bb), bb);
887 }
888 \f
889 /* Return the constant value EXPR will likely have at execution time, or
890 NULL if unknown. The function is used by the builtin_expect branch
891 predictor, so the evidence must come from this construct and possible
892 additional constant folding.
893
894 We may want to implement a more involved value guess (such as value range
895 propagation based prediction), but such tricks should go into a new implementation. */
896
897 static tree
898 expr_expected_value (tree expr, bitmap visited)
899 {
900 if (TREE_CONSTANT (expr))
901 return expr;
902 else if (TREE_CODE (expr) == SSA_NAME)
903 {
904 tree def = SSA_NAME_DEF_STMT (expr);
905
906 /* If we were already here, break the infinite cycle. */
907 if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
908 return NULL;
909 bitmap_set_bit (visited, SSA_NAME_VERSION (expr));
910
911 if (TREE_CODE (def) == PHI_NODE)
912 {
913 /* All the arguments of the PHI node must have the same expected
914 constant value. */
915 int i;
916 tree val = NULL, new_val;
917
918 for (i = 0; i < PHI_NUM_ARGS (def); i++)
919 {
920 tree arg = PHI_ARG_DEF (def, i);
921
922 /* If this PHI has itself as an argument, we cannot
923 determine the expected value of this argument. However,
924 if we can find an expected constant value for the other
925 PHI args then we can still be sure that this is
926 likely a constant. So be optimistic and just
927 continue with the next argument. */
928 if (arg == PHI_RESULT (def))
929 continue;
930
931 new_val = expr_expected_value (arg, visited);
932 if (!new_val)
933 return NULL;
934 if (!val)
935 val = new_val;
936 else if (!operand_equal_p (val, new_val, false))
937 return NULL;
938 }
939 return val;
940 }
941 if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
942 return NULL;
943 return expr_expected_value (TREE_OPERAND (def, 1), visited);
944 }
945 else if (TREE_CODE (expr) == CALL_EXPR)
946 {
947 tree decl = get_callee_fndecl (expr);
948 if (!decl)
949 return NULL;
950 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
951 && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
952 {
953 tree arglist = TREE_OPERAND (expr, 1);
954 tree val;
955
956 if (arglist == NULL_TREE
957 || TREE_CHAIN (arglist) == NULL_TREE)
958 return NULL;
959 val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
960 if (TREE_CONSTANT (val))
961 return val;
962 return val;
963 }
964 }
965 if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
966 {
967 tree op0, op1, res;
968 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
969 if (!op0)
970 return NULL;
971 op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
972 if (!op1)
973 return NULL;
974 res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
975 if (TREE_CONSTANT (res))
976 return res;
977 return NULL;
978 }
979 if (UNARY_CLASS_P (expr))
980 {
981 tree op0, res;
982 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
983 if (!op0)
984 return NULL;
985 res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
986 if (TREE_CONSTANT (res))
987 return res;
988 return NULL;
989 }
990 return NULL;
991 }
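
/* How the evidence consumed by expr_expected_value arrives at the source
   level (an illustrative sketch, not from this file): the second argument
   of __builtin_expect is the expected value that the code above digs back
   out of the intermediate form. */
#if 0
#define unlikely(x) __builtin_expect ((x), 0)

int
example (int err)
{
  if (unlikely (err != 0))   /* Branch predicted not taken.  */
    return -1;
  return 0;
}
#endif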
992 \f
993 /* Get rid of all builtin_expect calls we no longer need. */
994 static void
995 strip_builtin_expect (void)
996 {
997 basic_block bb;
998 FOR_EACH_BB (bb)
999 {
1000 block_stmt_iterator bi;
1001 for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
1002 {
1003 tree stmt = bsi_stmt (bi);
1004 tree fndecl;
1005 tree arglist;
1006
1007 if (TREE_CODE (stmt) == MODIFY_EXPR
1008 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
1009 && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
1010 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1011 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
1012 && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
1013 && TREE_CHAIN (arglist))
1014 {
1015 TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
1016 modify_stmt (stmt);
1017 }
1018 }
1019 }
1020 }
1021 \f
1022 /* Predict using opcode of the last statement in basic block. */
1023 static void
1024 tree_predict_by_opcode (basic_block bb)
1025 {
1026 tree stmt = last_stmt (bb);
1027 edge then_edge;
1028 tree cond;
1029 tree op0;
1030 tree type;
1031 tree val;
1032 bitmap visited;
1033 edge_iterator ei;
1034
1035 if (!stmt || TREE_CODE (stmt) != COND_EXPR)
1036 return;
1037 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1038 if (then_edge->flags & EDGE_TRUE_VALUE)
1039 break;
1040 cond = TREE_OPERAND (stmt, 0);
1041 if (!COMPARISON_CLASS_P (cond))
1042 return;
1043 op0 = TREE_OPERAND (cond, 0);
1044 type = TREE_TYPE (op0);
1045 visited = BITMAP_XMALLOC ();
1046 val = expr_expected_value (cond, visited);
1047 BITMAP_XFREE (visited);
1048 if (val)
1049 {
1050 if (integer_zerop (val))
1051 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
1052 else
1053 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
1054 return;
1055 }
1056 /* Try "pointer heuristic."
1057 A comparison ptr == 0 is predicted as false.
1058 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1059 if (POINTER_TYPE_P (type))
1060 {
1061 if (TREE_CODE (cond) == EQ_EXPR)
1062 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
1063 else if (TREE_CODE (cond) == NE_EXPR)
1064 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1065 }
1066 else
1067
1068 /* Try "opcode heuristic."
1069 EQ tests are usually false and NE tests are usually true. Also,
1070 most quantities are positive, so we can make the appropriate guesses
1071 about signed comparisons against zero. */
1072 switch (TREE_CODE (cond))
1073 {
1074 case EQ_EXPR:
1075 case UNEQ_EXPR:
1076 /* Floating point comparisons appear to behave in a very
1077 unpredictable way because of the special role of = tests in
1078 FP code. */
1079 if (FLOAT_TYPE_P (type))
1080 ;
1081 /* Comparisons with 0 are often used for booleans and there is
1082 nothing useful to predict about them. */
1083 else if (integer_zerop (op0)
1084 || integer_zerop (TREE_OPERAND (cond, 1)))
1085 ;
1086 else
1087 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
1088 break;
1089
1090 case NE_EXPR:
1091 case LTGT_EXPR:
1092 /* Floating point comparisons appear to behave in a very
1093 unpredictable way because of the special role of = tests in
1094 FP code. */
1095 if (FLOAT_TYPE_P (type))
1096 ;
1097 /* Comparisons with 0 are often used for booleans and there is
1098 nothing useful to predict about them. */
1099 else if (integer_zerop (op0)
1100 || integer_zerop (TREE_OPERAND (cond, 1)))
1101 ;
1102 else
1103 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
1104 break;
1105
1106 case ORDERED_EXPR:
1107 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
1108 break;
1109
1110 case UNORDERED_EXPR:
1111 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
1112 break;
1113
1114 case LE_EXPR:
1115 case LT_EXPR:
1116 if (integer_zerop (TREE_OPERAND (cond, 1))
1117 || integer_onep (TREE_OPERAND (cond, 1))
1118 || integer_all_onesp (TREE_OPERAND (cond, 1))
1119 || real_zerop (TREE_OPERAND (cond, 1))
1120 || real_onep (TREE_OPERAND (cond, 1))
1121 || real_minus_onep (TREE_OPERAND (cond, 1)))
1122 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
1123 break;
1124
1125 case GE_EXPR:
1126 case GT_EXPR:
1127 if (integer_zerop (TREE_OPERAND (cond, 1))
1128 || integer_onep (TREE_OPERAND (cond, 1))
1129 || integer_all_onesp (TREE_OPERAND (cond, 1))
1130 || real_zerop (TREE_OPERAND (cond, 1))
1131 || real_onep (TREE_OPERAND (cond, 1))
1132 || real_minus_onep (TREE_OPERAND (cond, 1)))
1133 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
1134 break;
1135
1136 default:
1137 break;
1138 }
1139 }
1140
1141 /* Try to guess whether the return value means an error code. */
1142 static enum br_predictor
1143 return_prediction (tree val, enum prediction *prediction)
1144 {
1145 /* VOID. */
1146 if (!val)
1147 return PRED_NO_PREDICTION;
1148 /* Different heuristics for pointers and scalars. */
1149 if (POINTER_TYPE_P (TREE_TYPE (val)))
1150 {
1151 /* NULL is usually not returned. */
1152 if (integer_zerop (val))
1153 {
1154 *prediction = NOT_TAKEN;
1155 return PRED_NULL_RETURN;
1156 }
1157 }
1158 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
1159 {
1160 /* Negative return values are often used to indicate
1161 errors. */
1162 if (TREE_CODE (val) == INTEGER_CST
1163 && tree_int_cst_sgn (val) < 0)
1164 {
1165 *prediction = NOT_TAKEN;
1166 return PRED_NEGATIVE_RETURN;
1167 }
1168 /* Constant return values seem to be commonly taken.
1169 Zero/one often represent booleans so exclude them from the
1170 heuristics. */
1171 if (TREE_CONSTANT (val)
1172 && (!integer_zerop (val) && !integer_onep (val)))
1173 {
1174 *prediction = TAKEN;
1175 return PRED_CONST_RETURN;
1176 }
1177 }
1178 return PRED_NO_PREDICTION;
1179 }
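
/* Illustrative source patterns (not from this file) that the return-value
   heuristics above key on: */
#if 0
int
example (int fd, int len)
{
  if (fd < 0)
    return -1;    /* Negative constant: PRED_NEGATIVE_RETURN, the path
                     leading here is predicted not taken.  */
  if (len == 0)
    return 0;     /* Zero/one are excluded; they often mean booleans.  */
  return len;     /* Non-constant: no prediction.  */
}
#endif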
1180
1181 /* Find the basic block with the return expression and look for a possible
1182 return value, trying to apply the return-prediction heuristics. */
1183 static void
1184 apply_return_prediction (int *heads)
1185 {
1186 tree return_stmt;
1187 tree return_val;
1188 edge e;
1189 tree phi;
1190 int phi_num_args, i;
1191 enum br_predictor pred;
1192 enum prediction direction;
1193 edge_iterator ei;
1194
1195 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1196 {
1197 return_stmt = last_stmt (e->src);
1198 if (TREE_CODE (return_stmt) == RETURN_EXPR)
1199 break;
1200 }
1201 if (!e)
1202 return;
1203 return_val = TREE_OPERAND (return_stmt, 0);
1204 if (!return_val)
1205 return;
1206 if (TREE_CODE (return_val) == MODIFY_EXPR)
1207 return_val = TREE_OPERAND (return_val, 1);
1208 if (TREE_CODE (return_val) != SSA_NAME
1209 || !SSA_NAME_DEF_STMT (return_val)
1210 || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
1211 return;
1212 phi = SSA_NAME_DEF_STMT (return_val);
1213 while (phi)
1214 {
1215 tree next = PHI_CHAIN (phi);
1216 if (PHI_RESULT (phi) == return_val)
1217 break;
1218 phi = next;
1219 }
1220 if (!phi)
1221 return;
1222 phi_num_args = PHI_NUM_ARGS (phi);
1223 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
1224
1225 /* Avoid the degenerate case where all return values from the function
1226 belong to the same category (i.e. they are all positive constants)
1227 so we can hardly say anything about them. */
1228 for (i = 1; i < phi_num_args; i++)
1229 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
1230 break;
1231 if (i != phi_num_args)
1232 for (i = 0; i < phi_num_args; i++)
1233 {
1234 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
1235 if (pred != PRED_NO_PREDICTION)
1236 predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
1237 direction);
1238 }
1239 }
1240
1241 /* Look for basic blocks that contain unlikely-to-happen events
1242 (such as noreturn calls) and mark all paths leading to execution
1243 of these basic blocks as unlikely. */
1244
1245 static void
1246 tree_bb_level_predictions (void)
1247 {
1248 basic_block bb;
1249 int *heads;
1250
1251 heads = xmalloc (sizeof (int) * last_basic_block);
1252 memset (heads, -1, sizeof (int) * last_basic_block);
1253 heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
1254
1255 apply_return_prediction (heads);
1256
1257 FOR_EACH_BB (bb)
1258 {
1259 block_stmt_iterator bsi;
1260
1261 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1262 {
1263 tree stmt = bsi_stmt (bsi);
1264 switch (TREE_CODE (stmt))
1265 {
1266 case MODIFY_EXPR:
1267 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
1268 {
1269 stmt = TREE_OPERAND (stmt, 1);
1270 goto call_expr;
1271 }
1272 break;
1273 case CALL_EXPR:
1274 call_expr:;
1275 if (call_expr_flags (stmt) & ECF_NORETURN)
1276 predict_paths_leading_to (bb, heads, PRED_NORETURN,
1277 NOT_TAKEN);
1278 break;
1279 default:
1280 break;
1281 }
1282 }
1283 }
1284
1285 free (heads);
1286 }
1287
1288 /* Predict branch probabilities and estimate profile of the tree CFG. */
1289 static void
1290 tree_estimate_probability (void)
1291 {
1292 basic_block bb;
1293 struct loops loops_info;
1294
1295 flow_loops_find (&loops_info, LOOP_TREE);
1296 if (dump_file && (dump_flags & TDF_DETAILS))
1297 flow_loops_dump (&loops_info, dump_file, NULL, 0);
1298
1299 add_noreturn_fake_exit_edges ();
1300 connect_infinite_loops_to_exit ();
1301 calculate_dominance_info (CDI_DOMINATORS);
1302 calculate_dominance_info (CDI_POST_DOMINATORS);
1303
1304 tree_bb_level_predictions ();
1305
1306 mark_irreducible_loops (&loops_info);
1307 predict_loops (&loops_info, false);
1308
1309 FOR_EACH_BB (bb)
1310 {
1311 edge e;
1312 edge_iterator ei;
1313
1314 FOR_EACH_EDGE (e, ei, bb->succs)
1315 {
1316 /* Predict early returns to be probable, as we've already taken
1317 care of error returns and other cases are often used for
1318 fast paths through the function. */
1319 if (e->dest == EXIT_BLOCK_PTR
1320 && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
1321 && EDGE_COUNT (bb->preds) > 1)
1322 {
1323 edge e1;
1324 edge_iterator ei1;
1325
1326 FOR_EACH_EDGE (e1, ei1, bb->preds)
1327 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
1328 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
1329 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
1330 && !last_basic_block_p (e1->src))
1331 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
1332 }
1333
1334 /* Look for the block we are guarding (i.e. we dominate it,
1335 but it doesn't postdominate us). */
1336 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
1337 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
1338 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
1339 {
1340 block_stmt_iterator bi;
1341
1342 /* The call heuristic claims that a guarded function call
1343 is improbable. This is because such calls are often used
1344 to signal exceptional situations such as printing error
1345 messages. */
1346 for (bi = bsi_start (e->dest); !bsi_end_p (bi);
1347 bsi_next (&bi))
1348 {
1349 tree stmt = bsi_stmt (bi);
1350 if ((TREE_CODE (stmt) == CALL_EXPR
1351 || (TREE_CODE (stmt) == MODIFY_EXPR
1352 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
1353 /* Constant and pure calls are hardly used to signal
1354 something exceptional. */
1355 && TREE_SIDE_EFFECTS (stmt))
1356 {
1357 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
1358 break;
1359 }
1360 }
1361 }
1362 }
1363 tree_predict_by_opcode (bb);
1364 }
1365 FOR_EACH_BB (bb)
1366 combine_predictions_for_bb (dump_file, bb);
1367
1368 if (0) /* FIXME: Enable once we pass the profile down to the RTL level. */
1369 strip_builtin_expect ();
1370 estimate_bb_frequencies (&loops_info);
1371 free_dominance_info (CDI_POST_DOMINATORS);
1372 remove_fake_exit_edges ();
1373 flow_loops_free (&loops_info);
1374 if (dump_file && (dump_flags & TDF_DETAILS))
1375 dump_tree_cfg (dump_file, dump_flags);
1376 if (profile_status == PROFILE_ABSENT)
1377 profile_status = PROFILE_GUESSED;
1378 }
1379 \f
1380 /* __builtin_expect dropped tokens into the insn stream describing expected
1381 values of registers. Generate branch probabilities based on these
1382 values. */
1383
1384 void
1385 expected_value_to_br_prob (void)
1386 {
1387 rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;
1388
1389 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1390 {
1391 switch (GET_CODE (insn))
1392 {
1393 case NOTE:
1394 /* Look for expected value notes. */
1395 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
1396 {
1397 ev = NOTE_EXPECTED_VALUE (insn);
1398 ev_reg = XEXP (ev, 0);
1399 delete_insn (insn);
1400 }
1401 continue;
1402
1403 case CODE_LABEL:
1404 /* Never propagate across labels. */
1405 ev = NULL_RTX;
1406 continue;
1407
1408 case JUMP_INSN:
1409 /* Look for simple conditional branches. If we haven't got an
1410 expected value yet, no point going further. */
1411 if (!JUMP_P (insn) || ev == NULL_RTX
1412 || ! any_condjump_p (insn))
1413 continue;
1414 break;
1415
1416 default:
1417 /* Look for insns that clobber the EV register. */
1418 if (ev && reg_set_p (ev_reg, insn))
1419 ev = NULL_RTX;
1420 continue;
1421 }
1422
1423 /* Collect the branch condition, hopefully relative to EV_REG. */
1424 /* ??? At present we'll miss things like
1425 (expected_value (eq r70 0))
1426 (set r71 -1)
1427 (set r80 (lt r70 r71))
1428 (set pc (if_then_else (ne r80 0) ...))
1429 as canonicalize_condition will render this to us as
1430 (lt r70, r71)
1431 Could use cselib to try and reduce this further. */
1432 cond = XEXP (SET_SRC (pc_set (insn)), 0);
1433 cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
1434 false, false);
1435 if (! cond || XEXP (cond, 0) != ev_reg
1436 || GET_CODE (XEXP (cond, 1)) != CONST_INT)
1437 continue;
1438
1439 /* Substitute and simplify. Given that the expression we're
1440 building involves two constants, we should wind up with either
1441 true or false. */
1442 cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
1443 XEXP (ev, 1), XEXP (cond, 1));
1444 cond = simplify_rtx (cond);
1445
1446 /* Turn the condition into a scaled branch probability. */
1447 if (cond != const_true_rtx && cond != const0_rtx)
1448 abort ();
1449 predict_insn_def (insn, PRED_BUILTIN_EXPECT,
1450 cond == const_true_rtx ? TAKEN : NOT_TAKEN);
1451 }
1452 }
1453 \f
1454 /* Check whether this is the last basic block of the function. Commonly
1455 there is one extra common cleanup block. */
1456 static bool
1457 last_basic_block_p (basic_block bb)
1458 {
1459 if (bb == EXIT_BLOCK_PTR)
1460 return false;
1461
1462 return (bb->next_bb == EXIT_BLOCK_PTR
1463 || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
1464 && EDGE_COUNT (bb->succs) == 1
1465 && EDGE_SUCC (bb, 0)->dest->next_bb == EXIT_BLOCK_PTR));
1466 }
1467
1468 /* Set branch probabilities according to PRED and TAKEN.
1469 HEADS[bb->index] should be the index of the basic block in which we
1470 need to alter branch predictions (i.e. the first of our dominators
1471 such that we do not post-dominate it); we fill this information in
1472 on demand, so -1 may be there in case it was not needed yet. */
1473
1474 static void
1475 predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
1476 enum prediction taken)
1477 {
1478 edge e;
1479 edge_iterator ei;
1480 int y;
1481
1482 if (heads[bb->index] < 0)
1483 {
1484 /* This is first time we need this field in heads array; so
1485 find first dominator that we do not post-dominate (we are
1486 using already known members of heads array). */
1487 basic_block ai = bb;
1488 basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
1489 int head;
1490
1491 while (heads[next_ai->index] < 0)
1492 {
1493 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1494 break;
1495 heads[next_ai->index] = ai->index;
1496 ai = next_ai;
1497 next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
1498 }
1499 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1500 head = next_ai->index;
1501 else
1502 head = heads[next_ai->index];
1503 while (next_ai != bb)
1504 {
1505 next_ai = ai;
1506 if (heads[ai->index] == ENTRY_BLOCK)
1507 ai = ENTRY_BLOCK_PTR;
1508 else
1509 ai = BASIC_BLOCK (heads[ai->index]);
1510 heads[next_ai->index] = head;
1511 }
1512 }
1513 y = heads[bb->index];
1514
1515 /* Now find the edge that leads to our branch and apply the prediction. */
1516
1517 if (y == last_basic_block)
1518 return;
1519 FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
1520 if (e->dest->index >= 0
1521 && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
1522 predict_edge_def (e, pred, taken);
1523 }
1524 \f
1525 /* This is used to carry information about basic blocks. It is
1526 attached to the AUX field of the standard CFG block. */
1527
1528 typedef struct block_info_def
1529 {
1530 /* Estimated frequency of execution of basic_block. */
1531 sreal frequency;
1532
1533 /* To keep queue of basic blocks to process. */
1534 basic_block next;
1535
1536 /* Number of predecessors we need to visit first. */
1537 int npredecessors;
1538 } *block_info;
1539
1540 /* Similar information for edges. */
1541 typedef struct edge_info_def
1542 {
1543 /* If the edge is a loopback edge, the probability that the edge will be
1544 reached given that the header is. The estimated number of iterations
1545 of the loop can then be computed as 1 / (1 - back_edge_prob). */
1546 sreal back_edge_prob;
1547 /* True if the edge is a loopback edge in the natural loop. */
1548 unsigned int back_edge:1;
1549 } *edge_info;
1550
1551 #define BLOCK_INFO(B) ((block_info) (B)->aux)
1552 #define EDGE_INFO(E) ((edge_info) (E)->aux)
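
/* A worked example (not part of this file) of the relation stated in
   edge_info_def above: with back_edge_prob b, the expected number of loop
   iterations is 1 / (1 - b). */
#if 0
#include <stdio.h>

int
main (void)
{
  double back_edge_prob = 0.9;   /* Back edge taken 90% of the time.  */
  printf ("%f\n", 1.0 / (1.0 - back_edge_prob));   /* Prints 10.000000.  */
  return 0;
}
#endif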
1553
1554 /* Helper function for estimate_bb_frequencies.
1555 Propagate the frequencies for LOOP. */
1556
1557 static void
1558 propagate_freq (struct loop *loop, bitmap tovisit)
1559 {
1560 basic_block head = loop->header;
1561 basic_block bb;
1562 basic_block last;
1563 unsigned i;
1564 edge e;
1565 basic_block nextbb;
1566 bitmap_iterator bi;
1567
1568 /* For each basic block we need to visit, count the number of its
1569 predecessors that we need to visit first. */
1570 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
1571 {
1572 edge_iterator ei;
1573 int count = 0;
1574
1575 /* The outermost "loop" includes the exit block, which we can not
1576 look up via BASIC_BLOCK. Detect this and use EXIT_BLOCK_PTR
1577 directly. Do the same for the entry block. */
1578 if (i == (unsigned)ENTRY_BLOCK)
1579 bb = ENTRY_BLOCK_PTR;
1580 else if (i == (unsigned)EXIT_BLOCK)
1581 bb = EXIT_BLOCK_PTR;
1582 else
1583 bb = BASIC_BLOCK (i);
1584
1585 FOR_EACH_EDGE (e, ei, bb->preds)
1586 {
1587 bool visit = bitmap_bit_p (tovisit, e->src->index);
1588
1589 if (visit && !(e->flags & EDGE_DFS_BACK))
1590 count++;
1591 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
1592 fprintf (dump_file,
1593 "Irreducible region hit, ignoring edge to %i->%i\n",
1594 e->src->index, bb->index);
1595 }
1596 BLOCK_INFO (bb)->npredecessors = count;
1597 }
1598
1599 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
1600 last = head;
1601 for (bb = head; bb; bb = nextbb)
1602 {
1603 edge_iterator ei;
1604 sreal cyclic_probability, frequency;
1605
1606 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
1607 memcpy (&frequency, &real_zero, sizeof (real_zero));
1608
1609 nextbb = BLOCK_INFO (bb)->next;
1610 BLOCK_INFO (bb)->next = NULL;
1611
1612 /* Compute frequency of basic block. */
1613 if (bb != head)
1614 {
1615 #ifdef ENABLE_CHECKING
1616 FOR_EACH_EDGE (e, ei, bb->preds)
1617 if (bitmap_bit_p (tovisit, e->src->index)
1618 && !(e->flags & EDGE_DFS_BACK))
1619 abort ();
1620 #endif
1621
1622 FOR_EACH_EDGE (e, ei, bb->preds)
1623 if (EDGE_INFO (e)->back_edge)
1624 {
1625 sreal_add (&cyclic_probability, &cyclic_probability,
1626 &EDGE_INFO (e)->back_edge_prob);
1627 }
1628 else if (!(e->flags & EDGE_DFS_BACK))
1629 {
1630 sreal tmp;
1631
1632 /* frequency += (e->probability
1633 * BLOCK_INFO (e->src)->frequency /
1634 REG_BR_PROB_BASE); */
1635
1636 sreal_init (&tmp, e->probability, 0);
1637 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
1638 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
1639 sreal_add (&frequency, &frequency, &tmp);
1640 }
1641
1642 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
1643 {
1644 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
1645 sizeof (frequency));
1646 }
1647 else
1648 {
1649 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
1650 {
1651 memcpy (&cyclic_probability, &real_almost_one,
1652 sizeof (real_almost_one));
1653 }
1654
1655 /* BLOCK_INFO (bb)->frequency = frequency
1656 / (1 - cyclic_probability) */
1657
1658 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
1659 sreal_div (&BLOCK_INFO (bb)->frequency,
1660 &frequency, &cyclic_probability);
1661 }
1662 }
1663
1664 bitmap_clear_bit (tovisit, bb->index);
1665
1666 e = find_edge (bb, head);
1667 if (e)
1668 {
1669 sreal tmp;
1670
1671 /* EDGE_INFO (e)->back_edge_prob
1672 = ((e->probability * BLOCK_INFO (bb)->frequency)
1673 / REG_BR_PROB_BASE); */
1674
1675 sreal_init (&tmp, e->probability, 0);
1676 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
1677 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1678 &tmp, &real_inv_br_prob_base);
1679 }
1680
1681 /* Propagate to successor blocks. */
1682 FOR_EACH_EDGE (e, ei, bb->succs)
1683 if (!(e->flags & EDGE_DFS_BACK)
1684 && BLOCK_INFO (e->dest)->npredecessors)
1685 {
1686 BLOCK_INFO (e->dest)->npredecessors--;
1687 if (!BLOCK_INFO (e->dest)->npredecessors)
1688 {
1689 if (!nextbb)
1690 nextbb = e->dest;
1691 else
1692 BLOCK_INFO (last)->next = e->dest;
1693
1694 last = e->dest;
1695 }
1696 }
1697 }
1698 }
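
/* A double-precision sketch (not from this file) of the acyclic case of
   the propagation above: each block's frequency is the sum over incoming
   forward edges of freq(src) * prob(edge). The real code uses sreal and
   folds back-edge probabilities in via cyclic_probability. */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Diamond CFG: A -> B (30%), A -> C (70%), B -> D, C -> D.  */
  double freq_a = 1.0;
  double freq_b = freq_a * 0.3;
  double freq_c = freq_a * 0.7;
  double freq_d = freq_b * 1.0 + freq_c * 1.0;
  printf ("B=%.2f C=%.2f D=%.2f\n", freq_b, freq_c, freq_d);  /* D == 1.00 */
  return 0;
}
#endif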
1699
1700 /* Estimate probabilities of loopback edges in loops at the same nesting level. */
1701
1702 static void
1703 estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
1704 {
1705 struct loop *loop;
1706
1707 for (loop = first_loop; loop; loop = loop->next)
1708 {
1709 edge e;
1710 basic_block *bbs;
1711 unsigned i;
1712
1713 estimate_loops_at_level (loop->inner, tovisit);
1714
1715 /* Do not do this for dummy function loop. */
1716 if (EDGE_COUNT (loop->latch->succs) > 0)
1717 {
1718 /* Find current loop back edge and mark it. */
1719 e = loop_latch_edge (loop);
1720 EDGE_INFO (e)->back_edge = 1;
1721 }
1722
1723 bbs = get_loop_body (loop);
1724 for (i = 0; i < loop->num_nodes; i++)
1725 bitmap_set_bit (tovisit, bbs[i]->index);
1726 free (bbs);
1727 propagate_freq (loop, tovisit);
1728 }
1729 }
1730
1731 /* Convert counts measured by profile driven feedback to frequencies.
1732 Return nonzero iff there was any nonzero execution count. */
1733
1734 int
1735 counts_to_freqs (void)
1736 {
1737 gcov_type count_max, true_count_max = 0;
1738 basic_block bb;
1739
1740 FOR_EACH_BB (bb)
1741 true_count_max = MAX (bb->count, true_count_max);
1742
1743 count_max = MAX (true_count_max, 1);
1744 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1745 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
1746 return true_count_max;
1747 }
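
/* A worked example (not part of this file) of the scaling above, assuming
   BB_FREQ_MAX == 10000: the hottest block maps to BB_FREQ_MAX and the rest
   scale proportionally, with round-to-nearest via count_max / 2. */
#if 0
#include <stdio.h>

int
main (void)
{
  long long count_max = 500;   /* Execution count of the hottest block.  */
  long long count = 125;       /* Some cooler block.  */
  printf ("%d\n", (int) ((count * 10000 + count_max / 2) / count_max));
  /* Prints 2500, i.e. a quarter of BB_FREQ_MAX.  */
  return 0;
}
#endif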
1748
1749 /* Return true if the function is likely to be expensive, so there is no point
1750 in optimizing the prologue or epilogue for speed, or in inlining at the
1751 expense of code size growth. THRESHOLD is the limit on the number of
1752 instructions the function can execute on average and still be considered not expensive. */
1753
1754 bool
1755 expensive_function_p (int threshold)
1756 {
1757 unsigned int sum = 0;
1758 basic_block bb;
1759 unsigned int limit;
1760
1761 /* We can not compute accurately for large thresholds due to scaled
1762 frequencies. */
1763 if (threshold > BB_FREQ_MAX)
1764 abort ();
1765
1766 /* Frequencies are out of range. This either means that the function contains
1767 an internal loop executing more than BB_FREQ_MAX times, or that profile
1768 feedback is available and the function has not been executed at all. */
1769 if (ENTRY_BLOCK_PTR->frequency == 0)
1770 return true;
1771
1772 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
1773 limit = ENTRY_BLOCK_PTR->frequency * threshold;
1774 FOR_EACH_BB (bb)
1775 {
1776 rtx insn;
1777
1778 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
1779 insn = NEXT_INSN (insn))
1780 if (active_insn_p (insn))
1781 {
1782 sum += bb->frequency;
1783 if (sum > limit)
1784 return true;
1785 }
1786 }
1787
1788 return false;
1789 }
1790
1791 /* Estimate basic block frequencies from the given branch probabilities. */
1792
1793 static void
1794 estimate_bb_frequencies (struct loops *loops)
1795 {
1796 basic_block bb;
1797 sreal freq_max;
1798
1799 if (!flag_branch_probabilities || !counts_to_freqs ())
1800 {
1801 static int real_values_initialized = 0;
1802 bitmap tovisit;
1803
1804 if (!real_values_initialized)
1805 {
1806 real_values_initialized = 1;
1807 sreal_init (&real_zero, 0, 0);
1808 sreal_init (&real_one, 1, 0);
1809 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
1810 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
1811 sreal_init (&real_one_half, 1, -1);
1812 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
1813 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
1814 }
1815
1816 mark_dfs_back_edges ();
1817
1818 EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->probability = REG_BR_PROB_BASE;
1819
1820 /* Set up block info for each basic block. */
1821 tovisit = BITMAP_XMALLOC ();
1822 alloc_aux_for_blocks (sizeof (struct block_info_def));
1823 alloc_aux_for_edges (sizeof (struct edge_info_def));
1824 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1825 {
1826 edge e;
1827 edge_iterator ei;
1828
1829 FOR_EACH_EDGE (e, ei, bb->succs)
1830 {
1831 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
1832 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1833 &EDGE_INFO (e)->back_edge_prob,
1834 &real_inv_br_prob_base);
1835 }
1836 }
1837
1838 /* First compute probabilities locally for each loop from innermost
1839 to outermost to examine probabilities for back edges. */
1840 estimate_loops_at_level (loops->tree_root, tovisit);
1841
1842 memcpy (&freq_max, &real_zero, sizeof (real_zero));
1843 FOR_EACH_BB (bb)
1844 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
1845 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
1846
1847 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
1848 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1849 {
1850 sreal tmp;
1851
1852 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
1853 sreal_add (&tmp, &tmp, &real_one_half);
1854 bb->frequency = sreal_to_int (&tmp);
1855 }
1856
1857 free_aux_for_blocks ();
1858 free_aux_for_edges ();
1859 BITMAP_XFREE (tovisit);
1860 }
1861 compute_function_frequency ();
1862 if (flag_reorder_functions)
1863 choose_function_section ();
1864 }
1865
1866 /* Decide whether the function is hot, cold, or unlikely to be executed. */
1867 static void
1868 compute_function_frequency (void)
1869 {
1870 basic_block bb;
1871
1872 if (!profile_info || !flag_branch_probabilities)
1873 return;
1874 cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
1875 FOR_EACH_BB (bb)
1876 {
1877 if (maybe_hot_bb_p (bb))
1878 {
1879 cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
1880 return;
1881 }
1882 if (!probably_never_executed_bb_p (bb))
1883 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
1884 }
1885 }
1886
1887 /* Choose appropriate section for the function. */
1888 static void
1889 choose_function_section (void)
1890 {
1891 if (DECL_SECTION_NAME (current_function_decl)
1892 || !targetm.have_named_sections
1893 /* Theoretically we can split the gnu.linkonce text section too,
1894 but this requires more work, as the frequency needs to match
1895 for all generated objects, so we would need to merge the frequency
1896 of all instances. For now just never set the frequency for these. */
1897 || DECL_ONE_ONLY (current_function_decl))
1898 return;
1899
1900 /* If we are doing the partitioning optimization, let the optimization
1901 choose the correct section into which to put things. */
1902
1903 if (flag_reorder_blocks_and_partition)
1904 return;
1905
1906 if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
1907 DECL_SECTION_NAME (current_function_decl) =
1908 build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
1909 if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
1910 DECL_SECTION_NAME (current_function_decl) =
1911 build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
1912 UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
1913 }
1914
1915
1916 struct tree_opt_pass pass_profile =
1917 {
1918 "profile", /* name */
1919 NULL, /* gate */
1920 tree_estimate_probability, /* execute */
1921 NULL, /* sub */
1922 NULL, /* next */
1923 0, /* static_pass_number */
1924 TV_BRANCH_PROB, /* tv_id */
1925 PROP_cfg, /* properties_required */
1926 0, /* properties_provided */
1927 0, /* properties_destroyed */
1928 0, /* todo_flags_start */
1929 TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1930 0 /* letter */
1931 };